From 03107339afbabfb0efa13117771f02c205069375 Mon Sep 17 00:00:00 2001 From: mwiegand Date: Thu, 17 Mar 2022 22:16:30 +0100 Subject: [PATCH] Initial Commit --- .envrc | 3 + .venv/bin/Activate.ps1 | 241 + .venv/bin/activate | 66 + .venv/bin/activate.csh | 25 + .venv/bin/activate.fish | 64 + .venv/bin/jsonschema | 8 + .venv/bin/normalizer | 8 + .venv/bin/pip | 8 + .venv/bin/pip3 | 8 + .venv/bin/pip3.9 | 8 + .venv/bin/python | 1 + .venv/bin/python3 | 1 + .venv/bin/python3.9 | 1 + .../OSlash-0.6.3.dist-info/INSTALLER | 1 + .../OSlash-0.6.3.dist-info/LICENSE | 20 + .../OSlash-0.6.3.dist-info/METADATA | 128 + .../OSlash-0.6.3.dist-info/RECORD | 53 + .../OSlash-0.6.3.dist-info/WHEEL | 5 + .../OSlash-0.6.3.dist-info/top_level.txt | 1 + .../OSlash-0.6.3.dist-info/zip-safe | 1 + .../PyNaCl-1.5.0.dist-info/INSTALLER | 1 + .../PyNaCl-1.5.0.dist-info/LICENSE | 174 + .../PyNaCl-1.5.0.dist-info/METADATA | 246 + .../PyNaCl-1.5.0.dist-info/RECORD | 68 + .../PyNaCl-1.5.0.dist-info/WHEEL | 5 + .../PyNaCl-1.5.0.dist-info/top_level.txt | 2 + .../_cffi_backend.cpython-39-darwin.so | Bin 0 -> 202488 bytes .../site-packages/_distutils_hack/__init__.py | 198 + .../site-packages/_distutils_hack/override.py | 1 + .../site-packages/_pyrsistent_version.py | 1 + .../anyio-3.5.0.dist-info/INSTALLER | 1 + .../anyio-3.5.0.dist-info/LICENSE | 20 + .../anyio-3.5.0.dist-info/METADATA | 106 + .../anyio-3.5.0.dist-info/RECORD | 82 + .../site-packages/anyio-3.5.0.dist-info/WHEEL | 5 + .../anyio-3.5.0.dist-info/entry_points.txt | 3 + .../anyio-3.5.0.dist-info/top_level.txt | 1 + .../python3.9/site-packages/anyio/__init__.py | 116 + .../site-packages/anyio/_backends/__init__.py | 0 .../site-packages/anyio/_backends/_asyncio.py | 1924 ++++ .../site-packages/anyio/_backends/_trio.py | 833 ++ .../site-packages/anyio/_core/__init__.py | 0 .../site-packages/anyio/_core/_compat.py | 175 + .../site-packages/anyio/_core/_eventloop.py | 140 + .../site-packages/anyio/_core/_exceptions.py | 85 + 
.../site-packages/anyio/_core/_fileio.py | 529 + .../site-packages/anyio/_core/_resources.py | 16 + .../site-packages/anyio/_core/_signals.py | 22 + .../site-packages/anyio/_core/_sockets.py | 506 + .../site-packages/anyio/_core/_streams.py | 42 + .../anyio/_core/_subprocesses.py | 99 + .../anyio/_core/_synchronization.py | 566 ++ .../site-packages/anyio/_core/_tasks.py | 158 + .../site-packages/anyio/_core/_testing.py | 75 + .../site-packages/anyio/_core/_typedattr.py | 79 + .../site-packages/anyio/abc/__init__.py | 37 + .../site-packages/anyio/abc/_resources.py | 26 + .../site-packages/anyio/abc/_sockets.py | 156 + .../site-packages/anyio/abc/_streams.py | 187 + .../site-packages/anyio/abc/_subprocesses.py | 78 + .../site-packages/anyio/abc/_tasks.py | 87 + .../site-packages/anyio/abc/_testing.py | 37 + .../site-packages/anyio/from_thread.py | 416 + .../python3.9/site-packages/anyio/lowlevel.py | 160 + .../python3.9/site-packages/anyio/py.typed | 0 .../site-packages/anyio/pytest_plugin.py | 152 + .../site-packages/anyio/streams/__init__.py | 0 .../site-packages/anyio/streams/buffered.py | 116 + .../site-packages/anyio/streams/file.py | 139 + .../site-packages/anyio/streams/memory.py | 256 + .../site-packages/anyio/streams/stapled.py | 124 + .../site-packages/anyio/streams/text.py | 130 + .../site-packages/anyio/streams/tls.py | 281 + .../site-packages/anyio/to_process.py | 229 + .../site-packages/anyio/to_thread.py | 54 + .../apischema-0.16.6.dist-info/INSTALLER | 1 + .../apischema-0.16.6.dist-info/LICENSE.txt | 21 + .../apischema-0.16.6.dist-info/METADATA | 145 + .../apischema-0.16.6.dist-info/RECORD | 133 + .../apischema-0.16.6.dist-info/WHEEL | 5 + .../apischema-0.16.6.dist-info/top_level.txt | 1 + .../site-packages/apischema/__init__.py | 99 + .../site-packages/apischema/aliases.py | 53 + .../site-packages/apischema/cache.py | 54 + .../apischema/conversions/__init__.py | 43 + .../apischema/conversions/conversions.py | 111 + 
.../apischema/conversions/converters.py | 208 + .../apischema/conversions/dataclass_models.py | 92 + .../apischema/conversions/utils.py | 80 + .../apischema/conversions/visitor.py | 245 + .../apischema/conversions/wrappers.py | 55 + .../site-packages/apischema/dataclasses.py | 26 + .../site-packages/apischema/dependencies.py | 75 + .../apischema/dependent_required.py | 12 + .../apischema/deserialization/__init__.py | 924 ++ .../apischema/deserialization/coercion.py | 55 + .../apischema/deserialization/flattened.py | 46 + .../site-packages/apischema/fields.py | 145 + .../apischema/graphql/__init__.py | 23 + .../apischema/graphql/interfaces.py | 18 + .../apischema/graphql/relay/__init__.py | 17 + .../apischema/graphql/relay/connections.py | 85 + .../graphql/relay/global_identification.py | 160 + .../apischema/graphql/relay/mutations.py | 136 + .../apischema/graphql/relay/utils.py | 12 + .../apischema/graphql/resolvers.py | 336 + .../site-packages/apischema/graphql/schema.py | 1040 ++ .../apischema/json_schema/__init__.py | 13 + .../json_schema/conversions_resolver.py | 134 + .../apischema/json_schema/patterns.py | 23 + .../apischema/json_schema/refs.py | 148 + .../apischema/json_schema/schema.py | 737 ++ .../apischema/json_schema/types.py | 130 + .../apischema/json_schema/versions.py | 129 + .../apischema/metadata/__init__.py | 52 + .../apischema/metadata/implem.py | 105 + .../site-packages/apischema/metadata/keys.py | 17 + .../site-packages/apischema/methods.py | 137 + .../apischema/objects/__init__.py | 13 + .../apischema/objects/conversions.py | 177 + .../site-packages/apischema/objects/fields.py | 260 + .../apischema/objects/getters.py | 151 + .../apischema/objects/visitor.py | 153 + .../site-packages/apischema/ordering.py | 142 + .../site-packages/apischema/py.typed | 0 .../site-packages/apischema/recursion.py | 178 + .../apischema/schemas/__init__.py | 142 + .../apischema/schemas/annotations.py | 36 + .../apischema/schemas/constraints.py | 155 + 
.../apischema/serialization/__init__.py | 621 ++ .../serialization/serialized_methods.py | 175 + .../site-packages/apischema/settings.py | 82 + .../python3.9/site-packages/apischema/skip.py | 19 + .../site-packages/apischema/std_types.py | 99 + .../site-packages/apischema/tagged_unions.py | 161 + .../site-packages/apischema/type_names.py | 110 + .../site-packages/apischema/types.py | 122 + .../site-packages/apischema/typing.py | 303 + .../site-packages/apischema/utils.py | 452 + .../apischema/validation/__init__.py | 12 + .../apischema/validation/dependencies.py | 63 + .../apischema/validation/errors.py | 150 + .../apischema/validation/mock.py | 57 + .../apischema/validation/validators.py | 207 + .../site-packages/apischema/visitor.py | 218 + .../python3.9/site-packages/attr/__init__.py | 80 + .../python3.9/site-packages/attr/__init__.pyi | 484 + .../lib/python3.9/site-packages/attr/_cmp.py | 154 + .../lib/python3.9/site-packages/attr/_cmp.pyi | 13 + .../python3.9/site-packages/attr/_compat.py | 261 + .../python3.9/site-packages/attr/_config.py | 33 + .../python3.9/site-packages/attr/_funcs.py | 422 + .../lib/python3.9/site-packages/attr/_make.py | 3173 ++++++ .../python3.9/site-packages/attr/_next_gen.py | 216 + .../site-packages/attr/_version_info.py | 87 + .../site-packages/attr/_version_info.pyi | 9 + .../site-packages/attr/converters.py | 155 + .../site-packages/attr/converters.pyi | 13 + .../site-packages/attr/exceptions.py | 94 + .../site-packages/attr/exceptions.pyi | 17 + .../python3.9/site-packages/attr/filters.py | 54 + .../python3.9/site-packages/attr/filters.pyi | 6 + .../lib/python3.9/site-packages/attr/py.typed | 0 .../python3.9/site-packages/attr/setters.py | 79 + .../python3.9/site-packages/attr/setters.pyi | 19 + .../site-packages/attr/validators.py | 561 ++ .../site-packages/attr/validators.pyi | 78 + .../attrs-21.4.0.dist-info/AUTHORS.rst | 11 + .../attrs-21.4.0.dist-info/INSTALLER | 1 + .../attrs-21.4.0.dist-info/LICENSE | 21 + 
.../attrs-21.4.0.dist-info/METADATA | 232 + .../attrs-21.4.0.dist-info/RECORD | 56 + .../attrs-21.4.0.dist-info/WHEEL | 6 + .../attrs-21.4.0.dist-info/top_level.txt | 2 + .../python3.9/site-packages/attrs/__init__.py | 70 + .../site-packages/attrs/__init__.pyi | 63 + .../site-packages/attrs/converters.py | 3 + .../site-packages/attrs/exceptions.py | 3 + .../python3.9/site-packages/attrs/filters.py | 3 + .../python3.9/site-packages/attrs/py.typed | 0 .../python3.9/site-packages/attrs/setters.py | 3 + .../site-packages/attrs/validators.py | 3 + .../based58-0.1.0.dist-info/INSTALLER | 1 + .../based58-0.1.0.dist-info/METADATA | 101 + .../based58-0.1.0.dist-info/RECORD | 9 + .../based58-0.1.0.dist-info/WHEEL | 4 + .../site-packages/based58/__init__.py | 3 + .../site-packages/based58/__init__.pyi | 17 + .../site-packages/based58/based58.abi3.so | Bin 0 -> 1202269 bytes .../python3.9/site-packages/based58/py.typed | 0 .../cachetools-4.2.4.dist-info/INSTALLER | 1 + .../cachetools-4.2.4.dist-info/LICENSE | 20 + .../cachetools-4.2.4.dist-info/METADATA | 135 + .../cachetools-4.2.4.dist-info/RECORD | 26 + .../cachetools-4.2.4.dist-info/WHEEL | 5 + .../cachetools-4.2.4.dist-info/top_level.txt | 1 + .../cachetools-stubs/METADATA.toml | 1 + .../cachetools-stubs/__init__.pyi | 67 + .../site-packages/cachetools-stubs/cache.pyi | 2 + .../site-packages/cachetools-stubs/fifo.pyi | 2 + .../site-packages/cachetools-stubs/func.pyi | 15 + .../site-packages/cachetools-stubs/keys.pyi | 4 + .../site-packages/cachetools-stubs/lfu.pyi | 2 + .../site-packages/cachetools-stubs/lru.pyi | 2 + .../site-packages/cachetools-stubs/mru.pyi | 2 + .../site-packages/cachetools-stubs/rr.pyi | 2 + .../site-packages/cachetools-stubs/ttl.pyi | 2 + .../site-packages/cachetools/__init__.py | 596 ++ .../site-packages/cachetools/cache.py | 7 + .../site-packages/cachetools/fifo.py | 7 + .../site-packages/cachetools/func.py | 171 + .../site-packages/cachetools/keys.py | 52 + 
.../python3.9/site-packages/cachetools/lfu.py | 7 + .../python3.9/site-packages/cachetools/lru.py | 7 + .../python3.9/site-packages/cachetools/mru.py | 7 + .../python3.9/site-packages/cachetools/rr.py | 7 + .../python3.9/site-packages/cachetools/ttl.py | 7 + .../certifi-2021.10.8.dist-info/INSTALLER | 1 + .../certifi-2021.10.8.dist-info/LICENSE | 21 + .../certifi-2021.10.8.dist-info/METADATA | 83 + .../certifi-2021.10.8.dist-info/RECORD | 13 + .../certifi-2021.10.8.dist-info/WHEEL | 6 + .../certifi-2021.10.8.dist-info/top_level.txt | 1 + .../site-packages/certifi/__init__.py | 3 + .../site-packages/certifi/__main__.py | 12 + .../site-packages/certifi/cacert.pem | 4362 +++++++++ .../python3.9/site-packages/certifi/core.py | 60 + .../cffi-1.15.0.dist-info/INSTALLER | 1 + .../cffi-1.15.0.dist-info/LICENSE | 26 + .../cffi-1.15.0.dist-info/METADATA | 37 + .../cffi-1.15.0.dist-info/RECORD | 44 + .../site-packages/cffi-1.15.0.dist-info/WHEEL | 5 + .../cffi-1.15.0.dist-info/entry_points.txt | 3 + .../cffi-1.15.0.dist-info/top_level.txt | 2 + .../python3.9/site-packages/cffi/__init__.py | 14 + .../site-packages/cffi/_cffi_errors.h | 149 + .../site-packages/cffi/_cffi_include.h | 385 + .../python3.9/site-packages/cffi/_embedding.h | 527 + .venv/lib/python3.9/site-packages/cffi/api.py | 965 ++ .../site-packages/cffi/backend_ctypes.py | 1121 +++ .../site-packages/cffi/cffi_opcode.py | 187 + .../site-packages/cffi/commontypes.py | 80 + .../python3.9/site-packages/cffi/cparser.py | 1006 ++ .../lib/python3.9/site-packages/cffi/error.py | 31 + .../site-packages/cffi/ffiplatform.py | 127 + .../lib/python3.9/site-packages/cffi/lock.py | 30 + .../lib/python3.9/site-packages/cffi/model.py | 617 ++ .../site-packages/cffi/parse_c_type.h | 181 + .../python3.9/site-packages/cffi/pkgconfig.py | 121 + .../site-packages/cffi/recompiler.py | 1581 +++ .../site-packages/cffi/setuptools_ext.py | 219 + .../site-packages/cffi/vengine_cpy.py | 1076 +++ .../site-packages/cffi/vengine_gen.py | 675 ++ 
.../python3.9/site-packages/cffi/verifier.py | 307 + .../INSTALLER | 1 + .../LICENSE | 21 + .../METADATA | 269 + .../RECORD | 33 + .../charset_normalizer-2.0.12.dist-info/WHEEL | 5 + .../entry_points.txt | 3 + .../top_level.txt | 1 + .../charset_normalizer/__init__.py | 56 + .../site-packages/charset_normalizer/api.py | 608 ++ .../charset_normalizer/assets/__init__.py | 1244 +++ .../site-packages/charset_normalizer/cd.py | 340 + .../charset_normalizer/cli/__init__.py | 0 .../charset_normalizer/cli/normalizer.py | 290 + .../charset_normalizer/constant.py | 503 + .../charset_normalizer/legacy.py | 95 + .../site-packages/charset_normalizer/md.py | 559 ++ .../charset_normalizer/models.py | 392 + .../site-packages/charset_normalizer/py.typed | 0 .../site-packages/charset_normalizer/utils.py | 342 + .../charset_normalizer/version.py | 6 + .../construct-2.10.67.dist-info/INSTALLER | 1 + .../construct-2.10.67.dist-info/LICENSE | 22 + .../construct-2.10.67.dist-info/METADATA | 80 + .../construct-2.10.67.dist-info/RECORD | 28 + .../construct-2.10.67.dist-info/WHEEL | 5 + .../construct-2.10.67.dist-info/top_level.txt | 1 + .../construct-stubs/__init__.pyi | 220 + .../site-packages/construct-stubs/core.pyi | 1199 +++ .../site-packages/construct-stubs/debug.pyi | 12 + .../site-packages/construct-stubs/expr.pyi | 558 ++ .../construct-stubs/lib/__init__.pyi | 52 + .../construct-stubs/lib/binary.pyi | 11 + .../construct-stubs/lib/bitstream.pyi | 45 + .../construct-stubs/lib/containers.pyi | 31 + .../site-packages/construct-stubs/lib/hex.pyi | 12 + .../construct-stubs/lib/py3compat.pyi | 20 + .../site-packages/construct-stubs/version.pyi | 5 + .../site-packages/construct/__init__.py | 211 + .../python3.9/site-packages/construct/core.py | 6111 ++++++++++++ .../site-packages/construct/debug.py | 160 + .../python3.9/site-packages/construct/expr.py | 256 + .../site-packages/construct/lib/__init__.py | 52 + .../site-packages/construct/lib/binary.py | 168 + 
.../site-packages/construct/lib/bitstream.py | 147 + .../site-packages/construct/lib/containers.py | 306 + .../site-packages/construct/lib/hex.py | 94 + .../site-packages/construct/lib/py3compat.py | 51 + .../site-packages/construct/version.py | 3 + .../site-packages/construct_typed/__init__.py | 44 + .../construct_typed/dataclass_struct.py | 272 + .../construct_typed/generic_wrapper.py | 41 + .../site-packages/construct_typed/py.typed | 0 .../site-packages/construct_typed/tenum.py | 125 + .../site-packages/construct_typed/version.py | 2 + .../INSTALLER | 1 + .../construct_typing-0.5.2.dist-info/LICENSE | 21 + .../construct_typing-0.5.2.dist-info/METADATA | 141 + .../construct_typing-0.5.2.dist-info/RECORD | 28 + .../construct_typing-0.5.2.dist-info/WHEEL | 5 + .../top_level.txt | 2 + .../site-packages/distutils-precedence.pth | 1 + .../h11-0.12.0.dist-info/INSTALLER | 1 + .../h11-0.12.0.dist-info/LICENSE.txt | 22 + .../h11-0.12.0.dist-info/METADATA | 194 + .../site-packages/h11-0.12.0.dist-info/RECORD | 51 + .../site-packages/h11-0.12.0.dist-info/WHEEL | 5 + .../h11-0.12.0.dist-info/top_level.txt | 1 + .../python3.9/site-packages/h11/__init__.py | 21 + .../lib/python3.9/site-packages/h11/_abnf.py | 129 + .../site-packages/h11/_connection.py | 585 ++ .../python3.9/site-packages/h11/_events.py | 302 + .../python3.9/site-packages/h11/_headers.py | 242 + .../python3.9/site-packages/h11/_readers.py | 222 + .../site-packages/h11/_receivebuffer.py | 152 + .../lib/python3.9/site-packages/h11/_state.py | 307 + .../lib/python3.9/site-packages/h11/_util.py | 122 + .../python3.9/site-packages/h11/_version.py | 16 + .../python3.9/site-packages/h11/_writers.py | 123 + .../site-packages/h11/tests/__init__.py | 0 .../site-packages/h11/tests/data/test-file | 1 + .../site-packages/h11/tests/helpers.py | 77 + .../h11/tests/test_against_stdlib_http.py | 111 + .../h11/tests/test_connection.py | 1078 +++ .../site-packages/h11/tests/test_events.py | 179 + 
.../site-packages/h11/tests/test_headers.py | 151 + .../site-packages/h11/tests/test_helpers.py | 23 + .../site-packages/h11/tests/test_io.py | 544 ++ .../h11/tests/test_receivebuffer.py | 134 + .../site-packages/h11/tests/test_state.py | 250 + .../site-packages/h11/tests/test_util.py | 99 + .../httpcore-0.13.7.dist-info/INSTALLER | 1 + .../httpcore-0.13.7.dist-info/LICENSE.md | 27 + .../httpcore-0.13.7.dist-info/METADATA | 422 + .../httpcore-0.13.7.dist-info/RECORD | 67 + .../httpcore-0.13.7.dist-info/WHEEL | 5 + .../httpcore-0.13.7.dist-info/top_level.txt | 4 + .../site-packages/httpcore/__init__.py | 63 + .../site-packages/httpcore/_async/__init__.py | 0 .../site-packages/httpcore/_async/base.py | 122 + .../httpcore/_async/connection.py | 220 + .../httpcore/_async/connection_pool.py | 362 + .../site-packages/httpcore/_async/http.py | 42 + .../site-packages/httpcore/_async/http11.py | 269 + .../site-packages/httpcore/_async/http2.py | 446 + .../httpcore/_async/http_proxy.py | 290 + .../httpcore/_backends/__init__.py | 0 .../site-packages/httpcore/_backends/anyio.py | 201 + .../httpcore/_backends/asyncio.py | 303 + .../site-packages/httpcore/_backends/auto.py | 67 + .../site-packages/httpcore/_backends/base.py | 137 + .../site-packages/httpcore/_backends/curio.py | 206 + .../site-packages/httpcore/_backends/sync.py | 178 + .../site-packages/httpcore/_backends/trio.py | 212 + .../site-packages/httpcore/_bytestreams.py | 96 + .../site-packages/httpcore/_exceptions.py | 79 + .../site-packages/httpcore/_sync/__init__.py | 0 .../site-packages/httpcore/_sync/base.py | 122 + .../httpcore/_sync/connection.py | 220 + .../httpcore/_sync/connection_pool.py | 362 + .../site-packages/httpcore/_sync/http.py | 42 + .../site-packages/httpcore/_sync/http11.py | 269 + .../site-packages/httpcore/_sync/http2.py | 446 + .../httpcore/_sync/http_proxy.py | 290 + .../site-packages/httpcore/_threadlock.py | 35 + .../site-packages/httpcore/_types.py | 12 + 
.../site-packages/httpcore/_utils.py | 105 + .../python3.9/site-packages/httpcore/py.typed | 0 .../httpx-0.18.2.dist-info/INSTALLER | 1 + .../httpx-0.18.2.dist-info/LICENSE.md | 12 + .../httpx-0.18.2.dist-info/METADATA | 972 ++ .../httpx-0.18.2.dist-info/RECORD | 49 + .../httpx-0.18.2.dist-info/WHEEL | 5 + .../httpx-0.18.2.dist-info/top_level.txt | 2 + .../python3.9/site-packages/httpx/__init__.py | 124 + .../site-packages/httpx/__version__.py | 3 + .../lib/python3.9/site-packages/httpx/_api.py | 445 + .../python3.9/site-packages/httpx/_auth.py | 304 + .../python3.9/site-packages/httpx/_client.py | 1982 ++++ .../python3.9/site-packages/httpx/_compat.py | 25 + .../python3.9/site-packages/httpx/_config.py | 358 + .../python3.9/site-packages/httpx/_content.py | 207 + .../site-packages/httpx/_decoders.py | 369 + .../site-packages/httpx/_exceptions.py | 339 + .../python3.9/site-packages/httpx/_models.py | 1843 ++++ .../site-packages/httpx/_multipart.py | 205 + .../site-packages/httpx/_status_codes.py | 143 + .../httpx/_transports/__init__.py | 0 .../site-packages/httpx/_transports/asgi.py | 169 + .../site-packages/httpx/_transports/base.py | 183 + .../httpx/_transports/default.py | 296 + .../site-packages/httpx/_transports/mock.py | 70 + .../site-packages/httpx/_transports/wsgi.py | 138 + .../python3.9/site-packages/httpx/_types.py | 91 + .../python3.9/site-packages/httpx/_utils.py | 508 + .../python3.9/site-packages/httpx/py.typed | 0 .../idna-3.3.dist-info/INSTALLER | 1 + .../idna-3.3.dist-info/LICENSE.md | 29 + .../site-packages/idna-3.3.dist-info/METADATA | 236 + .../site-packages/idna-3.3.dist-info/RECORD | 23 + .../site-packages/idna-3.3.dist-info/WHEEL | 5 + .../idna-3.3.dist-info/top_level.txt | 1 + .../python3.9/site-packages/idna/__init__.py | 44 + .../lib/python3.9/site-packages/idna/codec.py | 112 + .../python3.9/site-packages/idna/compat.py | 13 + .../lib/python3.9/site-packages/idna/core.py | 397 + .../python3.9/site-packages/idna/idnadata.py | 2137 +++++ 
.../python3.9/site-packages/idna/intranges.py | 54 + .../site-packages/idna/package_data.py | 2 + .../lib/python3.9/site-packages/idna/py.typed | 0 .../python3.9/site-packages/idna/uts46data.py | 8512 +++++++++++++++++ .../jsonrpcclient-4.0.2.dist-info/INSTALLER | 1 + .../jsonrpcclient-4.0.2.dist-info/LICENSE | 21 + .../jsonrpcclient-4.0.2.dist-info/METADATA | 54 + .../jsonrpcclient-4.0.2.dist-info/RECORD | 19 + .../jsonrpcclient-4.0.2.dist-info/WHEEL | 5 + .../top_level.txt | 1 + .../site-packages/jsonrpcclient/__init__.py | 13 + .../jsonrpcclient/id_generators.py | 61 + .../site-packages/jsonrpcclient/py.typed | 0 .../site-packages/jsonrpcclient/requests.py | 67 + .../site-packages/jsonrpcclient/responses.py | 49 + .../site-packages/jsonrpcclient/sentinels.py | 12 + .../site-packages/jsonrpcclient/utils.py | 9 + .../jsonrpcserver-5.0.7.dist-info/INSTALLER | 1 + .../jsonrpcserver-5.0.7.dist-info/LICENSE | 21 + .../jsonrpcserver-5.0.7.dist-info/METADATA | 56 + .../jsonrpcserver-5.0.7.dist-info/RECORD | 36 + .../jsonrpcserver-5.0.7.dist-info/WHEEL | 5 + .../top_level.txt | 1 + .../site-packages/jsonrpcserver/__init__.py | 28 + .../jsonrpcserver/async_dispatcher.py | 104 + .../site-packages/jsonrpcserver/async_main.py | 53 + .../site-packages/jsonrpcserver/codes.py | 8 + .../site-packages/jsonrpcserver/dispatcher.py | 282 + .../site-packages/jsonrpcserver/exceptions.py | 10 + .../site-packages/jsonrpcserver/main.py | 115 + .../site-packages/jsonrpcserver/methods.py | 43 + .../site-packages/jsonrpcserver/py.typed | 0 .../jsonrpcserver/request-schema.json | 39 + .../site-packages/jsonrpcserver/request.py | 12 + .../site-packages/jsonrpcserver/response.py | 103 + .../site-packages/jsonrpcserver/result.py | 67 + .../site-packages/jsonrpcserver/sentinels.py | 19 + .../site-packages/jsonrpcserver/server.py | 21 + .../site-packages/jsonrpcserver/utils.py | 16 + .../jsonschema-4.4.0.dist-info/COPYING | 19 + .../jsonschema-4.4.0.dist-info/INSTALLER | 1 + 
.../jsonschema-4.4.0.dist-info/METADATA | 200 + .../jsonschema-4.4.0.dist-info/RECORD | 69 + .../jsonschema-4.4.0.dist-info/WHEEL | 5 + .../entry_points.txt | 3 + .../jsonschema-4.4.0.dist-info/top_level.txt | 1 + .../site-packages/jsonschema/__init__.py | 58 + .../site-packages/jsonschema/__main__.py | 3 + .../site-packages/jsonschema/_format.py | 482 + .../jsonschema/_legacy_validators.py | 224 + .../site-packages/jsonschema/_reflect.py | 149 + .../site-packages/jsonschema/_types.py | 217 + .../site-packages/jsonschema/_utils.py | 348 + .../site-packages/jsonschema/_validators.py | 463 + .../jsonschema/benchmarks/__init__.py | 5 + .../jsonschema/benchmarks/issue232.py | 25 + .../benchmarks/json_schema_test_suite.py | 12 + .../python3.9/site-packages/jsonschema/cli.py | 284 + .../site-packages/jsonschema/exceptions.py | 363 + .../site-packages/jsonschema/protocols.py | 167 + .../jsonschema/schemas/draft2019-09.json | 42 + .../jsonschema/schemas/draft2020-12.json | 58 + .../jsonschema/schemas/draft3.json | 177 + .../jsonschema/schemas/draft4.json | 149 + .../jsonschema/schemas/draft6.json | 153 + .../jsonschema/schemas/draft7.json | 166 + .../jsonschema/schemas/vocabularies.json | 1 + .../jsonschema/tests/__init__.py | 0 .../jsonschema/tests/_helpers.py | 5 + .../site-packages/jsonschema/tests/_suite.py | 228 + .../jsonschema/tests/fuzz_validate.py | 49 + .../jsonschema/tests/test_cli.py | 911 ++ .../jsonschema/tests/test_deprecations.py | 123 + .../jsonschema/tests/test_exceptions.py | 475 + .../jsonschema/tests/test_format.py | 107 + .../tests/test_jsonschema_test_suite.py | 447 + .../jsonschema/tests/test_types.py | 217 + .../jsonschema/tests/test_utils.py | 124 + .../jsonschema/tests/test_validators.py | 2169 +++++ .../site-packages/jsonschema/validators.py | 1058 ++ .../python3.9/site-packages/nacl/__init__.py | 39 + .../site-packages/nacl/_sodium.abi3.so | Bin 0 -> 1021039 bytes .../site-packages/nacl/bindings/__init__.py | 451 + 
.../nacl/bindings/crypto_aead.py | 559 ++ .../site-packages/nacl/bindings/crypto_box.py | 324 + .../nacl/bindings/crypto_core.py | 412 + .../nacl/bindings/crypto_generichash.py | 281 + .../nacl/bindings/crypto_hash.py | 63 + .../site-packages/nacl/bindings/crypto_kx.py | 200 + .../nacl/bindings/crypto_pwhash.py | 600 ++ .../nacl/bindings/crypto_scalarmult.py | 240 + .../nacl/bindings/crypto_secretbox.py | 86 + .../nacl/bindings/crypto_secretstream.py | 357 + .../nacl/bindings/crypto_shorthash.py | 81 + .../nacl/bindings/crypto_sign.py | 327 + .../nacl/bindings/randombytes.py | 51 + .../nacl/bindings/sodium_core.py | 33 + .../site-packages/nacl/bindings/utils.py | 141 + .../python3.9/site-packages/nacl/encoding.py | 105 + .../site-packages/nacl/exceptions.py | 88 + .../lib/python3.9/site-packages/nacl/hash.py | 182 + .../python3.9/site-packages/nacl/hashlib.py | 143 + .../python3.9/site-packages/nacl/public.py | 423 + .../site-packages/nacl/pwhash/__init__.py | 75 + .../site-packages/nacl/pwhash/_argon2.py | 49 + .../site-packages/nacl/pwhash/argon2i.py | 132 + .../site-packages/nacl/pwhash/argon2id.py | 135 + .../site-packages/nacl/pwhash/scrypt.py | 211 + .../lib/python3.9/site-packages/nacl/py.typed | 0 .../python3.9/site-packages/nacl/secret.py | 305 + .../python3.9/site-packages/nacl/signing.py | 250 + .../lib/python3.9/site-packages/nacl/utils.py | 88 + .../site-packages/oslash/__init__.py | 19 + .../site-packages/oslash/_version.py | 21 + .../python3.9/site-packages/oslash/cont.py | 88 + .../lib/python3.9/site-packages/oslash/do.py | 207 + .../python3.9/site-packages/oslash/either.py | 145 + .../site-packages/oslash/identity.py | 63 + .../site-packages/oslash/ioaction.py | 239 + .../python3.9/site-packages/oslash/list.py | 319 + .../python3.9/site-packages/oslash/maybe.py | 259 + .../python3.9/site-packages/oslash/monadic.py | 20 + .../site-packages/oslash/observable.py | 97 + .../python3.9/site-packages/oslash/reader.py | 154 + 
.../python3.9/site-packages/oslash/state.py | 79 + .../site-packages/oslash/typing/__init__.py | 5 + .../oslash/typing/applicative.py | 58 + .../site-packages/oslash/typing/functor.py | 46 + .../site-packages/oslash/typing/monad.py | 83 + .../site-packages/oslash/typing/monoid.py | 40 + .../site-packages/oslash/util/__init__.py | 3 + .../site-packages/oslash/util/basic.py | 6 + .../python3.9/site-packages/oslash/util/fn.py | 93 + .../site-packages/oslash/util/numerals.py | 46 + .../python3.9/site-packages/oslash/writer.py | 121 + .../pip-22.0.4.dist-info/INSTALLER | 1 + .../pip-22.0.4.dist-info/LICENSE.txt | 20 + .../pip-22.0.4.dist-info/METADATA | 92 + .../site-packages/pip-22.0.4.dist-info/RECORD | 1053 ++ .../pip-22.0.4.dist-info/REQUESTED | 0 .../site-packages/pip-22.0.4.dist-info/WHEEL | 5 + .../pip-22.0.4.dist-info/entry_points.txt | 5 + .../pip-22.0.4.dist-info/top_level.txt | 1 + .../python3.9/site-packages/pip/__init__.py | 13 + .../python3.9/site-packages/pip/__main__.py | 31 + .../site-packages/pip/_internal/__init__.py | 19 + .../site-packages/pip/_internal/build_env.py | 296 + .../site-packages/pip/_internal/cache.py | 264 + .../pip/_internal/cli/__init__.py | 4 + .../pip/_internal/cli/autocompletion.py | 171 + .../pip/_internal/cli/base_command.py | 223 + .../pip/_internal/cli/cmdoptions.py | 1018 ++ .../pip/_internal/cli/command_context.py | 27 + .../site-packages/pip/_internal/cli/main.py | 70 + .../pip/_internal/cli/main_parser.py | 87 + .../site-packages/pip/_internal/cli/parser.py | 292 + .../pip/_internal/cli/progress_bars.py | 321 + .../pip/_internal/cli/req_command.py | 506 + .../pip/_internal/cli/spinners.py | 157 + .../pip/_internal/cli/status_codes.py | 6 + .../pip/_internal/commands/__init__.py | 127 + .../pip/_internal/commands/cache.py | 223 + .../pip/_internal/commands/check.py | 53 + .../pip/_internal/commands/completion.py | 96 + .../pip/_internal/commands/configuration.py | 266 + .../pip/_internal/commands/debug.py | 202 + 
.../pip/_internal/commands/download.py | 140 + .../pip/_internal/commands/freeze.py | 97 + .../pip/_internal/commands/hash.py | 59 + .../pip/_internal/commands/help.py | 41 + .../pip/_internal/commands/index.py | 139 + .../pip/_internal/commands/install.py | 771 ++ .../pip/_internal/commands/list.py | 361 + .../pip/_internal/commands/search.py | 174 + .../pip/_internal/commands/show.py | 178 + .../pip/_internal/commands/uninstall.py | 105 + .../pip/_internal/commands/wheel.py | 178 + .../pip/_internal/configuration.py | 366 + .../pip/_internal/distributions/__init__.py | 21 + .../pip/_internal/distributions/base.py | 36 + .../pip/_internal/distributions/installed.py | 20 + .../pip/_internal/distributions/sdist.py | 127 + .../pip/_internal/distributions/wheel.py | 31 + .../site-packages/pip/_internal/exceptions.py | 658 ++ .../pip/_internal/index/__init__.py | 2 + .../pip/_internal/index/collector.py | 610 ++ .../pip/_internal/index/package_finder.py | 1004 ++ .../pip/_internal/index/sources.py | 224 + .../pip/_internal/locations/__init__.py | 520 + .../pip/_internal/locations/_distutils.py | 169 + .../pip/_internal/locations/_sysconfig.py | 219 + .../pip/_internal/locations/base.py | 52 + .../site-packages/pip/_internal/main.py | 12 + .../pip/_internal/metadata/__init__.py | 62 + .../pip/_internal/metadata/base.py | 546 ++ .../pip/_internal/metadata/pkg_resources.py | 256 + .../pip/_internal/models/__init__.py | 2 + .../pip/_internal/models/candidate.py | 34 + .../pip/_internal/models/direct_url.py | 220 + .../pip/_internal/models/format_control.py | 80 + .../pip/_internal/models/index.py | 28 + .../pip/_internal/models/link.py | 288 + .../pip/_internal/models/scheme.py | 31 + .../pip/_internal/models/search_scope.py | 129 + .../pip/_internal/models/selection_prefs.py | 51 + .../pip/_internal/models/target_python.py | 110 + .../pip/_internal/models/wheel.py | 89 + .../pip/_internal/network/__init__.py | 2 + .../pip/_internal/network/auth.py | 323 + 
.../pip/_internal/network/cache.py | 69 + .../pip/_internal/network/download.py | 185 + .../pip/_internal/network/lazy_wheel.py | 210 + .../pip/_internal/network/session.py | 454 + .../pip/_internal/network/utils.py | 96 + .../pip/_internal/network/xmlrpc.py | 60 + .../pip/_internal/operations/__init__.py | 0 .../_internal/operations/build/__init__.py | 0 .../_internal/operations/build/metadata.py | 39 + .../operations/build/metadata_editable.py | 41 + .../operations/build/metadata_legacy.py | 74 + .../pip/_internal/operations/build/wheel.py | 37 + .../operations/build/wheel_editable.py | 46 + .../operations/build/wheel_legacy.py | 102 + .../pip/_internal/operations/check.py | 149 + .../pip/_internal/operations/freeze.py | 254 + .../_internal/operations/install/__init__.py | 2 + .../operations/install/editable_legacy.py | 47 + .../_internal/operations/install/legacy.py | 120 + .../pip/_internal/operations/install/wheel.py | 738 ++ .../pip/_internal/operations/prepare.py | 642 ++ .../site-packages/pip/_internal/pyproject.py | 168 + .../pip/_internal/req/__init__.py | 94 + .../pip/_internal/req/constructors.py | 490 + .../pip/_internal/req/req_file.py | 536 ++ .../pip/_internal/req/req_install.py | 858 ++ .../pip/_internal/req/req_set.py | 189 + .../pip/_internal/req/req_tracker.py | 124 + .../pip/_internal/req/req_uninstall.py | 633 ++ .../pip/_internal/resolution/__init__.py | 0 .../pip/_internal/resolution/base.py | 20 + .../_internal/resolution/legacy/__init__.py | 0 .../_internal/resolution/legacy/resolver.py | 467 + .../resolution/resolvelib/__init__.py | 0 .../_internal/resolution/resolvelib/base.py | 141 + .../resolution/resolvelib/candidates.py | 547 ++ .../resolution/resolvelib/factory.py | 739 ++ .../resolution/resolvelib/found_candidates.py | 155 + .../resolution/resolvelib/provider.py | 248 + .../resolution/resolvelib/reporter.py | 68 + .../resolution/resolvelib/requirements.py | 166 + .../resolution/resolvelib/resolver.py | 298 + 
.../pip/_internal/self_outdated_check.py | 189 + .../pip/_internal/utils/__init__.py | 0 .../site-packages/pip/_internal/utils/_log.py | 38 + .../pip/_internal/utils/appdirs.py | 52 + .../pip/_internal/utils/compat.py | 63 + .../pip/_internal/utils/compatibility_tags.py | 165 + .../pip/_internal/utils/datetime.py | 11 + .../pip/_internal/utils/deprecation.py | 120 + .../pip/_internal/utils/direct_url_helpers.py | 87 + .../pip/_internal/utils/distutils_args.py | 42 + .../pip/_internal/utils/egg_link.py | 75 + .../pip/_internal/utils/encoding.py | 36 + .../pip/_internal/utils/entrypoints.py | 27 + .../pip/_internal/utils/filesystem.py | 182 + .../pip/_internal/utils/filetypes.py | 27 + .../pip/_internal/utils/glibc.py | 88 + .../pip/_internal/utils/hashes.py | 144 + .../_internal/utils/inject_securetransport.py | 35 + .../pip/_internal/utils/logging.py | 343 + .../site-packages/pip/_internal/utils/misc.py | 629 ++ .../pip/_internal/utils/models.py | 39 + .../pip/_internal/utils/packaging.py | 57 + .../pip/_internal/utils/setuptools_build.py | 195 + .../pip/_internal/utils/subprocess.py | 260 + .../pip/_internal/utils/temp_dir.py | 246 + .../pip/_internal/utils/unpacking.py | 258 + .../site-packages/pip/_internal/utils/urls.py | 62 + .../pip/_internal/utils/virtualenv.py | 104 + .../pip/_internal/utils/wheel.py | 136 + .../pip/_internal/vcs/__init__.py | 15 + .../site-packages/pip/_internal/vcs/bazaar.py | 101 + .../site-packages/pip/_internal/vcs/git.py | 526 + .../pip/_internal/vcs/mercurial.py | 163 + .../pip/_internal/vcs/subversion.py | 324 + .../pip/_internal/vcs/versioncontrol.py | 705 ++ .../pip/_internal/wheel_builder.py | 377 + .../site-packages/pip/_vendor/__init__.py | 111 + .../pip/_vendor/cachecontrol/__init__.py | 18 + .../pip/_vendor/cachecontrol/_cmd.py | 61 + .../pip/_vendor/cachecontrol/adapter.py | 137 + .../pip/_vendor/cachecontrol/cache.py | 43 + .../_vendor/cachecontrol/caches/__init__.py | 6 + .../_vendor/cachecontrol/caches/file_cache.py | 150 
+ .../cachecontrol/caches/redis_cache.py | 37 + .../pip/_vendor/cachecontrol/compat.py | 32 + .../pip/_vendor/cachecontrol/controller.py | 415 + .../pip/_vendor/cachecontrol/filewrapper.py | 111 + .../pip/_vendor/cachecontrol/heuristics.py | 139 + .../pip/_vendor/cachecontrol/serialize.py | 186 + .../pip/_vendor/cachecontrol/wrapper.py | 33 + .../pip/_vendor/certifi/__init__.py | 3 + .../pip/_vendor/certifi/__main__.py | 12 + .../pip/_vendor/certifi/cacert.pem | 4362 +++++++++ .../site-packages/pip/_vendor/certifi/core.py | 76 + .../pip/_vendor/chardet/__init__.py | 83 + .../pip/_vendor/chardet/big5freq.py | 386 + .../pip/_vendor/chardet/big5prober.py | 47 + .../pip/_vendor/chardet/chardistribution.py | 233 + .../pip/_vendor/chardet/charsetgroupprober.py | 107 + .../pip/_vendor/chardet/charsetprober.py | 145 + .../pip/_vendor/chardet/cli/__init__.py | 1 + .../pip/_vendor/chardet/cli/chardetect.py | 84 + .../pip/_vendor/chardet/codingstatemachine.py | 88 + .../pip/_vendor/chardet/compat.py | 36 + .../pip/_vendor/chardet/cp949prober.py | 49 + .../pip/_vendor/chardet/enums.py | 76 + .../pip/_vendor/chardet/escprober.py | 101 + .../pip/_vendor/chardet/escsm.py | 246 + .../pip/_vendor/chardet/eucjpprober.py | 92 + .../pip/_vendor/chardet/euckrfreq.py | 195 + .../pip/_vendor/chardet/euckrprober.py | 47 + .../pip/_vendor/chardet/euctwfreq.py | 387 + .../pip/_vendor/chardet/euctwprober.py | 46 + .../pip/_vendor/chardet/gb2312freq.py | 283 + .../pip/_vendor/chardet/gb2312prober.py | 46 + .../pip/_vendor/chardet/hebrewprober.py | 292 + .../pip/_vendor/chardet/jisfreq.py | 325 + .../pip/_vendor/chardet/jpcntx.py | 233 + .../pip/_vendor/chardet/langbulgarianmodel.py | 4650 +++++++++ .../pip/_vendor/chardet/langgreekmodel.py | 4398 +++++++++ .../pip/_vendor/chardet/langhebrewmodel.py | 4383 +++++++++ .../pip/_vendor/chardet/langhungarianmodel.py | 4650 +++++++++ .../pip/_vendor/chardet/langrussianmodel.py | 5718 +++++++++++ .../pip/_vendor/chardet/langthaimodel.py | 4383 
+++++++++ .../pip/_vendor/chardet/langturkishmodel.py | 4383 +++++++++ .../pip/_vendor/chardet/latin1prober.py | 145 + .../pip/_vendor/chardet/mbcharsetprober.py | 91 + .../pip/_vendor/chardet/mbcsgroupprober.py | 54 + .../pip/_vendor/chardet/mbcssm.py | 572 ++ .../pip/_vendor/chardet/metadata/__init__.py | 0 .../pip/_vendor/chardet/metadata/languages.py | 310 + .../pip/_vendor/chardet/sbcharsetprober.py | 145 + .../pip/_vendor/chardet/sbcsgroupprober.py | 83 + .../pip/_vendor/chardet/sjisprober.py | 92 + .../pip/_vendor/chardet/universaldetector.py | 286 + .../pip/_vendor/chardet/utf8prober.py | 82 + .../pip/_vendor/chardet/version.py | 9 + .../pip/_vendor/colorama/__init__.py | 6 + .../pip/_vendor/colorama/ansi.py | 102 + .../pip/_vendor/colorama/ansitowin32.py | 258 + .../pip/_vendor/colorama/initialise.py | 80 + .../pip/_vendor/colorama/win32.py | 152 + .../pip/_vendor/colorama/winterm.py | 169 + .../pip/_vendor/distlib/__init__.py | 23 + .../pip/_vendor/distlib/_backport/__init__.py | 6 + .../pip/_vendor/distlib/_backport/misc.py | 41 + .../pip/_vendor/distlib/_backport/shutil.py | 764 ++ .../_vendor/distlib/_backport/sysconfig.cfg | 84 + .../_vendor/distlib/_backport/sysconfig.py | 786 ++ .../pip/_vendor/distlib/_backport/tarfile.py | 2607 +++++ .../pip/_vendor/distlib/compat.py | 1122 +++ .../pip/_vendor/distlib/database.py | 1339 +++ .../pip/_vendor/distlib/index.py | 509 + .../pip/_vendor/distlib/locators.py | 1300 +++ .../pip/_vendor/distlib/manifest.py | 393 + .../pip/_vendor/distlib/markers.py | 147 + .../pip/_vendor/distlib/metadata.py | 1058 ++ .../pip/_vendor/distlib/resources.py | 358 + .../pip/_vendor/distlib/scripts.py | 429 + .../site-packages/pip/_vendor/distlib/t32.exe | Bin 0 -> 96768 bytes .../pip/_vendor/distlib/t64-arm.exe | Bin 0 -> 180736 bytes .../site-packages/pip/_vendor/distlib/t64.exe | Bin 0 -> 105984 bytes .../site-packages/pip/_vendor/distlib/util.py | 1969 ++++ .../pip/_vendor/distlib/version.py | 739 ++ 
.../site-packages/pip/_vendor/distlib/w32.exe | Bin 0 -> 90112 bytes .../pip/_vendor/distlib/w64-arm.exe | Bin 0 -> 166400 bytes .../site-packages/pip/_vendor/distlib/w64.exe | Bin 0 -> 99840 bytes .../pip/_vendor/distlib/wheel.py | 1053 ++ .../site-packages/pip/_vendor/distro.py | 1386 +++ .../pip/_vendor/html5lib/__init__.py | 35 + .../pip/_vendor/html5lib/_ihatexml.py | 289 + .../pip/_vendor/html5lib/_inputstream.py | 918 ++ .../pip/_vendor/html5lib/_tokenizer.py | 1735 ++++ .../pip/_vendor/html5lib/_trie/__init__.py | 5 + .../pip/_vendor/html5lib/_trie/_base.py | 40 + .../pip/_vendor/html5lib/_trie/py.py | 67 + .../pip/_vendor/html5lib/_utils.py | 159 + .../pip/_vendor/html5lib/constants.py | 2946 ++++++ .../pip/_vendor/html5lib/filters/__init__.py | 0 .../filters/alphabeticalattributes.py | 29 + .../pip/_vendor/html5lib/filters/base.py | 12 + .../html5lib/filters/inject_meta_charset.py | 73 + .../pip/_vendor/html5lib/filters/lint.py | 93 + .../_vendor/html5lib/filters/optionaltags.py | 207 + .../pip/_vendor/html5lib/filters/sanitizer.py | 916 ++ .../_vendor/html5lib/filters/whitespace.py | 38 + .../pip/_vendor/html5lib/html5parser.py | 2795 ++++++ .../pip/_vendor/html5lib/serializer.py | 409 + .../_vendor/html5lib/treeadapters/__init__.py | 30 + .../_vendor/html5lib/treeadapters/genshi.py | 54 + .../pip/_vendor/html5lib/treeadapters/sax.py | 50 + .../_vendor/html5lib/treebuilders/__init__.py | 88 + .../pip/_vendor/html5lib/treebuilders/base.py | 417 + .../pip/_vendor/html5lib/treebuilders/dom.py | 239 + .../_vendor/html5lib/treebuilders/etree.py | 343 + .../html5lib/treebuilders/etree_lxml.py | 392 + .../_vendor/html5lib/treewalkers/__init__.py | 154 + .../pip/_vendor/html5lib/treewalkers/base.py | 252 + .../pip/_vendor/html5lib/treewalkers/dom.py | 43 + .../pip/_vendor/html5lib/treewalkers/etree.py | 131 + .../html5lib/treewalkers/etree_lxml.py | 215 + .../_vendor/html5lib/treewalkers/genshi.py | 69 + .../pip/_vendor/idna/__init__.py | 44 + 
.../site-packages/pip/_vendor/idna/codec.py | 112 + .../site-packages/pip/_vendor/idna/compat.py | 13 + .../site-packages/pip/_vendor/idna/core.py | 397 + .../pip/_vendor/idna/idnadata.py | 2137 +++++ .../pip/_vendor/idna/intranges.py | 54 + .../pip/_vendor/idna/package_data.py | 2 + .../pip/_vendor/idna/uts46data.py | 8512 +++++++++++++++++ .../pip/_vendor/msgpack/__init__.py | 54 + .../pip/_vendor/msgpack/_version.py | 1 + .../pip/_vendor/msgpack/exceptions.py | 48 + .../site-packages/pip/_vendor/msgpack/ext.py | 193 + .../pip/_vendor/msgpack/fallback.py | 1012 ++ .../pip/_vendor/packaging/__about__.py | 26 + .../pip/_vendor/packaging/__init__.py | 25 + .../pip/_vendor/packaging/_manylinux.py | 301 + .../pip/_vendor/packaging/_musllinux.py | 136 + .../pip/_vendor/packaging/_structures.py | 61 + .../pip/_vendor/packaging/markers.py | 304 + .../pip/_vendor/packaging/requirements.py | 146 + .../pip/_vendor/packaging/specifiers.py | 802 ++ .../pip/_vendor/packaging/tags.py | 487 + .../pip/_vendor/packaging/utils.py | 136 + .../pip/_vendor/packaging/version.py | 504 + .../pip/_vendor/pep517/__init__.py | 6 + .../site-packages/pip/_vendor/pep517/build.py | 127 + .../site-packages/pip/_vendor/pep517/check.py | 207 + .../pip/_vendor/pep517/colorlog.py | 115 + .../pip/_vendor/pep517/compat.py | 51 + .../pip/_vendor/pep517/dirtools.py | 44 + .../pip/_vendor/pep517/envbuild.py | 171 + .../pip/_vendor/pep517/in_process/__init__.py | 17 + .../_vendor/pep517/in_process/_in_process.py | 363 + .../site-packages/pip/_vendor/pep517/meta.py | 92 + .../pip/_vendor/pep517/wrappers.py | 375 + .../pip/_vendor/pkg_resources/__init__.py | 3296 +++++++ .../pip/_vendor/pkg_resources/py31compat.py | 23 + .../pip/_vendor/platformdirs/__init__.py | 331 + .../pip/_vendor/platformdirs/__main__.py | 46 + .../pip/_vendor/platformdirs/android.py | 119 + .../pip/_vendor/platformdirs/api.py | 156 + .../pip/_vendor/platformdirs/macos.py | 64 + .../pip/_vendor/platformdirs/unix.py | 181 + 
.../pip/_vendor/platformdirs/version.py | 4 + .../pip/_vendor/platformdirs/windows.py | 182 + .../pip/_vendor/progress/__init__.py | 189 + .../site-packages/pip/_vendor/progress/bar.py | 93 + .../pip/_vendor/progress/colors.py | 79 + .../pip/_vendor/progress/counter.py | 47 + .../pip/_vendor/progress/spinner.py | 45 + .../pip/_vendor/pygments/__init__.py | 83 + .../pip/_vendor/pygments/__main__.py | 17 + .../pip/_vendor/pygments/cmdline.py | 663 ++ .../pip/_vendor/pygments/console.py | 70 + .../pip/_vendor/pygments/filter.py | 71 + .../pip/_vendor/pygments/filters/__init__.py | 937 ++ .../pip/_vendor/pygments/formatter.py | 94 + .../_vendor/pygments/formatters/__init__.py | 153 + .../_vendor/pygments/formatters/_mapping.py | 84 + .../pip/_vendor/pygments/formatters/bbcode.py | 108 + .../pip/_vendor/pygments/formatters/groff.py | 168 + .../pip/_vendor/pygments/formatters/html.py | 983 ++ .../pip/_vendor/pygments/formatters/img.py | 641 ++ .../pip/_vendor/pygments/formatters/irc.py | 179 + .../pip/_vendor/pygments/formatters/latex.py | 511 + .../pip/_vendor/pygments/formatters/other.py | 161 + .../pygments/formatters/pangomarkup.py | 83 + .../pip/_vendor/pygments/formatters/rtf.py | 146 + .../pip/_vendor/pygments/formatters/svg.py | 188 + .../_vendor/pygments/formatters/terminal.py | 127 + .../pygments/formatters/terminal256.py | 338 + .../pip/_vendor/pygments/lexer.py | 879 ++ .../pip/_vendor/pygments/lexers/__init__.py | 341 + .../pip/_vendor/pygments/lexers/_mapping.py | 580 ++ .../pip/_vendor/pygments/lexers/python.py | 1188 +++ .../pip/_vendor/pygments/modeline.py | 43 + .../pip/_vendor/pygments/plugin.py | 69 + .../pip/_vendor/pygments/regexopt.py | 91 + .../pip/_vendor/pygments/scanner.py | 104 + .../pip/_vendor/pygments/sphinxext.py | 155 + .../pip/_vendor/pygments/style.py | 197 + .../pip/_vendor/pygments/styles/__init__.py | 93 + .../pip/_vendor/pygments/token.py | 212 + .../pip/_vendor/pygments/unistring.py | 153 + .../pip/_vendor/pygments/util.py | 308 + 
.../pip/_vendor/pyparsing/__init__.py | 328 + .../pip/_vendor/pyparsing/actions.py | 207 + .../pip/_vendor/pyparsing/common.py | 424 + .../pip/_vendor/pyparsing/core.py | 5789 +++++++++++ .../pip/_vendor/pyparsing/diagram/__init__.py | 593 ++ .../pip/_vendor/pyparsing/exceptions.py | 267 + .../pip/_vendor/pyparsing/helpers.py | 1069 +++ .../pip/_vendor/pyparsing/results.py | 760 ++ .../pip/_vendor/pyparsing/testing.py | 331 + .../pip/_vendor/pyparsing/unicode.py | 332 + .../pip/_vendor/pyparsing/util.py | 235 + .../pip/_vendor/requests/__init__.py | 154 + .../pip/_vendor/requests/__version__.py | 14 + .../pip/_vendor/requests/_internal_utils.py | 42 + .../pip/_vendor/requests/adapters.py | 538 ++ .../site-packages/pip/_vendor/requests/api.py | 159 + .../pip/_vendor/requests/auth.py | 305 + .../pip/_vendor/requests/certs.py | 18 + .../pip/_vendor/requests/compat.py | 77 + .../pip/_vendor/requests/cookies.py | 549 ++ .../pip/_vendor/requests/exceptions.py | 133 + .../pip/_vendor/requests/help.py | 132 + .../pip/_vendor/requests/hooks.py | 34 + .../pip/_vendor/requests/models.py | 973 ++ .../pip/_vendor/requests/packages.py | 16 + .../pip/_vendor/requests/sessions.py | 771 ++ .../pip/_vendor/requests/status_codes.py | 123 + .../pip/_vendor/requests/structures.py | 105 + .../pip/_vendor/requests/utils.py | 1060 ++ .../pip/_vendor/resolvelib/__init__.py | 26 + .../pip/_vendor/resolvelib/compat/__init__.py | 0 .../resolvelib/compat/collections_abc.py | 6 + .../pip/_vendor/resolvelib/providers.py | 133 + .../pip/_vendor/resolvelib/reporters.py | 43 + .../pip/_vendor/resolvelib/resolvers.py | 482 + .../pip/_vendor/resolvelib/structs.py | 165 + .../pip/_vendor/rich/__init__.py | 172 + .../pip/_vendor/rich/__main__.py | 280 + .../pip/_vendor/rich/_cell_widths.py | 451 + .../pip/_vendor/rich/_emoji_codes.py | 3610 +++++++ .../pip/_vendor/rich/_emoji_replace.py | 32 + .../pip/_vendor/rich/_extension.py | 10 + .../pip/_vendor/rich/_inspect.py | 210 + 
.../pip/_vendor/rich/_log_render.py | 94 + .../site-packages/pip/_vendor/rich/_loop.py | 43 + .../pip/_vendor/rich/_lru_cache.py | 34 + .../pip/_vendor/rich/_palettes.py | 309 + .../site-packages/pip/_vendor/rich/_pick.py | 17 + .../site-packages/pip/_vendor/rich/_ratio.py | 160 + .../pip/_vendor/rich/_spinners.py | 848 ++ .../site-packages/pip/_vendor/rich/_stack.py | 16 + .../site-packages/pip/_vendor/rich/_timer.py | 19 + .../pip/_vendor/rich/_windows.py | 72 + .../site-packages/pip/_vendor/rich/_wrap.py | 55 + .../site-packages/pip/_vendor/rich/abc.py | 33 + .../site-packages/pip/_vendor/rich/align.py | 312 + .../site-packages/pip/_vendor/rich/ansi.py | 228 + .../site-packages/pip/_vendor/rich/bar.py | 94 + .../site-packages/pip/_vendor/rich/box.py | 483 + .../site-packages/pip/_vendor/rich/cells.py | 147 + .../site-packages/pip/_vendor/rich/color.py | 581 ++ .../pip/_vendor/rich/color_triplet.py | 38 + .../site-packages/pip/_vendor/rich/columns.py | 187 + .../site-packages/pip/_vendor/rich/console.py | 2211 +++++ .../pip/_vendor/rich/constrain.py | 37 + .../pip/_vendor/rich/containers.py | 167 + .../site-packages/pip/_vendor/rich/control.py | 175 + .../pip/_vendor/rich/default_styles.py | 183 + .../pip/_vendor/rich/diagnose.py | 6 + .../site-packages/pip/_vendor/rich/emoji.py | 96 + .../site-packages/pip/_vendor/rich/errors.py | 34 + .../pip/_vendor/rich/file_proxy.py | 54 + .../pip/_vendor/rich/filesize.py | 89 + .../pip/_vendor/rich/highlighter.py | 147 + .../site-packages/pip/_vendor/rich/json.py | 140 + .../site-packages/pip/_vendor/rich/jupyter.py | 92 + .../site-packages/pip/_vendor/rich/layout.py | 444 + .../site-packages/pip/_vendor/rich/live.py | 365 + .../pip/_vendor/rich/live_render.py | 113 + .../site-packages/pip/_vendor/rich/logging.py | 268 + .../site-packages/pip/_vendor/rich/markup.py | 244 + .../site-packages/pip/_vendor/rich/measure.py | 149 + .../site-packages/pip/_vendor/rich/padding.py | 141 + .../site-packages/pip/_vendor/rich/pager.py | 
34 + .../site-packages/pip/_vendor/rich/palette.py | 100 + .../site-packages/pip/_vendor/rich/panel.py | 250 + .../site-packages/pip/_vendor/rich/pretty.py | 903 ++ .../pip/_vendor/rich/progress.py | 1036 ++ .../pip/_vendor/rich/progress_bar.py | 216 + .../site-packages/pip/_vendor/rich/prompt.py | 376 + .../pip/_vendor/rich/protocol.py | 42 + .../site-packages/pip/_vendor/rich/region.py | 10 + .../site-packages/pip/_vendor/rich/repr.py | 151 + .../site-packages/pip/_vendor/rich/rule.py | 115 + .../site-packages/pip/_vendor/rich/scope.py | 86 + .../site-packages/pip/_vendor/rich/screen.py | 54 + .../site-packages/pip/_vendor/rich/segment.py | 720 ++ .../site-packages/pip/_vendor/rich/spinner.py | 134 + .../site-packages/pip/_vendor/rich/status.py | 132 + .../site-packages/pip/_vendor/rich/style.py | 785 ++ .../site-packages/pip/_vendor/rich/styled.py | 42 + .../site-packages/pip/_vendor/rich/syntax.py | 735 ++ .../site-packages/pip/_vendor/rich/table.py | 968 ++ .../pip/_vendor/rich/tabulate.py | 51 + .../pip/_vendor/rich/terminal_theme.py | 55 + .../site-packages/pip/_vendor/rich/text.py | 1282 +++ .../site-packages/pip/_vendor/rich/theme.py | 112 + .../site-packages/pip/_vendor/rich/themes.py | 5 + .../pip/_vendor/rich/traceback.py | 678 ++ .../site-packages/pip/_vendor/rich/tree.py | 249 + .../site-packages/pip/_vendor/six.py | 998 ++ .../pip/_vendor/tenacity/__init__.py | 517 + .../pip/_vendor/tenacity/_asyncio.py | 92 + .../pip/_vendor/tenacity/_utils.py | 68 + .../pip/_vendor/tenacity/after.py | 46 + .../pip/_vendor/tenacity/before.py | 41 + .../pip/_vendor/tenacity/before_sleep.py | 58 + .../site-packages/pip/_vendor/tenacity/nap.py | 43 + .../pip/_vendor/tenacity/retry.py | 213 + .../pip/_vendor/tenacity/stop.py | 96 + .../pip/_vendor/tenacity/tornadoweb.py | 59 + .../pip/_vendor/tenacity/wait.py | 191 + .../pip/_vendor/tomli/__init__.py | 6 + .../pip/_vendor/tomli/_parser.py | 703 ++ .../site-packages/pip/_vendor/tomli/_re.py | 83 + 
.../pip/_vendor/typing_extensions.py | 2296 +++++ .../pip/_vendor/urllib3/__init__.py | 85 + .../pip/_vendor/urllib3/_collections.py | 337 + .../pip/_vendor/urllib3/_version.py | 2 + .../pip/_vendor/urllib3/connection.py | 569 ++ .../pip/_vendor/urllib3/connectionpool.py | 1108 +++ .../pip/_vendor/urllib3/contrib/__init__.py | 0 .../urllib3/contrib/_appengine_environ.py | 36 + .../contrib/_securetransport/__init__.py | 0 .../contrib/_securetransport/bindings.py | 519 + .../contrib/_securetransport/low_level.py | 397 + .../pip/_vendor/urllib3/contrib/appengine.py | 314 + .../pip/_vendor/urllib3/contrib/ntlmpool.py | 130 + .../pip/_vendor/urllib3/contrib/pyopenssl.py | 511 + .../urllib3/contrib/securetransport.py | 922 ++ .../pip/_vendor/urllib3/contrib/socks.py | 216 + .../pip/_vendor/urllib3/exceptions.py | 323 + .../pip/_vendor/urllib3/fields.py | 274 + .../pip/_vendor/urllib3/filepost.py | 98 + .../pip/_vendor/urllib3/packages/__init__.py | 0 .../urllib3/packages/backports/__init__.py | 0 .../urllib3/packages/backports/makefile.py | 51 + .../pip/_vendor/urllib3/packages/six.py | 1077 +++ .../pip/_vendor/urllib3/poolmanager.py | 536 ++ .../pip/_vendor/urllib3/request.py | 170 + .../pip/_vendor/urllib3/response.py | 821 ++ .../pip/_vendor/urllib3/util/__init__.py | 49 + .../pip/_vendor/urllib3/util/connection.py | 149 + .../pip/_vendor/urllib3/util/proxy.py | 57 + .../pip/_vendor/urllib3/util/queue.py | 22 + .../pip/_vendor/urllib3/util/request.py | 143 + .../pip/_vendor/urllib3/util/response.py | 107 + .../pip/_vendor/urllib3/util/retry.py | 620 ++ .../pip/_vendor/urllib3/util/ssl_.py | 495 + .../urllib3/util/ssl_match_hostname.py | 161 + .../pip/_vendor/urllib3/util/ssltransport.py | 221 + .../pip/_vendor/urllib3/util/timeout.py | 268 + .../pip/_vendor/urllib3/util/url.py | 432 + .../pip/_vendor/urllib3/util/wait.py | 153 + .../site-packages/pip/_vendor/vendor.txt | 25 + .../pip/_vendor/webencodings/__init__.py | 342 + .../pip/_vendor/webencodings/labels.py | 231 
+ .../pip/_vendor/webencodings/mklabels.py | 59 + .../pip/_vendor/webencodings/tests.py | 153 + .../_vendor/webencodings/x_user_defined.py | 325 + .../lib/python3.9/site-packages/pip/py.typed | 4 + .../site-packages/pkg_resources/__init__.py | 3303 +++++++ .../pkg_resources/_vendor/__init__.py | 0 .../pkg_resources/_vendor/appdirs.py | 608 ++ .../_vendor/packaging/__about__.py | 26 + .../_vendor/packaging/__init__.py | 25 + .../_vendor/packaging/_manylinux.py | 301 + .../_vendor/packaging/_musllinux.py | 136 + .../_vendor/packaging/_structures.py | 67 + .../_vendor/packaging/markers.py | 304 + .../_vendor/packaging/requirements.py | 146 + .../_vendor/packaging/specifiers.py | 828 ++ .../pkg_resources/_vendor/packaging/tags.py | 484 + .../pkg_resources/_vendor/packaging/utils.py | 136 + .../_vendor/packaging/version.py | 504 + .../pkg_resources/_vendor/pyparsing.py | 5742 +++++++++++ .../pkg_resources/extern/__init__.py | 73 + .../data/my-test-package-source/setup.py | 6 + .../pvectorc.cpython-39-darwin.so | Bin 0 -> 158950 bytes .../pycparser-2.21.dist-info/INSTALLER | 1 + .../pycparser-2.21.dist-info/LICENSE | 27 + .../pycparser-2.21.dist-info/METADATA | 31 + .../pycparser-2.21.dist-info/RECORD | 41 + .../pycparser-2.21.dist-info/WHEEL | 6 + .../pycparser-2.21.dist-info/top_level.txt | 1 + .../site-packages/pycparser/__init__.py | 90 + .../site-packages/pycparser/_ast_gen.py | 336 + .../site-packages/pycparser/_build_tables.py | 37 + .../site-packages/pycparser/_c_ast.cfg | 195 + .../site-packages/pycparser/ast_transforms.py | 164 + .../site-packages/pycparser/c_ast.py | 1125 +++ .../site-packages/pycparser/c_generator.py | 502 + .../site-packages/pycparser/c_lexer.py | 554 ++ .../site-packages/pycparser/c_parser.py | 1936 ++++ .../site-packages/pycparser/lextab.py | 10 + .../site-packages/pycparser/ply/__init__.py | 5 + .../site-packages/pycparser/ply/cpp.py | 905 ++ .../site-packages/pycparser/ply/ctokens.py | 133 + .../site-packages/pycparser/ply/lex.py | 1099 
+++ .../site-packages/pycparser/ply/yacc.py | 3494 +++++++ .../site-packages/pycparser/ply/ygen.py | 74 + .../site-packages/pycparser/plyparser.py | 133 + .../site-packages/pycparser/yacctab.py | 366 + .../pyrsistent-0.18.1.dist-info/INSTALLER | 1 + .../pyrsistent-0.18.1.dist-info/LICENSE.mit | 22 + .../pyrsistent-0.18.1.dist-info/METADATA | 771 ++ .../pyrsistent-0.18.1.dist-info/RECORD | 44 + .../pyrsistent-0.18.1.dist-info/WHEEL | 5 + .../pyrsistent-0.18.1.dist-info/top_level.txt | 3 + .../site-packages/pyrsistent/__init__.py | 47 + .../site-packages/pyrsistent/__init__.pyi | 213 + .../pyrsistent/_checked_types.py | 542 ++ .../site-packages/pyrsistent/_field_common.py | 332 + .../site-packages/pyrsistent/_helpers.py | 97 + .../site-packages/pyrsistent/_immutable.py | 103 + .../site-packages/pyrsistent/_pbag.py | 267 + .../site-packages/pyrsistent/_pclass.py | 262 + .../site-packages/pyrsistent/_pdeque.py | 376 + .../site-packages/pyrsistent/_plist.py | 313 + .../site-packages/pyrsistent/_pmap.py | 461 + .../site-packages/pyrsistent/_precord.py | 167 + .../site-packages/pyrsistent/_pset.py | 227 + .../site-packages/pyrsistent/_pvector.py | 711 ++ .../site-packages/pyrsistent/_toolz.py | 83 + .../pyrsistent/_transformations.py | 139 + .../site-packages/pyrsistent/py.typed | 0 .../site-packages/pyrsistent/typing.py | 80 + .../site-packages/pyrsistent/typing.pyi | 292 + .../requests-2.27.1.dist-info/INSTALLER | 1 + .../requests-2.27.1.dist-info/LICENSE | 175 + .../requests-2.27.1.dist-info/METADATA | 125 + .../requests-2.27.1.dist-info/RECORD | 42 + .../requests-2.27.1.dist-info/WHEEL | 6 + .../requests-2.27.1.dist-info/top_level.txt | 1 + .../site-packages/requests/__init__.py | 152 + .../site-packages/requests/__version__.py | 14 + .../site-packages/requests/_internal_utils.py | 42 + .../site-packages/requests/adapters.py | 538 ++ .../python3.9/site-packages/requests/api.py | 159 + .../python3.9/site-packages/requests/auth.py | 305 + 
.../python3.9/site-packages/requests/certs.py | 18 + .../site-packages/requests/compat.py | 81 + .../site-packages/requests/cookies.py | 549 ++ .../site-packages/requests/exceptions.py | 133 + .../python3.9/site-packages/requests/help.py | 135 + .../python3.9/site-packages/requests/hooks.py | 34 + .../site-packages/requests/models.py | 973 ++ .../site-packages/requests/packages.py | 26 + .../site-packages/requests/sessions.py | 771 ++ .../site-packages/requests/status_codes.py | 123 + .../site-packages/requests/structures.py | 105 + .../python3.9/site-packages/requests/utils.py | 1060 ++ .../rfc3986-1.5.0.dist-info/AUTHORS.rst | 14 + .../rfc3986-1.5.0.dist-info/INSTALLER | 1 + .../rfc3986-1.5.0.dist-info/LICENSE | 13 + .../rfc3986-1.5.0.dist-info/METADATA | 230 + .../rfc3986-1.5.0.dist-info/RECORD | 33 + .../rfc3986-1.5.0.dist-info/WHEEL | 6 + .../rfc3986-1.5.0.dist-info/top_level.txt | 1 + .../site-packages/rfc3986/__init__.py | 56 + .../python3.9/site-packages/rfc3986/_mixin.py | 373 + .../site-packages/rfc3986/abnf_regexp.py | 282 + .../python3.9/site-packages/rfc3986/api.py | 106 + .../site-packages/rfc3986/builder.py | 389 + .../python3.9/site-packages/rfc3986/compat.py | 60 + .../site-packages/rfc3986/exceptions.py | 124 + .../python3.9/site-packages/rfc3986/iri.py | 162 + .../python3.9/site-packages/rfc3986/misc.py | 135 + .../site-packages/rfc3986/normalizers.py | 172 + .../site-packages/rfc3986/parseresult.py | 479 + .../python3.9/site-packages/rfc3986/uri.py | 161 + .../site-packages/rfc3986/validators.py | 447 + .../setuptools-60.5.0.dist-info/INSTALLER | 1 + .../setuptools-60.5.0.dist-info/LICENSE | 19 + .../setuptools-60.5.0.dist-info/METADATA | 142 + .../setuptools-60.5.0.dist-info/RECORD | 308 + .../setuptools-60.5.0.dist-info/REQUESTED | 0 .../setuptools-60.5.0.dist-info/WHEEL | 5 + .../entry_points.txt | 56 + .../setuptools-60.5.0.dist-info/top_level.txt | 3 + .../site-packages/setuptools/__init__.py | 244 + .../setuptools/_deprecation_warning.py | 
7 + .../setuptools/_distutils/__init__.py | 24 + .../setuptools/_distutils/_collections.py | 56 + .../setuptools/_distutils/_msvccompiler.py | 561 ++ .../setuptools/_distutils/archive_util.py | 256 + .../setuptools/_distutils/bcppcompiler.py | 393 + .../setuptools/_distutils/ccompiler.py | 1123 +++ .../setuptools/_distutils/cmd.py | 403 + .../setuptools/_distutils/command/__init__.py | 31 + .../setuptools/_distutils/command/bdist.py | 143 + .../_distutils/command/bdist_dumb.py | 123 + .../_distutils/command/bdist_msi.py | 749 ++ .../_distutils/command/bdist_rpm.py | 579 ++ .../_distutils/command/bdist_wininst.py | 377 + .../setuptools/_distutils/command/build.py | 157 + .../_distutils/command/build_clib.py | 209 + .../_distutils/command/build_ext.py | 755 ++ .../setuptools/_distutils/command/build_py.py | 392 + .../_distutils/command/build_scripts.py | 152 + .../setuptools/_distutils/command/check.py | 148 + .../setuptools/_distutils/command/clean.py | 76 + .../setuptools/_distutils/command/config.py | 344 + .../setuptools/_distutils/command/install.py | 775 ++ .../_distutils/command/install_data.py | 79 + .../_distutils/command/install_egg_info.py | 84 + .../_distutils/command/install_headers.py | 47 + .../_distutils/command/install_lib.py | 217 + .../_distutils/command/install_scripts.py | 60 + .../_distutils/command/py37compat.py | 30 + .../setuptools/_distutils/command/register.py | 304 + .../setuptools/_distutils/command/sdist.py | 494 + .../setuptools/_distutils/command/upload.py | 214 + .../setuptools/_distutils/config.py | 130 + .../setuptools/_distutils/core.py | 249 + .../setuptools/_distutils/cygwinccompiler.py | 362 + .../setuptools/_distutils/debug.py | 5 + .../setuptools/_distutils/dep_util.py | 92 + .../setuptools/_distutils/dir_util.py | 210 + .../setuptools/_distutils/dist.py | 1257 +++ .../setuptools/_distutils/errors.py | 97 + .../setuptools/_distutils/extension.py | 240 + .../setuptools/_distutils/fancy_getopt.py | 457 + 
.../setuptools/_distutils/file_util.py | 238 + .../setuptools/_distutils/filelist.py | 355 + .../setuptools/_distutils/log.py | 81 + .../setuptools/_distutils/msvc9compiler.py | 788 ++ .../setuptools/_distutils/msvccompiler.py | 643 ++ .../setuptools/_distutils/py35compat.py | 19 + .../setuptools/_distutils/py38compat.py | 7 + .../setuptools/_distutils/spawn.py | 106 + .../setuptools/_distutils/sysconfig.py | 567 ++ .../setuptools/_distutils/text_file.py | 286 + .../setuptools/_distutils/unixccompiler.py | 325 + .../setuptools/_distutils/util.py | 548 ++ .../setuptools/_distutils/version.py | 363 + .../setuptools/_distutils/versionpredicate.py | 169 + .../site-packages/setuptools/_imp.py | 82 + .../setuptools/_vendor/__init__.py | 0 .../_vendor/more_itertools/__init__.py | 4 + .../setuptools/_vendor/more_itertools/more.py | 3825 ++++++++ .../_vendor/more_itertools/recipes.py | 620 ++ .../setuptools/_vendor/ordered_set.py | 488 + .../setuptools/_vendor/packaging/__about__.py | 26 + .../setuptools/_vendor/packaging/__init__.py | 25 + .../_vendor/packaging/_manylinux.py | 301 + .../_vendor/packaging/_musllinux.py | 136 + .../_vendor/packaging/_structures.py | 67 + .../setuptools/_vendor/packaging/markers.py | 304 + .../_vendor/packaging/requirements.py | 146 + .../_vendor/packaging/specifiers.py | 828 ++ .../setuptools/_vendor/packaging/tags.py | 484 + .../setuptools/_vendor/packaging/utils.py | 136 + .../setuptools/_vendor/packaging/version.py | 504 + .../setuptools/_vendor/pyparsing.py | 5742 +++++++++++ .../site-packages/setuptools/archive_util.py | 205 + .../site-packages/setuptools/build_meta.py | 290 + .../site-packages/setuptools/cli-32.exe | Bin 0 -> 65536 bytes .../site-packages/setuptools/cli-64.exe | Bin 0 -> 74752 bytes .../site-packages/setuptools/cli-arm64.exe | Bin 0 -> 137216 bytes .../site-packages/setuptools/cli.exe | Bin 0 -> 65536 bytes .../setuptools/command/__init__.py | 8 + .../site-packages/setuptools/command/alias.py | 78 + 
.../setuptools/command/bdist_egg.py | 456 + .../setuptools/command/bdist_rpm.py | 40 + .../setuptools/command/build_clib.py | 101 + .../setuptools/command/build_ext.py | 328 + .../setuptools/command/build_py.py | 242 + .../setuptools/command/develop.py | 193 + .../setuptools/command/dist_info.py | 36 + .../setuptools/command/easy_install.py | 2304 +++++ .../setuptools/command/egg_info.py | 755 ++ .../setuptools/command/install.py | 132 + .../setuptools/command/install_egg_info.py | 62 + .../setuptools/command/install_lib.py | 122 + .../setuptools/command/install_scripts.py | 69 + .../setuptools/command/launcher manifest.xml | 15 + .../setuptools/command/py36compat.py | 134 + .../setuptools/command/register.py | 18 + .../setuptools/command/rotate.py | 64 + .../setuptools/command/saveopts.py | 22 + .../site-packages/setuptools/command/sdist.py | 196 + .../setuptools/command/setopt.py | 149 + .../site-packages/setuptools/command/test.py | 252 + .../setuptools/command/upload.py | 17 + .../setuptools/command/upload_docs.py | 202 + .../site-packages/setuptools/config.py | 751 ++ .../site-packages/setuptools/dep_util.py | 25 + .../site-packages/setuptools/depends.py | 176 + .../site-packages/setuptools/dist.py | 1156 +++ .../site-packages/setuptools/errors.py | 40 + .../site-packages/setuptools/extension.py | 55 + .../setuptools/extern/__init__.py | 73 + .../site-packages/setuptools/glob.py | 167 + .../site-packages/setuptools/gui-32.exe | Bin 0 -> 65536 bytes .../site-packages/setuptools/gui-64.exe | Bin 0 -> 75264 bytes .../site-packages/setuptools/gui-arm64.exe | Bin 0 -> 137728 bytes .../site-packages/setuptools/gui.exe | Bin 0 -> 65536 bytes .../site-packages/setuptools/installer.py | 104 + .../site-packages/setuptools/launch.py | 36 + .../site-packages/setuptools/logging.py | 30 + .../site-packages/setuptools/monkey.py | 177 + .../site-packages/setuptools/msvc.py | 1805 ++++ .../site-packages/setuptools/namespaces.py | 107 + 
.../site-packages/setuptools/package_index.py | 1127 +++ .../site-packages/setuptools/py34compat.py | 13 + .../site-packages/setuptools/sandbox.py | 530 + .../setuptools/script (dev).tmpl | 6 + .../site-packages/setuptools/script.tmpl | 3 + .../setuptools/tests/integration/__init__.py | 0 .../setuptools/tests/integration/helpers.py | 61 + .../integration/test_pip_install_sdist.py | 218 + .../site-packages/setuptools/unicode_utils.py | 42 + .../site-packages/setuptools/version.py | 6 + .../site-packages/setuptools/wheel.py | 213 + .../setuptools/windows_support.py | 29 + .../sniffio-1.2.0.dist-info/INSTALLER | 1 + .../sniffio-1.2.0.dist-info/LICENSE | 3 + .../sniffio-1.2.0.dist-info/LICENSE.APACHE2 | 202 + .../sniffio-1.2.0.dist-info/LICENSE.MIT | 20 + .../sniffio-1.2.0.dist-info/METADATA | 103 + .../sniffio-1.2.0.dist-info/RECORD | 19 + .../sniffio-1.2.0.dist-info/WHEEL | 5 + .../sniffio-1.2.0.dist-info/top_level.txt | 1 + .../site-packages/sniffio/__init__.py | 14 + .../python3.9/site-packages/sniffio/_impl.py | 83 + .../site-packages/sniffio/_tests/__init__.py | 0 .../sniffio/_tests/test_sniffio.py | 67 + .../site-packages/sniffio/_version.py | 3 + .../python3.9/site-packages/sniffio/py.typed | 0 .../solana-0.23.0.dist-info/INSTALLER | 1 + .../solana-0.23.0.dist-info/LICENSE | 21 + .../solana-0.23.0.dist-info/METADATA | 179 + .../solana-0.23.0.dist-info/RECORD | 104 + .../solana-0.23.0.dist-info/REQUESTED | 0 .../solana-0.23.0.dist-info/WHEEL | 4 + .../site-packages/solana/__init__.py | 5 + .../site-packages/solana/_layouts/__init__.py | 0 .../site-packages/solana/_layouts/account.py | 45 + .../site-packages/solana/_layouts/shared.py | 15 + .../solana/_layouts/system_instructions.py | 88 + .../solana/_layouts/vote_instructions.py | 26 + .../python3.9/site-packages/solana/account.py | 80 + .../site-packages/solana/blockhash.py | 60 + .../site-packages/solana/exceptions.py | 56 + .../site-packages/solana/instruction.py | 52 + 
.../python3.9/site-packages/solana/keypair.py | 135 + .../python3.9/site-packages/solana/message.py | 225 + .../site-packages/solana/publickey.py | 128 + .../python3.9/site-packages/solana/py.typed | 0 .../site-packages/solana/rpc/__init__.py | 1 + .../solana/rpc/_utils/__init__.py | 0 .../solana/rpc/_utils/encoding.py | 64 + .../python3.9/site-packages/solana/rpc/api.py | 1379 +++ .../site-packages/solana/rpc/async_api.py | 1373 +++ .../site-packages/solana/rpc/commitment.py | 39 + .../site-packages/solana/rpc/core.py | 435 + .../solana/rpc/providers/__init__.py | 1 + .../solana/rpc/providers/async_base.py | 16 + .../solana/rpc/providers/async_http.py | 53 + .../solana/rpc/providers/base.py | 16 + .../solana/rpc/providers/core.py | 58 + .../solana/rpc/providers/http.py | 35 + .../solana/rpc/request_builder.py | 274 + .../site-packages/solana/rpc/responses.py | 262 + .../site-packages/solana/rpc/types.py | 96 + .../site-packages/solana/rpc/websocket_api.py | 335 + .../site-packages/solana/system_program.py | 666 ++ .../python3.9/site-packages/solana/sysvar.py | 34 + .../site-packages/solana/transaction.py | 461 + .../site-packages/solana/utils/__init__.py | 1 + .../site-packages/solana/utils/cluster.py | 47 + .../solana/utils/ed25519_base.py | 58 + .../site-packages/solana/utils/helpers.py | 26 + .../solana/utils/shortvec_encoding.py | 28 + .../site-packages/solana/utils/validate.py | 31 + .../site-packages/solana/vote_program.py | 63 + .../python3.9/site-packages/spl/__init__.py | 1 + .../lib/python3.9/site-packages/spl/py.typed | 0 .../site-packages/spl/token/__init__.py | 1 + .../site-packages/spl/token/_layouts.py | 114 + .../site-packages/spl/token/async_client.py | 567 ++ .../site-packages/spl/token/client.py | 571 ++ .../site-packages/spl/token/constants.py | 26 + .../python3.9/site-packages/spl/token/core.py | 731 ++ .../site-packages/spl/token/instructions.py | 1147 +++ .../INSTALLER | 1 + .../METADATA | 27 + .../types_cachetools-4.2.10.dist-info/RECORD | 
16 + .../types_cachetools-4.2.10.dist-info/WHEEL | 5 + .../top_level.txt | 1 + .../INSTALLER | 1 + .../LICENSE | 254 + .../METADATA | 46 + .../RECORD | 8 + .../WHEEL | 5 + .../top_level.txt | 1 + .../site-packages/typing_extensions.py | 2843 ++++++ .../urllib3-1.26.9.dist-info/INSTALLER | 1 + .../urllib3-1.26.9.dist-info/LICENSE.txt | 21 + .../urllib3-1.26.9.dist-info/METADATA | 1426 +++ .../urllib3-1.26.9.dist-info/RECORD | 82 + .../urllib3-1.26.9.dist-info/WHEEL | 6 + .../urllib3-1.26.9.dist-info/top_level.txt | 1 + .../site-packages/urllib3/__init__.py | 85 + .../site-packages/urllib3/_collections.py | 337 + .../site-packages/urllib3/_version.py | 2 + .../site-packages/urllib3/connection.py | 567 ++ .../site-packages/urllib3/connectionpool.py | 1108 +++ .../site-packages/urllib3/contrib/__init__.py | 0 .../urllib3/contrib/_appengine_environ.py | 36 + .../contrib/_securetransport/__init__.py | 0 .../contrib/_securetransport/bindings.py | 519 + .../contrib/_securetransport/low_level.py | 397 + .../urllib3/contrib/appengine.py | 314 + .../site-packages/urllib3/contrib/ntlmpool.py | 130 + .../urllib3/contrib/pyopenssl.py | 511 + .../urllib3/contrib/securetransport.py | 922 ++ .../site-packages/urllib3/contrib/socks.py | 216 + .../site-packages/urllib3/exceptions.py | 323 + .../python3.9/site-packages/urllib3/fields.py | 274 + .../site-packages/urllib3/filepost.py | 98 + .../urllib3/packages/__init__.py | 0 .../urllib3/packages/backports/__init__.py | 0 .../urllib3/packages/backports/makefile.py | 51 + .../site-packages/urllib3/packages/six.py | 1077 +++ .../site-packages/urllib3/poolmanager.py | 537 ++ .../site-packages/urllib3/request.py | 170 + .../site-packages/urllib3/response.py | 824 ++ .../site-packages/urllib3/util/__init__.py | 49 + .../site-packages/urllib3/util/connection.py | 149 + .../site-packages/urllib3/util/proxy.py | 57 + .../site-packages/urllib3/util/queue.py | 22 + .../site-packages/urllib3/util/request.py | 146 + 
.../site-packages/urllib3/util/response.py | 107 + .../site-packages/urllib3/util/retry.py | 620 ++ .../site-packages/urllib3/util/ssl_.py | 495 + .../urllib3/util/ssl_match_hostname.py | 159 + .../urllib3/util/ssltransport.py | 221 + .../site-packages/urllib3/util/timeout.py | 268 + .../site-packages/urllib3/util/url.py | 432 + .../site-packages/urllib3/util/wait.py | 153 + .../websockets-10.2.dist-info/INSTALLER | 1 + .../websockets-10.2.dist-info/LICENSE | 25 + .../websockets-10.2.dist-info/METADATA | 174 + .../websockets-10.2.dist-info/RECORD | 69 + .../websockets-10.2.dist-info/WHEEL | 5 + .../websockets-10.2.dist-info/top_level.txt | 3 + .../site-packages/websockets/__init__.py | 114 + .../site-packages/websockets/__main__.py | 230 + .../site-packages/websockets/auth.py | 4 + .../site-packages/websockets/client.py | 344 + .../site-packages/websockets/connection.py | 693 ++ .../websockets/datastructures.py | 168 + .../site-packages/websockets/exceptions.py | 398 + .../websockets/extensions/__init__.py | 4 + .../websockets/extensions/base.py | 128 + .../extensions/permessage_deflate.py | 658 ++ .../site-packages/websockets/frames.py | 441 + .../site-packages/websockets/headers.py | 587 ++ .../site-packages/websockets/http.py | 30 + .../site-packages/websockets/http11.py | 351 + .../site-packages/websockets/imports.py | 99 + .../websockets/legacy/__init__.py | 0 .../site-packages/websockets/legacy/auth.py | 188 + .../site-packages/websockets/legacy/client.py | 709 ++ .../websockets/legacy/compatibility.py | 13 + .../websockets/legacy/framing.py | 174 + .../websockets/legacy/handshake.py | 165 + .../site-packages/websockets/legacy/http.py | 201 + .../websockets/legacy/protocol.py | 1602 ++++ .../site-packages/websockets/legacy/server.py | 1154 +++ .../site-packages/websockets/py.typed | 0 .../site-packages/websockets/server.py | 508 + .../site-packages/websockets/speedups.c | 223 + .../websockets/speedups.cpython-39-darwin.so | Bin 0 -> 34664 bytes 
.../site-packages/websockets/streams.py | 151 + .../site-packages/websockets/typing.py | 60 + .../python3.9/site-packages/websockets/uri.py | 108 + .../site-packages/websockets/utils.py | 51 + .../site-packages/websockets/version.py | 78 + .venv/pyvenv.cfg | 3 + app.py | 40 + 1511 files changed, 421304 insertions(+) create mode 100644 .envrc create mode 100644 .venv/bin/Activate.ps1 create mode 100644 .venv/bin/activate create mode 100644 .venv/bin/activate.csh create mode 100644 .venv/bin/activate.fish create mode 100755 .venv/bin/jsonschema create mode 100755 .venv/bin/normalizer create mode 100755 .venv/bin/pip create mode 100755 .venv/bin/pip3 create mode 100755 .venv/bin/pip3.9 create mode 120000 .venv/bin/python create mode 120000 .venv/bin/python3 create mode 120000 .venv/bin/python3.9 create mode 100644 .venv/lib/python3.9/site-packages/OSlash-0.6.3.dist-info/INSTALLER create mode 100644 .venv/lib/python3.9/site-packages/OSlash-0.6.3.dist-info/LICENSE create mode 100644 .venv/lib/python3.9/site-packages/OSlash-0.6.3.dist-info/METADATA create mode 100644 .venv/lib/python3.9/site-packages/OSlash-0.6.3.dist-info/RECORD create mode 100644 .venv/lib/python3.9/site-packages/OSlash-0.6.3.dist-info/WHEEL create mode 100644 .venv/lib/python3.9/site-packages/OSlash-0.6.3.dist-info/top_level.txt create mode 100644 .venv/lib/python3.9/site-packages/OSlash-0.6.3.dist-info/zip-safe create mode 100644 .venv/lib/python3.9/site-packages/PyNaCl-1.5.0.dist-info/INSTALLER create mode 100644 .venv/lib/python3.9/site-packages/PyNaCl-1.5.0.dist-info/LICENSE create mode 100644 .venv/lib/python3.9/site-packages/PyNaCl-1.5.0.dist-info/METADATA create mode 100644 .venv/lib/python3.9/site-packages/PyNaCl-1.5.0.dist-info/RECORD create mode 100644 .venv/lib/python3.9/site-packages/PyNaCl-1.5.0.dist-info/WHEEL create mode 100644 .venv/lib/python3.9/site-packages/PyNaCl-1.5.0.dist-info/top_level.txt create mode 100755 .venv/lib/python3.9/site-packages/_cffi_backend.cpython-39-darwin.so 
create mode 100644 .venv/lib/python3.9/site-packages/_distutils_hack/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/_distutils_hack/override.py create mode 100644 .venv/lib/python3.9/site-packages/_pyrsistent_version.py create mode 100644 .venv/lib/python3.9/site-packages/anyio-3.5.0.dist-info/INSTALLER create mode 100644 .venv/lib/python3.9/site-packages/anyio-3.5.0.dist-info/LICENSE create mode 100644 .venv/lib/python3.9/site-packages/anyio-3.5.0.dist-info/METADATA create mode 100644 .venv/lib/python3.9/site-packages/anyio-3.5.0.dist-info/RECORD create mode 100644 .venv/lib/python3.9/site-packages/anyio-3.5.0.dist-info/WHEEL create mode 100644 .venv/lib/python3.9/site-packages/anyio-3.5.0.dist-info/entry_points.txt create mode 100644 .venv/lib/python3.9/site-packages/anyio-3.5.0.dist-info/top_level.txt create mode 100644 .venv/lib/python3.9/site-packages/anyio/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/anyio/_backends/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/anyio/_backends/_asyncio.py create mode 100644 .venv/lib/python3.9/site-packages/anyio/_backends/_trio.py create mode 100644 .venv/lib/python3.9/site-packages/anyio/_core/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/anyio/_core/_compat.py create mode 100644 .venv/lib/python3.9/site-packages/anyio/_core/_eventloop.py create mode 100644 .venv/lib/python3.9/site-packages/anyio/_core/_exceptions.py create mode 100644 .venv/lib/python3.9/site-packages/anyio/_core/_fileio.py create mode 100644 .venv/lib/python3.9/site-packages/anyio/_core/_resources.py create mode 100644 .venv/lib/python3.9/site-packages/anyio/_core/_signals.py create mode 100644 .venv/lib/python3.9/site-packages/anyio/_core/_sockets.py create mode 100644 .venv/lib/python3.9/site-packages/anyio/_core/_streams.py create mode 100644 .venv/lib/python3.9/site-packages/anyio/_core/_subprocesses.py create mode 100644 
.venv/lib/python3.9/site-packages/anyio/_core/_synchronization.py create mode 100644 .venv/lib/python3.9/site-packages/anyio/_core/_tasks.py create mode 100644 .venv/lib/python3.9/site-packages/anyio/_core/_testing.py create mode 100644 .venv/lib/python3.9/site-packages/anyio/_core/_typedattr.py create mode 100644 .venv/lib/python3.9/site-packages/anyio/abc/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/anyio/abc/_resources.py create mode 100644 .venv/lib/python3.9/site-packages/anyio/abc/_sockets.py create mode 100644 .venv/lib/python3.9/site-packages/anyio/abc/_streams.py create mode 100644 .venv/lib/python3.9/site-packages/anyio/abc/_subprocesses.py create mode 100644 .venv/lib/python3.9/site-packages/anyio/abc/_tasks.py create mode 100644 .venv/lib/python3.9/site-packages/anyio/abc/_testing.py create mode 100644 .venv/lib/python3.9/site-packages/anyio/from_thread.py create mode 100644 .venv/lib/python3.9/site-packages/anyio/lowlevel.py create mode 100644 .venv/lib/python3.9/site-packages/anyio/py.typed create mode 100644 .venv/lib/python3.9/site-packages/anyio/pytest_plugin.py create mode 100644 .venv/lib/python3.9/site-packages/anyio/streams/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/anyio/streams/buffered.py create mode 100644 .venv/lib/python3.9/site-packages/anyio/streams/file.py create mode 100644 .venv/lib/python3.9/site-packages/anyio/streams/memory.py create mode 100644 .venv/lib/python3.9/site-packages/anyio/streams/stapled.py create mode 100644 .venv/lib/python3.9/site-packages/anyio/streams/text.py create mode 100644 .venv/lib/python3.9/site-packages/anyio/streams/tls.py create mode 100644 .venv/lib/python3.9/site-packages/anyio/to_process.py create mode 100644 .venv/lib/python3.9/site-packages/anyio/to_thread.py create mode 100644 .venv/lib/python3.9/site-packages/apischema-0.16.6.dist-info/INSTALLER create mode 100644 .venv/lib/python3.9/site-packages/apischema-0.16.6.dist-info/LICENSE.txt create mode 100644 
.venv/lib/python3.9/site-packages/apischema-0.16.6.dist-info/METADATA create mode 100644 .venv/lib/python3.9/site-packages/apischema-0.16.6.dist-info/RECORD create mode 100644 .venv/lib/python3.9/site-packages/apischema-0.16.6.dist-info/WHEEL create mode 100644 .venv/lib/python3.9/site-packages/apischema-0.16.6.dist-info/top_level.txt create mode 100644 .venv/lib/python3.9/site-packages/apischema/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/aliases.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/cache.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/conversions/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/conversions/conversions.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/conversions/converters.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/conversions/dataclass_models.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/conversions/utils.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/conversions/visitor.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/conversions/wrappers.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/dataclasses.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/dependencies.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/dependent_required.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/deserialization/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/deserialization/coercion.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/deserialization/flattened.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/fields.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/graphql/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/graphql/interfaces.py create mode 100644 
.venv/lib/python3.9/site-packages/apischema/graphql/relay/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/graphql/relay/connections.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/graphql/relay/global_identification.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/graphql/relay/mutations.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/graphql/relay/utils.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/graphql/resolvers.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/graphql/schema.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/json_schema/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/json_schema/conversions_resolver.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/json_schema/patterns.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/json_schema/refs.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/json_schema/schema.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/json_schema/types.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/json_schema/versions.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/metadata/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/metadata/implem.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/metadata/keys.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/methods.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/objects/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/objects/conversions.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/objects/fields.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/objects/getters.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/objects/visitor.py create mode 100644 
.venv/lib/python3.9/site-packages/apischema/ordering.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/py.typed create mode 100644 .venv/lib/python3.9/site-packages/apischema/recursion.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/schemas/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/schemas/annotations.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/schemas/constraints.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/serialization/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/serialization/serialized_methods.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/settings.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/skip.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/std_types.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/tagged_unions.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/type_names.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/types.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/typing.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/utils.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/validation/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/validation/dependencies.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/validation/errors.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/validation/mock.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/validation/validators.py create mode 100644 .venv/lib/python3.9/site-packages/apischema/visitor.py create mode 100644 .venv/lib/python3.9/site-packages/attr/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/attr/__init__.pyi create mode 100644 .venv/lib/python3.9/site-packages/attr/_cmp.py create mode 100644 
.venv/lib/python3.9/site-packages/attr/_cmp.pyi create mode 100644 .venv/lib/python3.9/site-packages/attr/_compat.py create mode 100644 .venv/lib/python3.9/site-packages/attr/_config.py create mode 100644 .venv/lib/python3.9/site-packages/attr/_funcs.py create mode 100644 .venv/lib/python3.9/site-packages/attr/_make.py create mode 100644 .venv/lib/python3.9/site-packages/attr/_next_gen.py create mode 100644 .venv/lib/python3.9/site-packages/attr/_version_info.py create mode 100644 .venv/lib/python3.9/site-packages/attr/_version_info.pyi create mode 100644 .venv/lib/python3.9/site-packages/attr/converters.py create mode 100644 .venv/lib/python3.9/site-packages/attr/converters.pyi create mode 100644 .venv/lib/python3.9/site-packages/attr/exceptions.py create mode 100644 .venv/lib/python3.9/site-packages/attr/exceptions.pyi create mode 100644 .venv/lib/python3.9/site-packages/attr/filters.py create mode 100644 .venv/lib/python3.9/site-packages/attr/filters.pyi create mode 100644 .venv/lib/python3.9/site-packages/attr/py.typed create mode 100644 .venv/lib/python3.9/site-packages/attr/setters.py create mode 100644 .venv/lib/python3.9/site-packages/attr/setters.pyi create mode 100644 .venv/lib/python3.9/site-packages/attr/validators.py create mode 100644 .venv/lib/python3.9/site-packages/attr/validators.pyi create mode 100644 .venv/lib/python3.9/site-packages/attrs-21.4.0.dist-info/AUTHORS.rst create mode 100644 .venv/lib/python3.9/site-packages/attrs-21.4.0.dist-info/INSTALLER create mode 100644 .venv/lib/python3.9/site-packages/attrs-21.4.0.dist-info/LICENSE create mode 100644 .venv/lib/python3.9/site-packages/attrs-21.4.0.dist-info/METADATA create mode 100644 .venv/lib/python3.9/site-packages/attrs-21.4.0.dist-info/RECORD create mode 100644 .venv/lib/python3.9/site-packages/attrs-21.4.0.dist-info/WHEEL create mode 100644 .venv/lib/python3.9/site-packages/attrs-21.4.0.dist-info/top_level.txt create mode 100644 .venv/lib/python3.9/site-packages/attrs/__init__.py create 
mode 100644 .venv/lib/python3.9/site-packages/attrs/__init__.pyi create mode 100644 .venv/lib/python3.9/site-packages/attrs/converters.py create mode 100644 .venv/lib/python3.9/site-packages/attrs/exceptions.py create mode 100644 .venv/lib/python3.9/site-packages/attrs/filters.py create mode 100644 .venv/lib/python3.9/site-packages/attrs/py.typed create mode 100644 .venv/lib/python3.9/site-packages/attrs/setters.py create mode 100644 .venv/lib/python3.9/site-packages/attrs/validators.py create mode 100644 .venv/lib/python3.9/site-packages/based58-0.1.0.dist-info/INSTALLER create mode 100644 .venv/lib/python3.9/site-packages/based58-0.1.0.dist-info/METADATA create mode 100644 .venv/lib/python3.9/site-packages/based58-0.1.0.dist-info/RECORD create mode 100644 .venv/lib/python3.9/site-packages/based58-0.1.0.dist-info/WHEEL create mode 100644 .venv/lib/python3.9/site-packages/based58/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/based58/__init__.pyi create mode 100755 .venv/lib/python3.9/site-packages/based58/based58.abi3.so create mode 100644 .venv/lib/python3.9/site-packages/based58/py.typed create mode 100644 .venv/lib/python3.9/site-packages/cachetools-4.2.4.dist-info/INSTALLER create mode 100644 .venv/lib/python3.9/site-packages/cachetools-4.2.4.dist-info/LICENSE create mode 100644 .venv/lib/python3.9/site-packages/cachetools-4.2.4.dist-info/METADATA create mode 100644 .venv/lib/python3.9/site-packages/cachetools-4.2.4.dist-info/RECORD create mode 100644 .venv/lib/python3.9/site-packages/cachetools-4.2.4.dist-info/WHEEL create mode 100644 .venv/lib/python3.9/site-packages/cachetools-4.2.4.dist-info/top_level.txt create mode 100644 .venv/lib/python3.9/site-packages/cachetools-stubs/METADATA.toml create mode 100644 .venv/lib/python3.9/site-packages/cachetools-stubs/__init__.pyi create mode 100644 .venv/lib/python3.9/site-packages/cachetools-stubs/cache.pyi create mode 100644 .venv/lib/python3.9/site-packages/cachetools-stubs/fifo.pyi create mode 
100644 .venv/lib/python3.9/site-packages/cachetools-stubs/func.pyi create mode 100644 .venv/lib/python3.9/site-packages/cachetools-stubs/keys.pyi create mode 100644 .venv/lib/python3.9/site-packages/cachetools-stubs/lfu.pyi create mode 100644 .venv/lib/python3.9/site-packages/cachetools-stubs/lru.pyi create mode 100644 .venv/lib/python3.9/site-packages/cachetools-stubs/mru.pyi create mode 100644 .venv/lib/python3.9/site-packages/cachetools-stubs/rr.pyi create mode 100644 .venv/lib/python3.9/site-packages/cachetools-stubs/ttl.pyi create mode 100644 .venv/lib/python3.9/site-packages/cachetools/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/cachetools/cache.py create mode 100644 .venv/lib/python3.9/site-packages/cachetools/fifo.py create mode 100644 .venv/lib/python3.9/site-packages/cachetools/func.py create mode 100644 .venv/lib/python3.9/site-packages/cachetools/keys.py create mode 100644 .venv/lib/python3.9/site-packages/cachetools/lfu.py create mode 100644 .venv/lib/python3.9/site-packages/cachetools/lru.py create mode 100644 .venv/lib/python3.9/site-packages/cachetools/mru.py create mode 100644 .venv/lib/python3.9/site-packages/cachetools/rr.py create mode 100644 .venv/lib/python3.9/site-packages/cachetools/ttl.py create mode 100644 .venv/lib/python3.9/site-packages/certifi-2021.10.8.dist-info/INSTALLER create mode 100644 .venv/lib/python3.9/site-packages/certifi-2021.10.8.dist-info/LICENSE create mode 100644 .venv/lib/python3.9/site-packages/certifi-2021.10.8.dist-info/METADATA create mode 100644 .venv/lib/python3.9/site-packages/certifi-2021.10.8.dist-info/RECORD create mode 100644 .venv/lib/python3.9/site-packages/certifi-2021.10.8.dist-info/WHEEL create mode 100644 .venv/lib/python3.9/site-packages/certifi-2021.10.8.dist-info/top_level.txt create mode 100644 .venv/lib/python3.9/site-packages/certifi/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/certifi/__main__.py create mode 100644 
.venv/lib/python3.9/site-packages/certifi/cacert.pem create mode 100644 .venv/lib/python3.9/site-packages/certifi/core.py create mode 100644 .venv/lib/python3.9/site-packages/cffi-1.15.0.dist-info/INSTALLER create mode 100644 .venv/lib/python3.9/site-packages/cffi-1.15.0.dist-info/LICENSE create mode 100644 .venv/lib/python3.9/site-packages/cffi-1.15.0.dist-info/METADATA create mode 100644 .venv/lib/python3.9/site-packages/cffi-1.15.0.dist-info/RECORD create mode 100644 .venv/lib/python3.9/site-packages/cffi-1.15.0.dist-info/WHEEL create mode 100644 .venv/lib/python3.9/site-packages/cffi-1.15.0.dist-info/entry_points.txt create mode 100644 .venv/lib/python3.9/site-packages/cffi-1.15.0.dist-info/top_level.txt create mode 100644 .venv/lib/python3.9/site-packages/cffi/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/cffi/_cffi_errors.h create mode 100644 .venv/lib/python3.9/site-packages/cffi/_cffi_include.h create mode 100644 .venv/lib/python3.9/site-packages/cffi/_embedding.h create mode 100644 .venv/lib/python3.9/site-packages/cffi/api.py create mode 100644 .venv/lib/python3.9/site-packages/cffi/backend_ctypes.py create mode 100644 .venv/lib/python3.9/site-packages/cffi/cffi_opcode.py create mode 100644 .venv/lib/python3.9/site-packages/cffi/commontypes.py create mode 100644 .venv/lib/python3.9/site-packages/cffi/cparser.py create mode 100644 .venv/lib/python3.9/site-packages/cffi/error.py create mode 100644 .venv/lib/python3.9/site-packages/cffi/ffiplatform.py create mode 100644 .venv/lib/python3.9/site-packages/cffi/lock.py create mode 100644 .venv/lib/python3.9/site-packages/cffi/model.py create mode 100644 .venv/lib/python3.9/site-packages/cffi/parse_c_type.h create mode 100644 .venv/lib/python3.9/site-packages/cffi/pkgconfig.py create mode 100644 .venv/lib/python3.9/site-packages/cffi/recompiler.py create mode 100644 .venv/lib/python3.9/site-packages/cffi/setuptools_ext.py create mode 100644 .venv/lib/python3.9/site-packages/cffi/vengine_cpy.py 
create mode 100644 .venv/lib/python3.9/site-packages/cffi/vengine_gen.py create mode 100644 .venv/lib/python3.9/site-packages/cffi/verifier.py create mode 100644 .venv/lib/python3.9/site-packages/charset_normalizer-2.0.12.dist-info/INSTALLER create mode 100644 .venv/lib/python3.9/site-packages/charset_normalizer-2.0.12.dist-info/LICENSE create mode 100644 .venv/lib/python3.9/site-packages/charset_normalizer-2.0.12.dist-info/METADATA create mode 100644 .venv/lib/python3.9/site-packages/charset_normalizer-2.0.12.dist-info/RECORD create mode 100644 .venv/lib/python3.9/site-packages/charset_normalizer-2.0.12.dist-info/WHEEL create mode 100644 .venv/lib/python3.9/site-packages/charset_normalizer-2.0.12.dist-info/entry_points.txt create mode 100644 .venv/lib/python3.9/site-packages/charset_normalizer-2.0.12.dist-info/top_level.txt create mode 100644 .venv/lib/python3.9/site-packages/charset_normalizer/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/charset_normalizer/api.py create mode 100644 .venv/lib/python3.9/site-packages/charset_normalizer/assets/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/charset_normalizer/cd.py create mode 100644 .venv/lib/python3.9/site-packages/charset_normalizer/cli/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/charset_normalizer/cli/normalizer.py create mode 100644 .venv/lib/python3.9/site-packages/charset_normalizer/constant.py create mode 100644 .venv/lib/python3.9/site-packages/charset_normalizer/legacy.py create mode 100644 .venv/lib/python3.9/site-packages/charset_normalizer/md.py create mode 100644 .venv/lib/python3.9/site-packages/charset_normalizer/models.py create mode 100644 .venv/lib/python3.9/site-packages/charset_normalizer/py.typed create mode 100644 .venv/lib/python3.9/site-packages/charset_normalizer/utils.py create mode 100644 .venv/lib/python3.9/site-packages/charset_normalizer/version.py create mode 100644 
.venv/lib/python3.9/site-packages/construct-2.10.67.dist-info/INSTALLER create mode 100644 .venv/lib/python3.9/site-packages/construct-2.10.67.dist-info/LICENSE create mode 100644 .venv/lib/python3.9/site-packages/construct-2.10.67.dist-info/METADATA create mode 100644 .venv/lib/python3.9/site-packages/construct-2.10.67.dist-info/RECORD create mode 100644 .venv/lib/python3.9/site-packages/construct-2.10.67.dist-info/WHEEL create mode 100644 .venv/lib/python3.9/site-packages/construct-2.10.67.dist-info/top_level.txt create mode 100644 .venv/lib/python3.9/site-packages/construct-stubs/__init__.pyi create mode 100644 .venv/lib/python3.9/site-packages/construct-stubs/core.pyi create mode 100644 .venv/lib/python3.9/site-packages/construct-stubs/debug.pyi create mode 100644 .venv/lib/python3.9/site-packages/construct-stubs/expr.pyi create mode 100644 .venv/lib/python3.9/site-packages/construct-stubs/lib/__init__.pyi create mode 100644 .venv/lib/python3.9/site-packages/construct-stubs/lib/binary.pyi create mode 100644 .venv/lib/python3.9/site-packages/construct-stubs/lib/bitstream.pyi create mode 100644 .venv/lib/python3.9/site-packages/construct-stubs/lib/containers.pyi create mode 100644 .venv/lib/python3.9/site-packages/construct-stubs/lib/hex.pyi create mode 100644 .venv/lib/python3.9/site-packages/construct-stubs/lib/py3compat.pyi create mode 100644 .venv/lib/python3.9/site-packages/construct-stubs/version.pyi create mode 100644 .venv/lib/python3.9/site-packages/construct/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/construct/core.py create mode 100644 .venv/lib/python3.9/site-packages/construct/debug.py create mode 100644 .venv/lib/python3.9/site-packages/construct/expr.py create mode 100644 .venv/lib/python3.9/site-packages/construct/lib/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/construct/lib/binary.py create mode 100644 .venv/lib/python3.9/site-packages/construct/lib/bitstream.py create mode 100644 
.venv/lib/python3.9/site-packages/construct/lib/containers.py create mode 100644 .venv/lib/python3.9/site-packages/construct/lib/hex.py create mode 100644 .venv/lib/python3.9/site-packages/construct/lib/py3compat.py create mode 100644 .venv/lib/python3.9/site-packages/construct/version.py create mode 100644 .venv/lib/python3.9/site-packages/construct_typed/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/construct_typed/dataclass_struct.py create mode 100644 .venv/lib/python3.9/site-packages/construct_typed/generic_wrapper.py create mode 100644 .venv/lib/python3.9/site-packages/construct_typed/py.typed create mode 100644 .venv/lib/python3.9/site-packages/construct_typed/tenum.py create mode 100644 .venv/lib/python3.9/site-packages/construct_typed/version.py create mode 100644 .venv/lib/python3.9/site-packages/construct_typing-0.5.2.dist-info/INSTALLER create mode 100644 .venv/lib/python3.9/site-packages/construct_typing-0.5.2.dist-info/LICENSE create mode 100644 .venv/lib/python3.9/site-packages/construct_typing-0.5.2.dist-info/METADATA create mode 100644 .venv/lib/python3.9/site-packages/construct_typing-0.5.2.dist-info/RECORD create mode 100644 .venv/lib/python3.9/site-packages/construct_typing-0.5.2.dist-info/WHEEL create mode 100644 .venv/lib/python3.9/site-packages/construct_typing-0.5.2.dist-info/top_level.txt create mode 100644 .venv/lib/python3.9/site-packages/distutils-precedence.pth create mode 100644 .venv/lib/python3.9/site-packages/h11-0.12.0.dist-info/INSTALLER create mode 100644 .venv/lib/python3.9/site-packages/h11-0.12.0.dist-info/LICENSE.txt create mode 100644 .venv/lib/python3.9/site-packages/h11-0.12.0.dist-info/METADATA create mode 100644 .venv/lib/python3.9/site-packages/h11-0.12.0.dist-info/RECORD create mode 100644 .venv/lib/python3.9/site-packages/h11-0.12.0.dist-info/WHEEL create mode 100644 .venv/lib/python3.9/site-packages/h11-0.12.0.dist-info/top_level.txt create mode 100644 
.venv/lib/python3.9/site-packages/h11/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/h11/_abnf.py create mode 100644 .venv/lib/python3.9/site-packages/h11/_connection.py create mode 100644 .venv/lib/python3.9/site-packages/h11/_events.py create mode 100644 .venv/lib/python3.9/site-packages/h11/_headers.py create mode 100644 .venv/lib/python3.9/site-packages/h11/_readers.py create mode 100644 .venv/lib/python3.9/site-packages/h11/_receivebuffer.py create mode 100644 .venv/lib/python3.9/site-packages/h11/_state.py create mode 100644 .venv/lib/python3.9/site-packages/h11/_util.py create mode 100644 .venv/lib/python3.9/site-packages/h11/_version.py create mode 100644 .venv/lib/python3.9/site-packages/h11/_writers.py create mode 100644 .venv/lib/python3.9/site-packages/h11/tests/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/h11/tests/data/test-file create mode 100644 .venv/lib/python3.9/site-packages/h11/tests/helpers.py create mode 100644 .venv/lib/python3.9/site-packages/h11/tests/test_against_stdlib_http.py create mode 100644 .venv/lib/python3.9/site-packages/h11/tests/test_connection.py create mode 100644 .venv/lib/python3.9/site-packages/h11/tests/test_events.py create mode 100644 .venv/lib/python3.9/site-packages/h11/tests/test_headers.py create mode 100644 .venv/lib/python3.9/site-packages/h11/tests/test_helpers.py create mode 100644 .venv/lib/python3.9/site-packages/h11/tests/test_io.py create mode 100644 .venv/lib/python3.9/site-packages/h11/tests/test_receivebuffer.py create mode 100644 .venv/lib/python3.9/site-packages/h11/tests/test_state.py create mode 100644 .venv/lib/python3.9/site-packages/h11/tests/test_util.py create mode 100644 .venv/lib/python3.9/site-packages/httpcore-0.13.7.dist-info/INSTALLER create mode 100644 .venv/lib/python3.9/site-packages/httpcore-0.13.7.dist-info/LICENSE.md create mode 100644 .venv/lib/python3.9/site-packages/httpcore-0.13.7.dist-info/METADATA create mode 100644 
.venv/lib/python3.9/site-packages/httpcore-0.13.7.dist-info/RECORD create mode 100644 .venv/lib/python3.9/site-packages/httpcore-0.13.7.dist-info/WHEEL create mode 100644 .venv/lib/python3.9/site-packages/httpcore-0.13.7.dist-info/top_level.txt create mode 100644 .venv/lib/python3.9/site-packages/httpcore/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/httpcore/_async/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/httpcore/_async/base.py create mode 100644 .venv/lib/python3.9/site-packages/httpcore/_async/connection.py create mode 100644 .venv/lib/python3.9/site-packages/httpcore/_async/connection_pool.py create mode 100644 .venv/lib/python3.9/site-packages/httpcore/_async/http.py create mode 100644 .venv/lib/python3.9/site-packages/httpcore/_async/http11.py create mode 100644 .venv/lib/python3.9/site-packages/httpcore/_async/http2.py create mode 100644 .venv/lib/python3.9/site-packages/httpcore/_async/http_proxy.py create mode 100644 .venv/lib/python3.9/site-packages/httpcore/_backends/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/httpcore/_backends/anyio.py create mode 100644 .venv/lib/python3.9/site-packages/httpcore/_backends/asyncio.py create mode 100644 .venv/lib/python3.9/site-packages/httpcore/_backends/auto.py create mode 100644 .venv/lib/python3.9/site-packages/httpcore/_backends/base.py create mode 100644 .venv/lib/python3.9/site-packages/httpcore/_backends/curio.py create mode 100644 .venv/lib/python3.9/site-packages/httpcore/_backends/sync.py create mode 100644 .venv/lib/python3.9/site-packages/httpcore/_backends/trio.py create mode 100644 .venv/lib/python3.9/site-packages/httpcore/_bytestreams.py create mode 100644 .venv/lib/python3.9/site-packages/httpcore/_exceptions.py create mode 100644 .venv/lib/python3.9/site-packages/httpcore/_sync/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/httpcore/_sync/base.py create mode 100644 
.venv/lib/python3.9/site-packages/httpcore/_sync/connection.py create mode 100644 .venv/lib/python3.9/site-packages/httpcore/_sync/connection_pool.py create mode 100644 .venv/lib/python3.9/site-packages/httpcore/_sync/http.py create mode 100644 .venv/lib/python3.9/site-packages/httpcore/_sync/http11.py create mode 100644 .venv/lib/python3.9/site-packages/httpcore/_sync/http2.py create mode 100644 .venv/lib/python3.9/site-packages/httpcore/_sync/http_proxy.py create mode 100644 .venv/lib/python3.9/site-packages/httpcore/_threadlock.py create mode 100644 .venv/lib/python3.9/site-packages/httpcore/_types.py create mode 100644 .venv/lib/python3.9/site-packages/httpcore/_utils.py create mode 100644 .venv/lib/python3.9/site-packages/httpcore/py.typed create mode 100644 .venv/lib/python3.9/site-packages/httpx-0.18.2.dist-info/INSTALLER create mode 100644 .venv/lib/python3.9/site-packages/httpx-0.18.2.dist-info/LICENSE.md create mode 100644 .venv/lib/python3.9/site-packages/httpx-0.18.2.dist-info/METADATA create mode 100644 .venv/lib/python3.9/site-packages/httpx-0.18.2.dist-info/RECORD create mode 100644 .venv/lib/python3.9/site-packages/httpx-0.18.2.dist-info/WHEEL create mode 100644 .venv/lib/python3.9/site-packages/httpx-0.18.2.dist-info/top_level.txt create mode 100644 .venv/lib/python3.9/site-packages/httpx/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/httpx/__version__.py create mode 100644 .venv/lib/python3.9/site-packages/httpx/_api.py create mode 100644 .venv/lib/python3.9/site-packages/httpx/_auth.py create mode 100644 .venv/lib/python3.9/site-packages/httpx/_client.py create mode 100644 .venv/lib/python3.9/site-packages/httpx/_compat.py create mode 100644 .venv/lib/python3.9/site-packages/httpx/_config.py create mode 100644 .venv/lib/python3.9/site-packages/httpx/_content.py create mode 100644 .venv/lib/python3.9/site-packages/httpx/_decoders.py create mode 100644 .venv/lib/python3.9/site-packages/httpx/_exceptions.py create mode 100644 
.venv/lib/python3.9/site-packages/httpx/_models.py create mode 100644 .venv/lib/python3.9/site-packages/httpx/_multipart.py create mode 100644 .venv/lib/python3.9/site-packages/httpx/_status_codes.py create mode 100644 .venv/lib/python3.9/site-packages/httpx/_transports/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/httpx/_transports/asgi.py create mode 100644 .venv/lib/python3.9/site-packages/httpx/_transports/base.py create mode 100644 .venv/lib/python3.9/site-packages/httpx/_transports/default.py create mode 100644 .venv/lib/python3.9/site-packages/httpx/_transports/mock.py create mode 100644 .venv/lib/python3.9/site-packages/httpx/_transports/wsgi.py create mode 100644 .venv/lib/python3.9/site-packages/httpx/_types.py create mode 100644 .venv/lib/python3.9/site-packages/httpx/_utils.py create mode 100644 .venv/lib/python3.9/site-packages/httpx/py.typed create mode 100644 .venv/lib/python3.9/site-packages/idna-3.3.dist-info/INSTALLER create mode 100644 .venv/lib/python3.9/site-packages/idna-3.3.dist-info/LICENSE.md create mode 100644 .venv/lib/python3.9/site-packages/idna-3.3.dist-info/METADATA create mode 100644 .venv/lib/python3.9/site-packages/idna-3.3.dist-info/RECORD create mode 100644 .venv/lib/python3.9/site-packages/idna-3.3.dist-info/WHEEL create mode 100644 .venv/lib/python3.9/site-packages/idna-3.3.dist-info/top_level.txt create mode 100644 .venv/lib/python3.9/site-packages/idna/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/idna/codec.py create mode 100644 .venv/lib/python3.9/site-packages/idna/compat.py create mode 100644 .venv/lib/python3.9/site-packages/idna/core.py create mode 100644 .venv/lib/python3.9/site-packages/idna/idnadata.py create mode 100644 .venv/lib/python3.9/site-packages/idna/intranges.py create mode 100644 .venv/lib/python3.9/site-packages/idna/package_data.py create mode 100644 .venv/lib/python3.9/site-packages/idna/py.typed create mode 100644 .venv/lib/python3.9/site-packages/idna/uts46data.py 
create mode 100644 .venv/lib/python3.9/site-packages/jsonrpcclient-4.0.2.dist-info/INSTALLER create mode 100644 .venv/lib/python3.9/site-packages/jsonrpcclient-4.0.2.dist-info/LICENSE create mode 100644 .venv/lib/python3.9/site-packages/jsonrpcclient-4.0.2.dist-info/METADATA create mode 100644 .venv/lib/python3.9/site-packages/jsonrpcclient-4.0.2.dist-info/RECORD create mode 100644 .venv/lib/python3.9/site-packages/jsonrpcclient-4.0.2.dist-info/WHEEL create mode 100644 .venv/lib/python3.9/site-packages/jsonrpcclient-4.0.2.dist-info/top_level.txt create mode 100644 .venv/lib/python3.9/site-packages/jsonrpcclient/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/jsonrpcclient/id_generators.py create mode 100644 .venv/lib/python3.9/site-packages/jsonrpcclient/py.typed create mode 100644 .venv/lib/python3.9/site-packages/jsonrpcclient/requests.py create mode 100644 .venv/lib/python3.9/site-packages/jsonrpcclient/responses.py create mode 100644 .venv/lib/python3.9/site-packages/jsonrpcclient/sentinels.py create mode 100644 .venv/lib/python3.9/site-packages/jsonrpcclient/utils.py create mode 100644 .venv/lib/python3.9/site-packages/jsonrpcserver-5.0.7.dist-info/INSTALLER create mode 100644 .venv/lib/python3.9/site-packages/jsonrpcserver-5.0.7.dist-info/LICENSE create mode 100644 .venv/lib/python3.9/site-packages/jsonrpcserver-5.0.7.dist-info/METADATA create mode 100644 .venv/lib/python3.9/site-packages/jsonrpcserver-5.0.7.dist-info/RECORD create mode 100644 .venv/lib/python3.9/site-packages/jsonrpcserver-5.0.7.dist-info/WHEEL create mode 100644 .venv/lib/python3.9/site-packages/jsonrpcserver-5.0.7.dist-info/top_level.txt create mode 100644 .venv/lib/python3.9/site-packages/jsonrpcserver/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/jsonrpcserver/async_dispatcher.py create mode 100644 .venv/lib/python3.9/site-packages/jsonrpcserver/async_main.py create mode 100644 .venv/lib/python3.9/site-packages/jsonrpcserver/codes.py create mode 100644 
.venv/lib/python3.9/site-packages/jsonrpcserver/dispatcher.py create mode 100644 .venv/lib/python3.9/site-packages/jsonrpcserver/exceptions.py create mode 100644 .venv/lib/python3.9/site-packages/jsonrpcserver/main.py create mode 100644 .venv/lib/python3.9/site-packages/jsonrpcserver/methods.py create mode 100644 .venv/lib/python3.9/site-packages/jsonrpcserver/py.typed create mode 100644 .venv/lib/python3.9/site-packages/jsonrpcserver/request-schema.json create mode 100644 .venv/lib/python3.9/site-packages/jsonrpcserver/request.py create mode 100644 .venv/lib/python3.9/site-packages/jsonrpcserver/response.py create mode 100644 .venv/lib/python3.9/site-packages/jsonrpcserver/result.py create mode 100644 .venv/lib/python3.9/site-packages/jsonrpcserver/sentinels.py create mode 100644 .venv/lib/python3.9/site-packages/jsonrpcserver/server.py create mode 100644 .venv/lib/python3.9/site-packages/jsonrpcserver/utils.py create mode 100644 .venv/lib/python3.9/site-packages/jsonschema-4.4.0.dist-info/COPYING create mode 100644 .venv/lib/python3.9/site-packages/jsonschema-4.4.0.dist-info/INSTALLER create mode 100644 .venv/lib/python3.9/site-packages/jsonschema-4.4.0.dist-info/METADATA create mode 100644 .venv/lib/python3.9/site-packages/jsonschema-4.4.0.dist-info/RECORD create mode 100644 .venv/lib/python3.9/site-packages/jsonschema-4.4.0.dist-info/WHEEL create mode 100644 .venv/lib/python3.9/site-packages/jsonschema-4.4.0.dist-info/entry_points.txt create mode 100644 .venv/lib/python3.9/site-packages/jsonschema-4.4.0.dist-info/top_level.txt create mode 100644 .venv/lib/python3.9/site-packages/jsonschema/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/jsonschema/__main__.py create mode 100644 .venv/lib/python3.9/site-packages/jsonschema/_format.py create mode 100644 .venv/lib/python3.9/site-packages/jsonschema/_legacy_validators.py create mode 100644 .venv/lib/python3.9/site-packages/jsonschema/_reflect.py create mode 100644 
.venv/lib/python3.9/site-packages/jsonschema/_types.py create mode 100644 .venv/lib/python3.9/site-packages/jsonschema/_utils.py create mode 100644 .venv/lib/python3.9/site-packages/jsonschema/_validators.py create mode 100644 .venv/lib/python3.9/site-packages/jsonschema/benchmarks/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/jsonschema/benchmarks/issue232.py create mode 100644 .venv/lib/python3.9/site-packages/jsonschema/benchmarks/json_schema_test_suite.py create mode 100644 .venv/lib/python3.9/site-packages/jsonschema/cli.py create mode 100644 .venv/lib/python3.9/site-packages/jsonschema/exceptions.py create mode 100644 .venv/lib/python3.9/site-packages/jsonschema/protocols.py create mode 100644 .venv/lib/python3.9/site-packages/jsonschema/schemas/draft2019-09.json create mode 100644 .venv/lib/python3.9/site-packages/jsonschema/schemas/draft2020-12.json create mode 100644 .venv/lib/python3.9/site-packages/jsonschema/schemas/draft3.json create mode 100644 .venv/lib/python3.9/site-packages/jsonschema/schemas/draft4.json create mode 100644 .venv/lib/python3.9/site-packages/jsonschema/schemas/draft6.json create mode 100644 .venv/lib/python3.9/site-packages/jsonschema/schemas/draft7.json create mode 100644 .venv/lib/python3.9/site-packages/jsonschema/schemas/vocabularies.json create mode 100644 .venv/lib/python3.9/site-packages/jsonschema/tests/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/jsonschema/tests/_helpers.py create mode 100644 .venv/lib/python3.9/site-packages/jsonschema/tests/_suite.py create mode 100644 .venv/lib/python3.9/site-packages/jsonschema/tests/fuzz_validate.py create mode 100644 .venv/lib/python3.9/site-packages/jsonschema/tests/test_cli.py create mode 100644 .venv/lib/python3.9/site-packages/jsonschema/tests/test_deprecations.py create mode 100644 .venv/lib/python3.9/site-packages/jsonschema/tests/test_exceptions.py create mode 100644 .venv/lib/python3.9/site-packages/jsonschema/tests/test_format.py create 
mode 100644 .venv/lib/python3.9/site-packages/jsonschema/tests/test_jsonschema_test_suite.py create mode 100644 .venv/lib/python3.9/site-packages/jsonschema/tests/test_types.py create mode 100644 .venv/lib/python3.9/site-packages/jsonschema/tests/test_utils.py create mode 100644 .venv/lib/python3.9/site-packages/jsonschema/tests/test_validators.py create mode 100644 .venv/lib/python3.9/site-packages/jsonschema/validators.py create mode 100644 .venv/lib/python3.9/site-packages/nacl/__init__.py create mode 100755 .venv/lib/python3.9/site-packages/nacl/_sodium.abi3.so create mode 100644 .venv/lib/python3.9/site-packages/nacl/bindings/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/nacl/bindings/crypto_aead.py create mode 100644 .venv/lib/python3.9/site-packages/nacl/bindings/crypto_box.py create mode 100644 .venv/lib/python3.9/site-packages/nacl/bindings/crypto_core.py create mode 100644 .venv/lib/python3.9/site-packages/nacl/bindings/crypto_generichash.py create mode 100644 .venv/lib/python3.9/site-packages/nacl/bindings/crypto_hash.py create mode 100644 .venv/lib/python3.9/site-packages/nacl/bindings/crypto_kx.py create mode 100644 .venv/lib/python3.9/site-packages/nacl/bindings/crypto_pwhash.py create mode 100644 .venv/lib/python3.9/site-packages/nacl/bindings/crypto_scalarmult.py create mode 100644 .venv/lib/python3.9/site-packages/nacl/bindings/crypto_secretbox.py create mode 100644 .venv/lib/python3.9/site-packages/nacl/bindings/crypto_secretstream.py create mode 100644 .venv/lib/python3.9/site-packages/nacl/bindings/crypto_shorthash.py create mode 100644 .venv/lib/python3.9/site-packages/nacl/bindings/crypto_sign.py create mode 100644 .venv/lib/python3.9/site-packages/nacl/bindings/randombytes.py create mode 100644 .venv/lib/python3.9/site-packages/nacl/bindings/sodium_core.py create mode 100644 .venv/lib/python3.9/site-packages/nacl/bindings/utils.py create mode 100644 .venv/lib/python3.9/site-packages/nacl/encoding.py create mode 100644 
.venv/lib/python3.9/site-packages/nacl/exceptions.py create mode 100644 .venv/lib/python3.9/site-packages/nacl/hash.py create mode 100644 .venv/lib/python3.9/site-packages/nacl/hashlib.py create mode 100644 .venv/lib/python3.9/site-packages/nacl/public.py create mode 100644 .venv/lib/python3.9/site-packages/nacl/pwhash/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/nacl/pwhash/_argon2.py create mode 100644 .venv/lib/python3.9/site-packages/nacl/pwhash/argon2i.py create mode 100644 .venv/lib/python3.9/site-packages/nacl/pwhash/argon2id.py create mode 100644 .venv/lib/python3.9/site-packages/nacl/pwhash/scrypt.py create mode 100644 .venv/lib/python3.9/site-packages/nacl/py.typed create mode 100644 .venv/lib/python3.9/site-packages/nacl/secret.py create mode 100644 .venv/lib/python3.9/site-packages/nacl/signing.py create mode 100644 .venv/lib/python3.9/site-packages/nacl/utils.py create mode 100644 .venv/lib/python3.9/site-packages/oslash/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/oslash/_version.py create mode 100644 .venv/lib/python3.9/site-packages/oslash/cont.py create mode 100644 .venv/lib/python3.9/site-packages/oslash/do.py create mode 100644 .venv/lib/python3.9/site-packages/oslash/either.py create mode 100644 .venv/lib/python3.9/site-packages/oslash/identity.py create mode 100644 .venv/lib/python3.9/site-packages/oslash/ioaction.py create mode 100644 .venv/lib/python3.9/site-packages/oslash/list.py create mode 100644 .venv/lib/python3.9/site-packages/oslash/maybe.py create mode 100644 .venv/lib/python3.9/site-packages/oslash/monadic.py create mode 100644 .venv/lib/python3.9/site-packages/oslash/observable.py create mode 100644 .venv/lib/python3.9/site-packages/oslash/reader.py create mode 100644 .venv/lib/python3.9/site-packages/oslash/state.py create mode 100644 .venv/lib/python3.9/site-packages/oslash/typing/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/oslash/typing/applicative.py create mode 
100644 .venv/lib/python3.9/site-packages/oslash/typing/functor.py create mode 100644 .venv/lib/python3.9/site-packages/oslash/typing/monad.py create mode 100644 .venv/lib/python3.9/site-packages/oslash/typing/monoid.py create mode 100644 .venv/lib/python3.9/site-packages/oslash/util/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/oslash/util/basic.py create mode 100644 .venv/lib/python3.9/site-packages/oslash/util/fn.py create mode 100644 .venv/lib/python3.9/site-packages/oslash/util/numerals.py create mode 100644 .venv/lib/python3.9/site-packages/oslash/writer.py create mode 100644 .venv/lib/python3.9/site-packages/pip-22.0.4.dist-info/INSTALLER create mode 100644 .venv/lib/python3.9/site-packages/pip-22.0.4.dist-info/LICENSE.txt create mode 100644 .venv/lib/python3.9/site-packages/pip-22.0.4.dist-info/METADATA create mode 100644 .venv/lib/python3.9/site-packages/pip-22.0.4.dist-info/RECORD create mode 100644 .venv/lib/python3.9/site-packages/pip-22.0.4.dist-info/REQUESTED create mode 100644 .venv/lib/python3.9/site-packages/pip-22.0.4.dist-info/WHEEL create mode 100644 .venv/lib/python3.9/site-packages/pip-22.0.4.dist-info/entry_points.txt create mode 100644 .venv/lib/python3.9/site-packages/pip-22.0.4.dist-info/top_level.txt create mode 100644 .venv/lib/python3.9/site-packages/pip/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/__main__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/build_env.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/cache.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/cli/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/cli/autocompletion.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/cli/base_command.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/cli/cmdoptions.py create mode 
100644 .venv/lib/python3.9/site-packages/pip/_internal/cli/command_context.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/cli/main.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/cli/main_parser.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/cli/parser.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/cli/progress_bars.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/cli/req_command.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/cli/spinners.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/cli/status_codes.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/commands/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/commands/cache.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/commands/check.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/commands/completion.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/commands/configuration.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/commands/debug.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/commands/download.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/commands/freeze.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/commands/hash.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/commands/help.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/commands/index.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/commands/install.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/commands/list.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/commands/search.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/commands/show.py create mode 100644 
.venv/lib/python3.9/site-packages/pip/_internal/commands/uninstall.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/commands/wheel.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/configuration.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/distributions/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/distributions/base.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/distributions/installed.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/distributions/sdist.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/distributions/wheel.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/exceptions.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/index/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/index/collector.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/index/package_finder.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/index/sources.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/locations/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/locations/_distutils.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/locations/_sysconfig.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/locations/base.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/main.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/metadata/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/metadata/base.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/metadata/pkg_resources.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/models/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/models/candidate.py create 
mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/models/direct_url.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/models/format_control.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/models/index.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/models/link.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/models/scheme.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/models/search_scope.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/models/selection_prefs.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/models/target_python.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/models/wheel.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/network/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/network/auth.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/network/cache.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/network/download.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/network/lazy_wheel.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/network/session.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/network/utils.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/network/xmlrpc.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/operations/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/operations/build/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/operations/build/metadata.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/operations/build/metadata_editable.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/operations/build/metadata_legacy.py create mode 100644 
.venv/lib/python3.9/site-packages/pip/_internal/operations/build/wheel.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/operations/build/wheel_editable.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/operations/build/wheel_legacy.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/operations/check.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/operations/freeze.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/operations/install/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/operations/install/editable_legacy.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/operations/install/legacy.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/operations/install/wheel.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/operations/prepare.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/pyproject.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/req/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/req/constructors.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/req/req_file.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/req/req_install.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/req/req_set.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/req/req_tracker.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/req/req_uninstall.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/resolution/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/resolution/base.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/resolution/legacy/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/resolution/legacy/resolver.py create mode 100644 
.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/base.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/candidates.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/factory.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/provider.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/reporter.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/requirements.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/resolver.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/self_outdated_check.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/utils/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/utils/_log.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/utils/appdirs.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/utils/compat.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/utils/compatibility_tags.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/utils/datetime.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/utils/deprecation.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/utils/direct_url_helpers.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/utils/distutils_args.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/utils/egg_link.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/utils/encoding.py create mode 100644 
.venv/lib/python3.9/site-packages/pip/_internal/utils/entrypoints.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/utils/filesystem.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/utils/filetypes.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/utils/glibc.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/utils/hashes.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/utils/inject_securetransport.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/utils/logging.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/utils/misc.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/utils/models.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/utils/packaging.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/utils/setuptools_build.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/utils/subprocess.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/utils/temp_dir.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/utils/unpacking.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/utils/urls.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/utils/virtualenv.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/utils/wheel.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/vcs/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/vcs/bazaar.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/vcs/git.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/vcs/mercurial.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/vcs/subversion.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_internal/vcs/versioncontrol.py create mode 100644 
.venv/lib/python3.9/site-packages/pip/_internal/wheel_builder.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/_cmd.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/adapter.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/cache.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/caches/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/compat.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/controller.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/filewrapper.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/heuristics.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/serialize.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/wrapper.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/certifi/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/certifi/__main__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/certifi/cacert.pem create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/certifi/core.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/chardet/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/chardet/big5freq.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/chardet/big5prober.py create mode 100644 
.venv/lib/python3.9/site-packages/pip/_vendor/chardet/chardistribution.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/chardet/charsetgroupprober.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/chardet/charsetprober.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/chardet/cli/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/chardet/cli/chardetect.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/chardet/codingstatemachine.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/chardet/compat.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/chardet/cp949prober.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/chardet/enums.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/chardet/escprober.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/chardet/escsm.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/chardet/eucjpprober.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/chardet/euckrfreq.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/chardet/euckrprober.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/chardet/euctwfreq.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/chardet/euctwprober.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/chardet/gb2312freq.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/chardet/gb2312prober.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/chardet/hebrewprober.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/chardet/jisfreq.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/chardet/jpcntx.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/chardet/langbulgarianmodel.py create mode 100644 
.venv/lib/python3.9/site-packages/pip/_vendor/chardet/langgreekmodel.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/chardet/langhebrewmodel.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/chardet/langhungarianmodel.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/chardet/langrussianmodel.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/chardet/langthaimodel.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/chardet/langturkishmodel.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/chardet/latin1prober.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/chardet/mbcharsetprober.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/chardet/mbcsgroupprober.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/chardet/mbcssm.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/chardet/metadata/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/chardet/metadata/languages.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/chardet/sbcharsetprober.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/chardet/sbcsgroupprober.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/chardet/sjisprober.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/chardet/universaldetector.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/chardet/utf8prober.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/chardet/version.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/colorama/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/colorama/ansi.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/colorama/ansitowin32.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/colorama/initialise.py create mode 100644 
.venv/lib/python3.9/site-packages/pip/_vendor/colorama/win32.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/colorama/winterm.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/distlib/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/distlib/_backport/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/distlib/_backport/misc.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/distlib/_backport/shutil.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/distlib/_backport/sysconfig.cfg create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/distlib/_backport/sysconfig.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/distlib/_backport/tarfile.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/distlib/compat.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/distlib/database.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/distlib/index.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/distlib/locators.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/distlib/manifest.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/distlib/markers.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/distlib/metadata.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/distlib/resources.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/distlib/scripts.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/distlib/t32.exe create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/distlib/t64-arm.exe create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/distlib/t64.exe create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/distlib/util.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/distlib/version.py create mode 100644 
.venv/lib/python3.9/site-packages/pip/_vendor/distlib/w32.exe create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/distlib/w64-arm.exe create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/distlib/w64.exe create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/distlib/wheel.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/distro.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/html5lib/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/html5lib/_ihatexml.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/html5lib/_inputstream.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/html5lib/_tokenizer.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/html5lib/_trie/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/html5lib/_trie/_base.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/html5lib/_trie/py.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/html5lib/_utils.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/html5lib/constants.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/alphabeticalattributes.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/base.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/inject_meta_charset.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/lint.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/optionaltags.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/sanitizer.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/whitespace.py create mode 100644 
.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/html5parser.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/html5lib/serializer.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/html5lib/treeadapters/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/html5lib/treeadapters/genshi.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/html5lib/treeadapters/sax.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/html5lib/treebuilders/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/html5lib/treebuilders/base.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/html5lib/treebuilders/dom.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/html5lib/treebuilders/etree.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/html5lib/treebuilders/etree_lxml.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/html5lib/treewalkers/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/html5lib/treewalkers/base.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/html5lib/treewalkers/dom.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/html5lib/treewalkers/etree.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/html5lib/treewalkers/etree_lxml.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/html5lib/treewalkers/genshi.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/idna/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/idna/codec.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/idna/compat.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/idna/core.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/idna/idnadata.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/idna/intranges.py create 
mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/idna/package_data.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/idna/uts46data.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/msgpack/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/msgpack/_version.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/msgpack/exceptions.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/msgpack/ext.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/msgpack/fallback.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/packaging/__about__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/packaging/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/packaging/_manylinux.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/packaging/_musllinux.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/packaging/_structures.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/packaging/markers.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/packaging/requirements.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/packaging/specifiers.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/packaging/tags.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/packaging/utils.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/packaging/version.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pep517/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pep517/build.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pep517/check.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pep517/colorlog.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pep517/compat.py create mode 100644 
.venv/lib/python3.9/site-packages/pip/_vendor/pep517/dirtools.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pep517/envbuild.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pep517/in_process/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pep517/in_process/_in_process.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pep517/meta.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pep517/wrappers.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pkg_resources/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pkg_resources/py31compat.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/platformdirs/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/platformdirs/__main__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/platformdirs/android.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/platformdirs/api.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/platformdirs/macos.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/platformdirs/unix.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/platformdirs/version.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/platformdirs/windows.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/progress/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/progress/bar.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/progress/colors.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/progress/counter.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/progress/spinner.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pygments/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pygments/__main__.py create mode 
100644 .venv/lib/python3.9/site-packages/pip/_vendor/pygments/cmdline.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pygments/console.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pygments/filter.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pygments/filters/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pygments/formatter.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pygments/formatters/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pygments/formatters/_mapping.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pygments/formatters/bbcode.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pygments/formatters/groff.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pygments/formatters/html.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pygments/formatters/img.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pygments/formatters/irc.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pygments/formatters/latex.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pygments/formatters/other.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pygments/formatters/pangomarkup.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pygments/formatters/rtf.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pygments/formatters/svg.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pygments/formatters/terminal.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pygments/formatters/terminal256.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pygments/lexer.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pygments/lexers/__init__.py create mode 100644 
.venv/lib/python3.9/site-packages/pip/_vendor/pygments/lexers/_mapping.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pygments/lexers/python.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pygments/modeline.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pygments/plugin.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pygments/regexopt.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pygments/scanner.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pygments/sphinxext.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pygments/style.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pygments/styles/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pygments/token.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pygments/unistring.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pygments/util.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pyparsing/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pyparsing/actions.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pyparsing/common.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pyparsing/core.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pyparsing/diagram/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pyparsing/exceptions.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pyparsing/helpers.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pyparsing/results.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pyparsing/testing.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pyparsing/unicode.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/pyparsing/util.py create mode 100644 
.venv/lib/python3.9/site-packages/pip/_vendor/requests/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/requests/__version__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/requests/_internal_utils.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/requests/adapters.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/requests/api.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/requests/auth.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/requests/certs.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/requests/compat.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/requests/cookies.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/requests/exceptions.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/requests/help.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/requests/hooks.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/requests/models.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/requests/packages.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/requests/sessions.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/requests/status_codes.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/requests/structures.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/requests/utils.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/resolvelib/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/resolvelib/compat/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/resolvelib/compat/collections_abc.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/resolvelib/providers.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/resolvelib/reporters.py create mode 
100644 .venv/lib/python3.9/site-packages/pip/_vendor/resolvelib/resolvers.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/resolvelib/structs.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/__main__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/_cell_widths.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/_emoji_codes.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/_emoji_replace.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/_extension.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/_inspect.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/_log_render.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/_loop.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/_lru_cache.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/_palettes.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/_pick.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/_ratio.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/_spinners.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/_stack.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/_timer.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/_windows.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/_wrap.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/abc.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/align.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/ansi.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/bar.py create mode 100644 
.venv/lib/python3.9/site-packages/pip/_vendor/rich/box.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/cells.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/color.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/color_triplet.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/columns.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/console.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/constrain.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/containers.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/control.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/default_styles.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/diagnose.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/emoji.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/errors.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/file_proxy.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/filesize.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/highlighter.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/json.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/jupyter.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/layout.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/live.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/live_render.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/logging.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/markup.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/measure.py create mode 100644 
.venv/lib/python3.9/site-packages/pip/_vendor/rich/padding.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/pager.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/palette.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/panel.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/pretty.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/progress.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/progress_bar.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/prompt.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/protocol.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/region.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/repr.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/rule.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/scope.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/screen.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/segment.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/spinner.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/status.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/style.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/styled.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/syntax.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/table.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/tabulate.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/terminal_theme.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/text.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/theme.py create 
mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/themes.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/traceback.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/rich/tree.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/six.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/tenacity/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/tenacity/_asyncio.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/tenacity/_utils.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/tenacity/after.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/tenacity/before.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/tenacity/before_sleep.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/tenacity/nap.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/tenacity/retry.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/tenacity/stop.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/tenacity/tornadoweb.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/tenacity/wait.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/tomli/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/tomli/_parser.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/tomli/_re.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/typing_extensions.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/urllib3/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/urllib3/_collections.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/urllib3/_version.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/urllib3/connection.py create mode 100644 
.venv/lib/python3.9/site-packages/pip/_vendor/urllib3/connectionpool.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/urllib3/contrib/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/urllib3/contrib/_appengine_environ.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/urllib3/contrib/_securetransport/bindings.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/urllib3/contrib/appengine.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/urllib3/contrib/ntlmpool.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/urllib3/contrib/pyopenssl.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/urllib3/contrib/securetransport.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/urllib3/contrib/socks.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/urllib3/exceptions.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/urllib3/fields.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/urllib3/filepost.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/urllib3/packages/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/urllib3/packages/backports/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/urllib3/packages/backports/makefile.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/urllib3/packages/six.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/urllib3/poolmanager.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/urllib3/request.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/urllib3/response.py create mode 100644 
.venv/lib/python3.9/site-packages/pip/_vendor/urllib3/util/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/urllib3/util/connection.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/urllib3/util/proxy.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/urllib3/util/queue.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/urllib3/util/request.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/urllib3/util/response.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/urllib3/util/retry.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/urllib3/util/ssl_.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/urllib3/util/ssl_match_hostname.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/urllib3/util/ssltransport.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/urllib3/util/timeout.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/urllib3/util/url.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/urllib3/util/wait.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/vendor.txt create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/webencodings/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/webencodings/labels.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/webencodings/mklabels.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/webencodings/tests.py create mode 100644 .venv/lib/python3.9/site-packages/pip/_vendor/webencodings/x_user_defined.py create mode 100644 .venv/lib/python3.9/site-packages/pip/py.typed create mode 100644 .venv/lib/python3.9/site-packages/pkg_resources/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pkg_resources/_vendor/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pkg_resources/_vendor/appdirs.py create mode 
100644 .venv/lib/python3.9/site-packages/pkg_resources/_vendor/packaging/__about__.py create mode 100644 .venv/lib/python3.9/site-packages/pkg_resources/_vendor/packaging/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pkg_resources/_vendor/packaging/_manylinux.py create mode 100644 .venv/lib/python3.9/site-packages/pkg_resources/_vendor/packaging/_musllinux.py create mode 100644 .venv/lib/python3.9/site-packages/pkg_resources/_vendor/packaging/_structures.py create mode 100644 .venv/lib/python3.9/site-packages/pkg_resources/_vendor/packaging/markers.py create mode 100644 .venv/lib/python3.9/site-packages/pkg_resources/_vendor/packaging/requirements.py create mode 100644 .venv/lib/python3.9/site-packages/pkg_resources/_vendor/packaging/specifiers.py create mode 100644 .venv/lib/python3.9/site-packages/pkg_resources/_vendor/packaging/tags.py create mode 100644 .venv/lib/python3.9/site-packages/pkg_resources/_vendor/packaging/utils.py create mode 100644 .venv/lib/python3.9/site-packages/pkg_resources/_vendor/packaging/version.py create mode 100644 .venv/lib/python3.9/site-packages/pkg_resources/_vendor/pyparsing.py create mode 100644 .venv/lib/python3.9/site-packages/pkg_resources/extern/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pkg_resources/tests/data/my-test-package-source/setup.py create mode 100755 .venv/lib/python3.9/site-packages/pvectorc.cpython-39-darwin.so create mode 100644 .venv/lib/python3.9/site-packages/pycparser-2.21.dist-info/INSTALLER create mode 100644 .venv/lib/python3.9/site-packages/pycparser-2.21.dist-info/LICENSE create mode 100644 .venv/lib/python3.9/site-packages/pycparser-2.21.dist-info/METADATA create mode 100644 .venv/lib/python3.9/site-packages/pycparser-2.21.dist-info/RECORD create mode 100644 .venv/lib/python3.9/site-packages/pycparser-2.21.dist-info/WHEEL create mode 100644 .venv/lib/python3.9/site-packages/pycparser-2.21.dist-info/top_level.txt create mode 100644 
.venv/lib/python3.9/site-packages/pycparser/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pycparser/_ast_gen.py create mode 100644 .venv/lib/python3.9/site-packages/pycparser/_build_tables.py create mode 100644 .venv/lib/python3.9/site-packages/pycparser/_c_ast.cfg create mode 100644 .venv/lib/python3.9/site-packages/pycparser/ast_transforms.py create mode 100644 .venv/lib/python3.9/site-packages/pycparser/c_ast.py create mode 100644 .venv/lib/python3.9/site-packages/pycparser/c_generator.py create mode 100644 .venv/lib/python3.9/site-packages/pycparser/c_lexer.py create mode 100644 .venv/lib/python3.9/site-packages/pycparser/c_parser.py create mode 100644 .venv/lib/python3.9/site-packages/pycparser/lextab.py create mode 100644 .venv/lib/python3.9/site-packages/pycparser/ply/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/pycparser/ply/cpp.py create mode 100644 .venv/lib/python3.9/site-packages/pycparser/ply/ctokens.py create mode 100644 .venv/lib/python3.9/site-packages/pycparser/ply/lex.py create mode 100644 .venv/lib/python3.9/site-packages/pycparser/ply/yacc.py create mode 100644 .venv/lib/python3.9/site-packages/pycparser/ply/ygen.py create mode 100644 .venv/lib/python3.9/site-packages/pycparser/plyparser.py create mode 100644 .venv/lib/python3.9/site-packages/pycparser/yacctab.py create mode 100644 .venv/lib/python3.9/site-packages/pyrsistent-0.18.1.dist-info/INSTALLER create mode 100644 .venv/lib/python3.9/site-packages/pyrsistent-0.18.1.dist-info/LICENSE.mit create mode 100644 .venv/lib/python3.9/site-packages/pyrsistent-0.18.1.dist-info/METADATA create mode 100644 .venv/lib/python3.9/site-packages/pyrsistent-0.18.1.dist-info/RECORD create mode 100644 .venv/lib/python3.9/site-packages/pyrsistent-0.18.1.dist-info/WHEEL create mode 100644 .venv/lib/python3.9/site-packages/pyrsistent-0.18.1.dist-info/top_level.txt create mode 100644 .venv/lib/python3.9/site-packages/pyrsistent/__init__.py create mode 100644 
.venv/lib/python3.9/site-packages/pyrsistent/__init__.pyi create mode 100644 .venv/lib/python3.9/site-packages/pyrsistent/_checked_types.py create mode 100644 .venv/lib/python3.9/site-packages/pyrsistent/_field_common.py create mode 100644 .venv/lib/python3.9/site-packages/pyrsistent/_helpers.py create mode 100644 .venv/lib/python3.9/site-packages/pyrsistent/_immutable.py create mode 100644 .venv/lib/python3.9/site-packages/pyrsistent/_pbag.py create mode 100644 .venv/lib/python3.9/site-packages/pyrsistent/_pclass.py create mode 100644 .venv/lib/python3.9/site-packages/pyrsistent/_pdeque.py create mode 100644 .venv/lib/python3.9/site-packages/pyrsistent/_plist.py create mode 100644 .venv/lib/python3.9/site-packages/pyrsistent/_pmap.py create mode 100644 .venv/lib/python3.9/site-packages/pyrsistent/_precord.py create mode 100644 .venv/lib/python3.9/site-packages/pyrsistent/_pset.py create mode 100644 .venv/lib/python3.9/site-packages/pyrsistent/_pvector.py create mode 100644 .venv/lib/python3.9/site-packages/pyrsistent/_toolz.py create mode 100644 .venv/lib/python3.9/site-packages/pyrsistent/_transformations.py create mode 100644 .venv/lib/python3.9/site-packages/pyrsistent/py.typed create mode 100644 .venv/lib/python3.9/site-packages/pyrsistent/typing.py create mode 100644 .venv/lib/python3.9/site-packages/pyrsistent/typing.pyi create mode 100644 .venv/lib/python3.9/site-packages/requests-2.27.1.dist-info/INSTALLER create mode 100644 .venv/lib/python3.9/site-packages/requests-2.27.1.dist-info/LICENSE create mode 100644 .venv/lib/python3.9/site-packages/requests-2.27.1.dist-info/METADATA create mode 100644 .venv/lib/python3.9/site-packages/requests-2.27.1.dist-info/RECORD create mode 100644 .venv/lib/python3.9/site-packages/requests-2.27.1.dist-info/WHEEL create mode 100644 .venv/lib/python3.9/site-packages/requests-2.27.1.dist-info/top_level.txt create mode 100644 .venv/lib/python3.9/site-packages/requests/__init__.py create mode 100644 
.venv/lib/python3.9/site-packages/requests/__version__.py create mode 100644 .venv/lib/python3.9/site-packages/requests/_internal_utils.py create mode 100644 .venv/lib/python3.9/site-packages/requests/adapters.py create mode 100644 .venv/lib/python3.9/site-packages/requests/api.py create mode 100644 .venv/lib/python3.9/site-packages/requests/auth.py create mode 100644 .venv/lib/python3.9/site-packages/requests/certs.py create mode 100644 .venv/lib/python3.9/site-packages/requests/compat.py create mode 100644 .venv/lib/python3.9/site-packages/requests/cookies.py create mode 100644 .venv/lib/python3.9/site-packages/requests/exceptions.py create mode 100644 .venv/lib/python3.9/site-packages/requests/help.py create mode 100644 .venv/lib/python3.9/site-packages/requests/hooks.py create mode 100644 .venv/lib/python3.9/site-packages/requests/models.py create mode 100644 .venv/lib/python3.9/site-packages/requests/packages.py create mode 100644 .venv/lib/python3.9/site-packages/requests/sessions.py create mode 100644 .venv/lib/python3.9/site-packages/requests/status_codes.py create mode 100644 .venv/lib/python3.9/site-packages/requests/structures.py create mode 100644 .venv/lib/python3.9/site-packages/requests/utils.py create mode 100644 .venv/lib/python3.9/site-packages/rfc3986-1.5.0.dist-info/AUTHORS.rst create mode 100644 .venv/lib/python3.9/site-packages/rfc3986-1.5.0.dist-info/INSTALLER create mode 100644 .venv/lib/python3.9/site-packages/rfc3986-1.5.0.dist-info/LICENSE create mode 100644 .venv/lib/python3.9/site-packages/rfc3986-1.5.0.dist-info/METADATA create mode 100644 .venv/lib/python3.9/site-packages/rfc3986-1.5.0.dist-info/RECORD create mode 100644 .venv/lib/python3.9/site-packages/rfc3986-1.5.0.dist-info/WHEEL create mode 100644 .venv/lib/python3.9/site-packages/rfc3986-1.5.0.dist-info/top_level.txt create mode 100644 .venv/lib/python3.9/site-packages/rfc3986/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/rfc3986/_mixin.py create mode 100644 
.venv/lib/python3.9/site-packages/rfc3986/abnf_regexp.py create mode 100644 .venv/lib/python3.9/site-packages/rfc3986/api.py create mode 100644 .venv/lib/python3.9/site-packages/rfc3986/builder.py create mode 100644 .venv/lib/python3.9/site-packages/rfc3986/compat.py create mode 100644 .venv/lib/python3.9/site-packages/rfc3986/exceptions.py create mode 100644 .venv/lib/python3.9/site-packages/rfc3986/iri.py create mode 100644 .venv/lib/python3.9/site-packages/rfc3986/misc.py create mode 100644 .venv/lib/python3.9/site-packages/rfc3986/normalizers.py create mode 100644 .venv/lib/python3.9/site-packages/rfc3986/parseresult.py create mode 100644 .venv/lib/python3.9/site-packages/rfc3986/uri.py create mode 100644 .venv/lib/python3.9/site-packages/rfc3986/validators.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools-60.5.0.dist-info/INSTALLER create mode 100644 .venv/lib/python3.9/site-packages/setuptools-60.5.0.dist-info/LICENSE create mode 100644 .venv/lib/python3.9/site-packages/setuptools-60.5.0.dist-info/METADATA create mode 100644 .venv/lib/python3.9/site-packages/setuptools-60.5.0.dist-info/RECORD create mode 100644 .venv/lib/python3.9/site-packages/setuptools-60.5.0.dist-info/REQUESTED create mode 100644 .venv/lib/python3.9/site-packages/setuptools-60.5.0.dist-info/WHEEL create mode 100644 .venv/lib/python3.9/site-packages/setuptools-60.5.0.dist-info/entry_points.txt create mode 100644 .venv/lib/python3.9/site-packages/setuptools-60.5.0.dist-info/top_level.txt create mode 100644 .venv/lib/python3.9/site-packages/setuptools/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_deprecation_warning.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/_collections.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/_msvccompiler.py create mode 100644 
.venv/lib/python3.9/site-packages/setuptools/_distutils/archive_util.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/bcppcompiler.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/ccompiler.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/cmd.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/command/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/command/bdist.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/command/bdist_dumb.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/command/bdist_msi.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/command/bdist_rpm.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/command/bdist_wininst.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/command/build.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/command/build_clib.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/command/build_ext.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/command/build_py.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/command/build_scripts.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/command/check.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/command/clean.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/command/config.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/command/install.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/command/install_data.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/command/install_egg_info.py create mode 100644 
.venv/lib/python3.9/site-packages/setuptools/_distutils/command/install_headers.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/command/install_lib.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/command/install_scripts.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/command/py37compat.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/command/register.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/command/sdist.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/command/upload.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/config.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/core.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/cygwinccompiler.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/debug.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/dep_util.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/dir_util.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/dist.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/errors.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/extension.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/fancy_getopt.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/file_util.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/filelist.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/log.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/msvc9compiler.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/msvccompiler.py create mode 100644 
.venv/lib/python3.9/site-packages/setuptools/_distutils/py35compat.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/py38compat.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/spawn.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/sysconfig.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/text_file.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/unixccompiler.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/util.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/version.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_distutils/versionpredicate.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_imp.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_vendor/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_vendor/more_itertools/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_vendor/more_itertools/more.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_vendor/more_itertools/recipes.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_vendor/ordered_set.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_vendor/packaging/__about__.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_vendor/packaging/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_vendor/packaging/_manylinux.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_vendor/packaging/_musllinux.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_vendor/packaging/_structures.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_vendor/packaging/markers.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_vendor/packaging/requirements.py create mode 100644 
.venv/lib/python3.9/site-packages/setuptools/_vendor/packaging/specifiers.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_vendor/packaging/tags.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_vendor/packaging/utils.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_vendor/packaging/version.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/_vendor/pyparsing.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/archive_util.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/build_meta.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/cli-32.exe create mode 100644 .venv/lib/python3.9/site-packages/setuptools/cli-64.exe create mode 100644 .venv/lib/python3.9/site-packages/setuptools/cli-arm64.exe create mode 100644 .venv/lib/python3.9/site-packages/setuptools/cli.exe create mode 100644 .venv/lib/python3.9/site-packages/setuptools/command/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/command/alias.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/command/bdist_egg.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/command/bdist_rpm.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/command/build_clib.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/command/build_ext.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/command/build_py.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/command/develop.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/command/dist_info.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/command/easy_install.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/command/egg_info.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/command/install.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/command/install_egg_info.py 
create mode 100644 .venv/lib/python3.9/site-packages/setuptools/command/install_lib.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/command/install_scripts.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/command/launcher manifest.xml create mode 100644 .venv/lib/python3.9/site-packages/setuptools/command/py36compat.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/command/register.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/command/rotate.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/command/saveopts.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/command/sdist.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/command/setopt.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/command/test.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/command/upload.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/command/upload_docs.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/config.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/dep_util.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/depends.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/dist.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/errors.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/extension.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/extern/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/glob.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/gui-32.exe create mode 100644 .venv/lib/python3.9/site-packages/setuptools/gui-64.exe create mode 100644 .venv/lib/python3.9/site-packages/setuptools/gui-arm64.exe create mode 100644 .venv/lib/python3.9/site-packages/setuptools/gui.exe create mode 100644 .venv/lib/python3.9/site-packages/setuptools/installer.py 
create mode 100644 .venv/lib/python3.9/site-packages/setuptools/launch.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/logging.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/monkey.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/msvc.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/namespaces.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/package_index.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/py34compat.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/sandbox.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/script (dev).tmpl create mode 100644 .venv/lib/python3.9/site-packages/setuptools/script.tmpl create mode 100644 .venv/lib/python3.9/site-packages/setuptools/tests/integration/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/tests/integration/helpers.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/tests/integration/test_pip_install_sdist.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/unicode_utils.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/version.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/wheel.py create mode 100644 .venv/lib/python3.9/site-packages/setuptools/windows_support.py create mode 100644 .venv/lib/python3.9/site-packages/sniffio-1.2.0.dist-info/INSTALLER create mode 100644 .venv/lib/python3.9/site-packages/sniffio-1.2.0.dist-info/LICENSE create mode 100644 .venv/lib/python3.9/site-packages/sniffio-1.2.0.dist-info/LICENSE.APACHE2 create mode 100644 .venv/lib/python3.9/site-packages/sniffio-1.2.0.dist-info/LICENSE.MIT create mode 100644 .venv/lib/python3.9/site-packages/sniffio-1.2.0.dist-info/METADATA create mode 100644 .venv/lib/python3.9/site-packages/sniffio-1.2.0.dist-info/RECORD create mode 100644 .venv/lib/python3.9/site-packages/sniffio-1.2.0.dist-info/WHEEL create mode 100644 
.venv/lib/python3.9/site-packages/sniffio-1.2.0.dist-info/top_level.txt create mode 100644 .venv/lib/python3.9/site-packages/sniffio/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/sniffio/_impl.py create mode 100644 .venv/lib/python3.9/site-packages/sniffio/_tests/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/sniffio/_tests/test_sniffio.py create mode 100644 .venv/lib/python3.9/site-packages/sniffio/_version.py create mode 100644 .venv/lib/python3.9/site-packages/sniffio/py.typed create mode 100644 .venv/lib/python3.9/site-packages/solana-0.23.0.dist-info/INSTALLER create mode 100644 .venv/lib/python3.9/site-packages/solana-0.23.0.dist-info/LICENSE create mode 100644 .venv/lib/python3.9/site-packages/solana-0.23.0.dist-info/METADATA create mode 100644 .venv/lib/python3.9/site-packages/solana-0.23.0.dist-info/RECORD create mode 100644 .venv/lib/python3.9/site-packages/solana-0.23.0.dist-info/REQUESTED create mode 100644 .venv/lib/python3.9/site-packages/solana-0.23.0.dist-info/WHEEL create mode 100644 .venv/lib/python3.9/site-packages/solana/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/solana/_layouts/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/solana/_layouts/account.py create mode 100644 .venv/lib/python3.9/site-packages/solana/_layouts/shared.py create mode 100644 .venv/lib/python3.9/site-packages/solana/_layouts/system_instructions.py create mode 100644 .venv/lib/python3.9/site-packages/solana/_layouts/vote_instructions.py create mode 100644 .venv/lib/python3.9/site-packages/solana/account.py create mode 100644 .venv/lib/python3.9/site-packages/solana/blockhash.py create mode 100644 .venv/lib/python3.9/site-packages/solana/exceptions.py create mode 100644 .venv/lib/python3.9/site-packages/solana/instruction.py create mode 100644 .venv/lib/python3.9/site-packages/solana/keypair.py create mode 100644 .venv/lib/python3.9/site-packages/solana/message.py create mode 100644 
.venv/lib/python3.9/site-packages/solana/publickey.py create mode 100644 .venv/lib/python3.9/site-packages/solana/py.typed create mode 100644 .venv/lib/python3.9/site-packages/solana/rpc/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/solana/rpc/_utils/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/solana/rpc/_utils/encoding.py create mode 100644 .venv/lib/python3.9/site-packages/solana/rpc/api.py create mode 100644 .venv/lib/python3.9/site-packages/solana/rpc/async_api.py create mode 100644 .venv/lib/python3.9/site-packages/solana/rpc/commitment.py create mode 100644 .venv/lib/python3.9/site-packages/solana/rpc/core.py create mode 100644 .venv/lib/python3.9/site-packages/solana/rpc/providers/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/solana/rpc/providers/async_base.py create mode 100644 .venv/lib/python3.9/site-packages/solana/rpc/providers/async_http.py create mode 100644 .venv/lib/python3.9/site-packages/solana/rpc/providers/base.py create mode 100644 .venv/lib/python3.9/site-packages/solana/rpc/providers/core.py create mode 100644 .venv/lib/python3.9/site-packages/solana/rpc/providers/http.py create mode 100644 .venv/lib/python3.9/site-packages/solana/rpc/request_builder.py create mode 100644 .venv/lib/python3.9/site-packages/solana/rpc/responses.py create mode 100644 .venv/lib/python3.9/site-packages/solana/rpc/types.py create mode 100644 .venv/lib/python3.9/site-packages/solana/rpc/websocket_api.py create mode 100644 .venv/lib/python3.9/site-packages/solana/system_program.py create mode 100644 .venv/lib/python3.9/site-packages/solana/sysvar.py create mode 100644 .venv/lib/python3.9/site-packages/solana/transaction.py create mode 100644 .venv/lib/python3.9/site-packages/solana/utils/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/solana/utils/cluster.py create mode 100644 .venv/lib/python3.9/site-packages/solana/utils/ed25519_base.py create mode 100644 
.venv/lib/python3.9/site-packages/solana/utils/helpers.py create mode 100644 .venv/lib/python3.9/site-packages/solana/utils/shortvec_encoding.py create mode 100644 .venv/lib/python3.9/site-packages/solana/utils/validate.py create mode 100644 .venv/lib/python3.9/site-packages/solana/vote_program.py create mode 100644 .venv/lib/python3.9/site-packages/spl/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/spl/py.typed create mode 100644 .venv/lib/python3.9/site-packages/spl/token/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/spl/token/_layouts.py create mode 100644 .venv/lib/python3.9/site-packages/spl/token/async_client.py create mode 100644 .venv/lib/python3.9/site-packages/spl/token/client.py create mode 100644 .venv/lib/python3.9/site-packages/spl/token/constants.py create mode 100644 .venv/lib/python3.9/site-packages/spl/token/core.py create mode 100644 .venv/lib/python3.9/site-packages/spl/token/instructions.py create mode 100644 .venv/lib/python3.9/site-packages/types_cachetools-4.2.10.dist-info/INSTALLER create mode 100644 .venv/lib/python3.9/site-packages/types_cachetools-4.2.10.dist-info/METADATA create mode 100644 .venv/lib/python3.9/site-packages/types_cachetools-4.2.10.dist-info/RECORD create mode 100644 .venv/lib/python3.9/site-packages/types_cachetools-4.2.10.dist-info/WHEEL create mode 100644 .venv/lib/python3.9/site-packages/types_cachetools-4.2.10.dist-info/top_level.txt create mode 100644 .venv/lib/python3.9/site-packages/typing_extensions-3.10.0.2.dist-info/INSTALLER create mode 100644 .venv/lib/python3.9/site-packages/typing_extensions-3.10.0.2.dist-info/LICENSE create mode 100644 .venv/lib/python3.9/site-packages/typing_extensions-3.10.0.2.dist-info/METADATA create mode 100644 .venv/lib/python3.9/site-packages/typing_extensions-3.10.0.2.dist-info/RECORD create mode 100644 .venv/lib/python3.9/site-packages/typing_extensions-3.10.0.2.dist-info/WHEEL create mode 100644 
.venv/lib/python3.9/site-packages/typing_extensions-3.10.0.2.dist-info/top_level.txt create mode 100644 .venv/lib/python3.9/site-packages/typing_extensions.py create mode 100644 .venv/lib/python3.9/site-packages/urllib3-1.26.9.dist-info/INSTALLER create mode 100644 .venv/lib/python3.9/site-packages/urllib3-1.26.9.dist-info/LICENSE.txt create mode 100644 .venv/lib/python3.9/site-packages/urllib3-1.26.9.dist-info/METADATA create mode 100644 .venv/lib/python3.9/site-packages/urllib3-1.26.9.dist-info/RECORD create mode 100644 .venv/lib/python3.9/site-packages/urllib3-1.26.9.dist-info/WHEEL create mode 100644 .venv/lib/python3.9/site-packages/urllib3-1.26.9.dist-info/top_level.txt create mode 100644 .venv/lib/python3.9/site-packages/urllib3/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/urllib3/_collections.py create mode 100644 .venv/lib/python3.9/site-packages/urllib3/_version.py create mode 100644 .venv/lib/python3.9/site-packages/urllib3/connection.py create mode 100644 .venv/lib/python3.9/site-packages/urllib3/connectionpool.py create mode 100644 .venv/lib/python3.9/site-packages/urllib3/contrib/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/urllib3/contrib/_appengine_environ.py create mode 100644 .venv/lib/python3.9/site-packages/urllib3/contrib/_securetransport/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/urllib3/contrib/_securetransport/bindings.py create mode 100644 .venv/lib/python3.9/site-packages/urllib3/contrib/_securetransport/low_level.py create mode 100644 .venv/lib/python3.9/site-packages/urllib3/contrib/appengine.py create mode 100644 .venv/lib/python3.9/site-packages/urllib3/contrib/ntlmpool.py create mode 100644 .venv/lib/python3.9/site-packages/urllib3/contrib/pyopenssl.py create mode 100644 .venv/lib/python3.9/site-packages/urllib3/contrib/securetransport.py create mode 100644 .venv/lib/python3.9/site-packages/urllib3/contrib/socks.py create mode 100644 
.venv/lib/python3.9/site-packages/urllib3/exceptions.py create mode 100644 .venv/lib/python3.9/site-packages/urllib3/fields.py create mode 100644 .venv/lib/python3.9/site-packages/urllib3/filepost.py create mode 100644 .venv/lib/python3.9/site-packages/urllib3/packages/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/urllib3/packages/backports/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/urllib3/packages/backports/makefile.py create mode 100644 .venv/lib/python3.9/site-packages/urllib3/packages/six.py create mode 100644 .venv/lib/python3.9/site-packages/urllib3/poolmanager.py create mode 100644 .venv/lib/python3.9/site-packages/urllib3/request.py create mode 100644 .venv/lib/python3.9/site-packages/urllib3/response.py create mode 100644 .venv/lib/python3.9/site-packages/urllib3/util/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/urllib3/util/connection.py create mode 100644 .venv/lib/python3.9/site-packages/urllib3/util/proxy.py create mode 100644 .venv/lib/python3.9/site-packages/urllib3/util/queue.py create mode 100644 .venv/lib/python3.9/site-packages/urllib3/util/request.py create mode 100644 .venv/lib/python3.9/site-packages/urllib3/util/response.py create mode 100644 .venv/lib/python3.9/site-packages/urllib3/util/retry.py create mode 100644 .venv/lib/python3.9/site-packages/urllib3/util/ssl_.py create mode 100644 .venv/lib/python3.9/site-packages/urllib3/util/ssl_match_hostname.py create mode 100644 .venv/lib/python3.9/site-packages/urllib3/util/ssltransport.py create mode 100644 .venv/lib/python3.9/site-packages/urllib3/util/timeout.py create mode 100644 .venv/lib/python3.9/site-packages/urllib3/util/url.py create mode 100644 .venv/lib/python3.9/site-packages/urllib3/util/wait.py create mode 100644 .venv/lib/python3.9/site-packages/websockets-10.2.dist-info/INSTALLER create mode 100644 .venv/lib/python3.9/site-packages/websockets-10.2.dist-info/LICENSE create mode 100644 
.venv/lib/python3.9/site-packages/websockets-10.2.dist-info/METADATA create mode 100644 .venv/lib/python3.9/site-packages/websockets-10.2.dist-info/RECORD create mode 100644 .venv/lib/python3.9/site-packages/websockets-10.2.dist-info/WHEEL create mode 100644 .venv/lib/python3.9/site-packages/websockets-10.2.dist-info/top_level.txt create mode 100644 .venv/lib/python3.9/site-packages/websockets/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/websockets/__main__.py create mode 100644 .venv/lib/python3.9/site-packages/websockets/auth.py create mode 100644 .venv/lib/python3.9/site-packages/websockets/client.py create mode 100644 .venv/lib/python3.9/site-packages/websockets/connection.py create mode 100644 .venv/lib/python3.9/site-packages/websockets/datastructures.py create mode 100644 .venv/lib/python3.9/site-packages/websockets/exceptions.py create mode 100644 .venv/lib/python3.9/site-packages/websockets/extensions/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/websockets/extensions/base.py create mode 100644 .venv/lib/python3.9/site-packages/websockets/extensions/permessage_deflate.py create mode 100644 .venv/lib/python3.9/site-packages/websockets/frames.py create mode 100644 .venv/lib/python3.9/site-packages/websockets/headers.py create mode 100644 .venv/lib/python3.9/site-packages/websockets/http.py create mode 100644 .venv/lib/python3.9/site-packages/websockets/http11.py create mode 100644 .venv/lib/python3.9/site-packages/websockets/imports.py create mode 100644 .venv/lib/python3.9/site-packages/websockets/legacy/__init__.py create mode 100644 .venv/lib/python3.9/site-packages/websockets/legacy/auth.py create mode 100644 .venv/lib/python3.9/site-packages/websockets/legacy/client.py create mode 100644 .venv/lib/python3.9/site-packages/websockets/legacy/compatibility.py create mode 100644 .venv/lib/python3.9/site-packages/websockets/legacy/framing.py create mode 100644 
.venv/lib/python3.9/site-packages/websockets/legacy/handshake.py create mode 100644 .venv/lib/python3.9/site-packages/websockets/legacy/http.py create mode 100644 .venv/lib/python3.9/site-packages/websockets/legacy/protocol.py create mode 100644 .venv/lib/python3.9/site-packages/websockets/legacy/server.py create mode 100644 .venv/lib/python3.9/site-packages/websockets/py.typed create mode 100644 .venv/lib/python3.9/site-packages/websockets/server.py create mode 100644 .venv/lib/python3.9/site-packages/websockets/speedups.c create mode 100755 .venv/lib/python3.9/site-packages/websockets/speedups.cpython-39-darwin.so create mode 100644 .venv/lib/python3.9/site-packages/websockets/streams.py create mode 100644 .venv/lib/python3.9/site-packages/websockets/typing.py create mode 100644 .venv/lib/python3.9/site-packages/websockets/uri.py create mode 100644 .venv/lib/python3.9/site-packages/websockets/utils.py create mode 100644 .venv/lib/python3.9/site-packages/websockets/version.py create mode 100644 .venv/pyvenv.cfg create mode 100755 app.py diff --git a/.envrc b/.envrc new file mode 100644 index 0000000..2caf9c9 --- /dev/null +++ b/.envrc @@ -0,0 +1,3 @@ +python3 -m venv .venv +source ./.venv/bin/activate + diff --git a/.venv/bin/Activate.ps1 b/.venv/bin/Activate.ps1 new file mode 100644 index 0000000..9d3646a --- /dev/null +++ b/.venv/bin/Activate.ps1 @@ -0,0 +1,241 @@ +<# +.Synopsis +Activate a Python virtual environment for the current PowerShell session. + +.Description +Pushes the python executable for a virtual environment to the front of the +$Env:PATH environment variable and sets the prompt to signify that you are +in a Python virtual environment. Makes use of the command line switches as +well as the `pyvenv.cfg` file values present in the virtual environment. + +.Parameter VenvDir +Path to the directory that contains the virtual environment to activate. 
The +default value for this is the parent of the directory that the Activate.ps1 +script is located within. + +.Parameter Prompt +The prompt prefix to display when this virtual environment is activated. By +default, this prompt is the name of the virtual environment folder (VenvDir) +surrounded by parentheses and followed by a single space (ie. '(.venv) '). + +.Example +Activate.ps1 +Activates the Python virtual environment that contains the Activate.ps1 script. + +.Example +Activate.ps1 -Verbose +Activates the Python virtual environment that contains the Activate.ps1 script, +and shows extra information about the activation as it executes. + +.Example +Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv +Activates the Python virtual environment located in the specified location. + +.Example +Activate.ps1 -Prompt "MyPython" +Activates the Python virtual environment that contains the Activate.ps1 script, +and prefixes the current prompt with the specified string (surrounded in +parentheses) while the virtual environment is active. + +.Notes +On Windows, it may be required to enable this Activate.ps1 script by setting the +execution policy for the user. You can do this by issuing the following PowerShell +command: + +PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser + +For more information on Execution Policies: +https://go.microsoft.com/fwlink/?LinkID=135170 + +#> +Param( + [Parameter(Mandatory = $false)] + [String] + $VenvDir, + [Parameter(Mandatory = $false)] + [String] + $Prompt +) + +<# Function declarations --------------------------------------------------- #> + +<# +.Synopsis +Remove all shell session elements added by the Activate script, including the +addition of the virtual environment's Python executable from the beginning of +the PATH variable. + +.Parameter NonDestructive +If present, do not remove this function from the global namespace for the +session. 
+ +#> +function global:deactivate ([switch]$NonDestructive) { + # Revert to original values + + # The prior prompt: + if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) { + Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt + Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT + } + + # The prior PYTHONHOME: + if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) { + Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME + Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME + } + + # The prior PATH: + if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) { + Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH + Remove-Item -Path Env:_OLD_VIRTUAL_PATH + } + + # Just remove the VIRTUAL_ENV altogether: + if (Test-Path -Path Env:VIRTUAL_ENV) { + Remove-Item -Path env:VIRTUAL_ENV + } + + # Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether: + if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) { + Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force + } + + # Leave deactivate function in the global namespace if requested: + if (-not $NonDestructive) { + Remove-Item -Path function:deactivate + } +} + +<# +.Description +Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the +given folder, and returns them in a map. + +For each line in the pyvenv.cfg file, if that line can be parsed into exactly +two strings separated by `=` (with any amount of whitespace surrounding the =) +then it is considered a `key = value` line. The left hand string is the key, +the right hand is the value. + +If the value starts with a `'` or a `"` then the first and last character is +stripped from the value before being captured. + +.Parameter ConfigDir +Path to the directory that contains the `pyvenv.cfg` file. 
+#> +function Get-PyVenvConfig( + [String] + $ConfigDir +) { + Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg" + + # Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue). + $pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue + + # An empty map will be returned if no config file is found. + $pyvenvConfig = @{ } + + if ($pyvenvConfigPath) { + + Write-Verbose "File exists, parse `key = value` lines" + $pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath + + $pyvenvConfigContent | ForEach-Object { + $keyval = $PSItem -split "\s*=\s*", 2 + if ($keyval[0] -and $keyval[1]) { + $val = $keyval[1] + + # Remove extraneous quotations around a string value. + if ("'""".Contains($val.Substring(0, 1))) { + $val = $val.Substring(1, $val.Length - 2) + } + + $pyvenvConfig[$keyval[0]] = $val + Write-Verbose "Adding Key: '$($keyval[0])'='$val'" + } + } + } + return $pyvenvConfig +} + + +<# Begin Activate script --------------------------------------------------- #> + +# Determine the containing directory of this script +$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition +$VenvExecDir = Get-Item -Path $VenvExecPath + +Write-Verbose "Activation script is located in path: '$VenvExecPath'" +Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)" +Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)" + +# Set values required in priority: CmdLine, ConfigFile, Default +# First, get the location of the virtual environment, it might not be +# VenvExecDir if specified on the command line. +if ($VenvDir) { + Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values" +} +else { + Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir." 
+ $VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/") + Write-Verbose "VenvDir=$VenvDir" +} + +# Next, read the `pyvenv.cfg` file to determine any required value such +# as `prompt`. +$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir + +# Next, set the prompt from the command line, or the config file, or +# just use the name of the virtual environment folder. +if ($Prompt) { + Write-Verbose "Prompt specified as argument, using '$Prompt'" +} +else { + Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value" + if ($pyvenvCfg -and $pyvenvCfg['prompt']) { + Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'" + $Prompt = $pyvenvCfg['prompt']; + } + else { + Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)" + Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'" + $Prompt = Split-Path -Path $venvDir -Leaf + } +} + +Write-Verbose "Prompt = '$Prompt'" +Write-Verbose "VenvDir='$VenvDir'" + +# Deactivate any currently active virtual environment, but leave the +# deactivate function in place. +deactivate -nondestructive + +# Now set the environment variable VIRTUAL_ENV, used by many tools to determine +# that there is an activated venv. 
+$env:VIRTUAL_ENV = $VenvDir + +if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) { + + Write-Verbose "Setting prompt to '$Prompt'" + + # Set the prompt to include the env name + # Make sure _OLD_VIRTUAL_PROMPT is global + function global:_OLD_VIRTUAL_PROMPT { "" } + Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT + New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt + + function global:prompt { + Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) " + _OLD_VIRTUAL_PROMPT + } +} + +# Clear PYTHONHOME +if (Test-Path -Path Env:PYTHONHOME) { + Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME + Remove-Item -Path Env:PYTHONHOME +} + +# Add the venv to the PATH +Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH +$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH" diff --git a/.venv/bin/activate b/.venv/bin/activate new file mode 100644 index 0000000..d8617fb --- /dev/null +++ b/.venv/bin/activate @@ -0,0 +1,66 @@ +# This file must be used with "source bin/activate" *from bash* +# you cannot run it directly + +deactivate () { + # reset old environment variables + if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then + PATH="${_OLD_VIRTUAL_PATH:-}" + export PATH + unset _OLD_VIRTUAL_PATH + fi + if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then + PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}" + export PYTHONHOME + unset _OLD_VIRTUAL_PYTHONHOME + fi + + # This should detect bash and zsh, which have a hash command that must + # be called to get it to forget past commands. Without forgetting + # past commands the $PATH changes we made may not be respected + if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then + hash -r 2> /dev/null + fi + + if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then + PS1="${_OLD_VIRTUAL_PS1:-}" + export PS1 + unset _OLD_VIRTUAL_PS1 + fi + + unset VIRTUAL_ENV + if [ ! 
"${1:-}" = "nondestructive" ] ; then + # Self destruct! + unset -f deactivate + fi +} + +# unset irrelevant variables +deactivate nondestructive + +VIRTUAL_ENV="/Users/mwiegand/Projekte/blockirgendwaschaindings/.venv" +export VIRTUAL_ENV + +_OLD_VIRTUAL_PATH="$PATH" +PATH="$VIRTUAL_ENV/bin:$PATH" +export PATH + +# unset PYTHONHOME if set +# this will fail if PYTHONHOME is set to the empty string (which is bad anyway) +# could use `if (set -u; : $PYTHONHOME) ;` in bash +if [ -n "${PYTHONHOME:-}" ] ; then + _OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}" + unset PYTHONHOME +fi + +if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then + _OLD_VIRTUAL_PS1="${PS1:-}" + PS1="(.venv) ${PS1:-}" + export PS1 +fi + +# This should detect bash and zsh, which have a hash command that must +# be called to get it to forget past commands. Without forgetting +# past commands the $PATH changes we made may not be respected +if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then + hash -r 2> /dev/null +fi diff --git a/.venv/bin/activate.csh b/.venv/bin/activate.csh new file mode 100644 index 0000000..cb3aba5 --- /dev/null +++ b/.venv/bin/activate.csh @@ -0,0 +1,25 @@ +# This file must be used with "source bin/activate.csh" *from csh*. +# You cannot run it directly. +# Created by Davide Di Blasi . +# Ported to Python 3.3 venv by Andrew Svetlov + +alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate' + +# Unset irrelevant variables. +deactivate nondestructive + +setenv VIRTUAL_ENV "/Users/mwiegand/Projekte/blockirgendwaschaindings/.venv" + +set _OLD_VIRTUAL_PATH="$PATH" +setenv PATH "$VIRTUAL_ENV/bin:$PATH" + + +set _OLD_VIRTUAL_PROMPT="$prompt" + +if (! 
"$?VIRTUAL_ENV_DISABLE_PROMPT") then + set prompt = "(.venv) $prompt" +endif + +alias pydoc python -m pydoc + +rehash diff --git a/.venv/bin/activate.fish b/.venv/bin/activate.fish new file mode 100644 index 0000000..5b7813f --- /dev/null +++ b/.venv/bin/activate.fish @@ -0,0 +1,64 @@ +# This file must be used with "source /bin/activate.fish" *from fish* +# (https://fishshell.com/); you cannot run it directly. + +function deactivate -d "Exit virtual environment and return to normal shell environment" + # reset old environment variables + if test -n "$_OLD_VIRTUAL_PATH" + set -gx PATH $_OLD_VIRTUAL_PATH + set -e _OLD_VIRTUAL_PATH + end + if test -n "$_OLD_VIRTUAL_PYTHONHOME" + set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME + set -e _OLD_VIRTUAL_PYTHONHOME + end + + if test -n "$_OLD_FISH_PROMPT_OVERRIDE" + functions -e fish_prompt + set -e _OLD_FISH_PROMPT_OVERRIDE + functions -c _old_fish_prompt fish_prompt + functions -e _old_fish_prompt + end + + set -e VIRTUAL_ENV + if test "$argv[1]" != "nondestructive" + # Self-destruct! + functions -e deactivate + end +end + +# Unset irrelevant variables. +deactivate nondestructive + +set -gx VIRTUAL_ENV "/Users/mwiegand/Projekte/blockirgendwaschaindings/.venv" + +set -gx _OLD_VIRTUAL_PATH $PATH +set -gx PATH "$VIRTUAL_ENV/bin" $PATH + +# Unset PYTHONHOME if set. +if set -q PYTHONHOME + set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME + set -e PYTHONHOME +end + +if test -z "$VIRTUAL_ENV_DISABLE_PROMPT" + # fish uses a function instead of an env var to generate the prompt. + + # Save the current fish_prompt function as the function _old_fish_prompt. + functions -c fish_prompt _old_fish_prompt + + # With the original prompt function renamed, we can override with our own. + function fish_prompt + # Save the return status of the last command. + set -l old_status $status + + # Output the venv prompt; color taken from the blue of the Python logo. 
+ printf "%s%s%s" (set_color 4B8BBE) "(.venv) " (set_color normal) + + # Restore the return status of the previous command. + echo "exit $old_status" | . + # Output the original/"old" prompt. + _old_fish_prompt + end + + set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV" +end diff --git a/.venv/bin/jsonschema b/.venv/bin/jsonschema new file mode 100755 index 0000000..f982aa3 --- /dev/null +++ b/.venv/bin/jsonschema @@ -0,0 +1,8 @@ +#!/Users/mwiegand/Projekte/blockirgendwaschaindings/.venv/bin/python3.9 +# -*- coding: utf-8 -*- +import re +import sys +from jsonschema.cli import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/.venv/bin/normalizer b/.venv/bin/normalizer new file mode 100755 index 0000000..4337c73 --- /dev/null +++ b/.venv/bin/normalizer @@ -0,0 +1,8 @@ +#!/Users/mwiegand/Projekte/blockirgendwaschaindings/.venv/bin/python3.9 +# -*- coding: utf-8 -*- +import re +import sys +from charset_normalizer.cli.normalizer import cli_detect +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(cli_detect()) diff --git a/.venv/bin/pip b/.venv/bin/pip new file mode 100755 index 0000000..133313b --- /dev/null +++ b/.venv/bin/pip @@ -0,0 +1,8 @@ +#!/Users/mwiegand/Projekte/blockirgendwaschaindings/.venv/bin/python3.9 +# -*- coding: utf-8 -*- +import re +import sys +from pip._internal.cli.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/.venv/bin/pip3 b/.venv/bin/pip3 new file mode 100755 index 0000000..133313b --- /dev/null +++ b/.venv/bin/pip3 @@ -0,0 +1,8 @@ +#!/Users/mwiegand/Projekte/blockirgendwaschaindings/.venv/bin/python3.9 +# -*- coding: utf-8 -*- +import re +import sys +from pip._internal.cli.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git 
a/.venv/bin/pip3.9 b/.venv/bin/pip3.9 new file mode 100755 index 0000000..133313b --- /dev/null +++ b/.venv/bin/pip3.9 @@ -0,0 +1,8 @@ +#!/Users/mwiegand/Projekte/blockirgendwaschaindings/.venv/bin/python3.9 +# -*- coding: utf-8 -*- +import re +import sys +from pip._internal.cli.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/.venv/bin/python b/.venv/bin/python new file mode 120000 index 0000000..e616d26 --- /dev/null +++ b/.venv/bin/python @@ -0,0 +1 @@ +python3.9 \ No newline at end of file diff --git a/.venv/bin/python3 b/.venv/bin/python3 new file mode 120000 index 0000000..e616d26 --- /dev/null +++ b/.venv/bin/python3 @@ -0,0 +1 @@ +python3.9 \ No newline at end of file diff --git a/.venv/bin/python3.9 b/.venv/bin/python3.9 new file mode 120000 index 0000000..9c670a4 --- /dev/null +++ b/.venv/bin/python3.9 @@ -0,0 +1 @@ +/usr/local/opt/python@3.9/bin/python3.9 \ No newline at end of file diff --git a/.venv/lib/python3.9/site-packages/OSlash-0.6.3.dist-info/INSTALLER b/.venv/lib/python3.9/site-packages/OSlash-0.6.3.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/OSlash-0.6.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.9/site-packages/OSlash-0.6.3.dist-info/LICENSE b/.venv/lib/python3.9/site-packages/OSlash-0.6.3.dist-info/LICENSE new file mode 100644 index 0000000..d112d04 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/OSlash-0.6.3.dist-info/LICENSE @@ -0,0 +1,20 @@ +Copyright 2020 Dag Brattli + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the 
Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/.venv/lib/python3.9/site-packages/OSlash-0.6.3.dist-info/METADATA b/.venv/lib/python3.9/site-packages/OSlash-0.6.3.dist-info/METADATA new file mode 100644 index 0000000..76ed2d7 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/OSlash-0.6.3.dist-info/METADATA @@ -0,0 +1,128 @@ +Metadata-Version: 2.1 +Name: OSlash +Version: 0.6.3 +Summary: OSlash (Ø) for Python 3.8+ +Home-page: https://github.com/dbrattli/oslash +Author: Dag Brattli +Author-email: dag@brattli.net +License: MIT License +Download-URL: https://github.com/dbrattli/oslash +Platform: UNKNOWN +Classifier: Development Status :: 4 - Beta +Classifier: Environment :: Other Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Description-Content-Type: text/markdown +Requires-Dist: typing-extensions + +# Functors, Applicatives, And Monads in Python + +![Python package](https://github.com/dbrattli/OSlash/workflows/Python%20package/badge.svg) + +OSlash (Ø) is a library for playing with functional programming in Python 3.8+. 
It's an attempt to re-implement some of +the code from [Learn You a Haskell for Great Good!](http://learnyouahaskell.com/) in Python 3.8. OSlash unifies +functional and object oriented paradigms by grouping related functions within classes. Objects are however never used +for storing values or mutable data, and data only lives within function closures. + +OSlash is intended to be a tutorial. For practical functional programming in Python in production environments you +should use [FSlash](https://github.com/dbrattli/fslash) instead. + +## Install + +```bash +> pip3 install oslash +``` + +The project currently contains implementations for: + +## Abstract Base Classes + +- **[Functor](https://github.com/dbrattli/OSlash/wiki/Functors,-Applicatives,-And-Monads-In-Pictures#functors)**, for stuff that can be mapped +- **[Applicative](https://github.com/dbrattli/OSlash/wiki/Functors,-Applicatives,-And-Monads-In-Pictures#applicatives)**, for callable stuff +- **Monoid**, for associative stuff +- **[Monad](https://github.com/dbrattli/OSlash/wiki/Functors,-Applicatives,-And-Monads-In-Pictures#monads)**, for monadic stuff + +## And Some Monads + +- **Identity**, boxed stuff in its simplest form +- **[Maybe (Just | Nothing)](https://github.com/dbrattli/oslash/wiki/Functors,-Applicatives,-And-Monads-In-Pictures)**, for optional stuff +- **Either (Right | Left)**, for possible failures +- **List**, purely functional list of stuff +- **[IO Action](https://github.com/dbrattli/OSlash/wiki/Functors,-Applicatives,-And-Monads-In-Pictures#io-monad)**, for impure stuff +- **[Writer](https://github.com/dbrattli/OSlash/wiki/Three-Useful-Monads#the-writer-monad)**, for logging stuff +- **[Reader](https://github.com/dbrattli/OSlash/wiki/Three-Useful-Monads#the-reader-monad)**, for callable stuff +- **State**, for stateful computations of stuff +- **Cont**, for continuation of stuff + +## Monadic functions + +- **>>**, for sequencing monadic actions +- **lift**, for mapping a function over 
monadic values +- **join**, for removing one level of monadic structure +- **compose**, for composing monadic functions + +## Utility functions + +- **compose**, for composing 0 to n functions + +## But why? + +Yes, I know there are other projects out there like [PyMonad](https://bitbucket.org/jason_delaat/pymonad/), +[fn.py](https://github.com/kachayev/fn.py). I'm simply doing this in order to better understand the +[book](http://learnyouahaskell.com/). It's so much easier to learn when you implement things yourself. The code may be +similar to PyMonad in structure, but is quite different in implementation. + +Why is the project called OSlash? OSlash is the Norwegian character called [Oslash](http://en.wikipedia.org/wiki/Ø). +Initially I wanted to create a project that used Ø and ø (unicode) for the project name and modules. It didn't work out +well, so I renamed it to OSlash. + +## Examples + +Haskell: + +```haskell +> fmap (+3) (Just 2) +Just 5 + +> (+3) <$> (Just 2) +Just 5 +``` + +Python: + +```python +>>> Just(2).map(lambda x: x+3) +Just 5 + +>>> (lambda x: x+3) % Just(2) +Just 5 + +``` + +IO Actions: + +```python +from oslash import put_line, get_line + +main = put_line("What is your name?") | (lambda _: + get_line() | (lambda name: + put_line("What is your age?") | (lambda _: + get_line() | (lambda age: + put_line("Hello " + name + "!") | (lambda _: + put_line("You are " + age + " years old")))))) + +if __name__ == "__main__": + main() +``` + +## Tutorials + +- [Functors, Applicatives, And Monads In Pictures](https://github.com/dbrattli/oslash/wiki/Functors,-Applicatives,-And-Monads-In-Pictures) in Python. 
+- [Three Useful Monads](https://github.com/dbrattli/OSlash/wiki/Three-Useful-Monads) _(in progress)_ +- [Using Either monad in Python](https://medium.com/@rnesytov/using-either-monad-in-python-b6eac698dff5) + + diff --git a/.venv/lib/python3.9/site-packages/OSlash-0.6.3.dist-info/RECORD b/.venv/lib/python3.9/site-packages/OSlash-0.6.3.dist-info/RECORD new file mode 100644 index 0000000..7a46308 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/OSlash-0.6.3.dist-info/RECORD @@ -0,0 +1,53 @@ +OSlash-0.6.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +OSlash-0.6.3.dist-info/LICENSE,sha256=4Nj7npUZIOpByuFd2Y4NnLIEHToVvcRRBeD-HAmgVuI,1050 +OSlash-0.6.3.dist-info/METADATA,sha256=Y7u5wnr-CHjNKb8ZzLGkhuGuPVCYI8LpRH-CVabi-0I,4740 +OSlash-0.6.3.dist-info/RECORD,, +OSlash-0.6.3.dist-info/WHEEL,sha256=EVRjI69F5qVjm_YgqcTXPnTAv3BfSUr0WVAHuSP3Xoo,92 +OSlash-0.6.3.dist-info/top_level.txt,sha256=fncxVNYZeY5RRUGVK-06Zrh1NxnF_tb1Xv0aT8L8Q6s,7 +OSlash-0.6.3.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +oslash/__init__.py,sha256=8XYliRj0rScyHp6kEGQ-apAb4RFt3k9sJKdrdqyUoik,606 +oslash/__pycache__/__init__.cpython-39.pyc,, +oslash/__pycache__/_version.cpython-39.pyc,, +oslash/__pycache__/cont.cpython-39.pyc,, +oslash/__pycache__/do.cpython-39.pyc,, +oslash/__pycache__/either.cpython-39.pyc,, +oslash/__pycache__/identity.cpython-39.pyc,, +oslash/__pycache__/ioaction.cpython-39.pyc,, +oslash/__pycache__/list.cpython-39.pyc,, +oslash/__pycache__/maybe.cpython-39.pyc,, +oslash/__pycache__/monadic.cpython-39.pyc,, +oslash/__pycache__/observable.cpython-39.pyc,, +oslash/__pycache__/reader.cpython-39.pyc,, +oslash/__pycache__/state.cpython-39.pyc,, +oslash/__pycache__/writer.cpython-39.pyc,, +oslash/_version.py,sha256=bfRfzZb0Upr5hkWvmAXH6ZtM9vl2RzrvUibynTjIgOg,497 +oslash/cont.py,sha256=b6lhAeNksKn7QrG3vCT6weHkpAWTQKOOZKkMFGt2g4g,2435 +oslash/do.py,sha256=2R9HgYhfYaJ4sXXAorZa4Dx-2vWw8dzX7M4qc_oT4II,6379 
+oslash/either.py,sha256=73mXh5iCzSLBJgk3QlgMX2xN-tZCam3hobbeNNAlbzw,4058 +oslash/identity.py,sha256=DegmadVO4vKqbV46xlTYQ-ZWnYzvG5mxLqAjurluefo,1761 +oslash/ioaction.py,sha256=4ig3EzsTgWq5qDAnZVkpkjawFIAoooG2jZEOwGBKDfY,6784 +oslash/list.py,sha256=teV3WwJD0WlNmd_PBU37_voDXp9kkFezxWr1rF1keFs,8725 +oslash/maybe.py,sha256=WKidTTmWz6gIe1HidcGC2-ZZG1rygmvyHpwIEpiwufc,6643 +oslash/monadic.py,sha256=JKVo0lkmAKatvK9tdGT4ZQcnlkdKObf5B7Zrtuo8sn4,597 +oslash/observable.py,sha256=n-BEkav1j_e3F31WTpxVuCjY54nvJecyg3OMKvonBZM,2968 +oslash/reader.py,sha256=4kR47DJh2yg9cv4jqdWnMwBWUPSMvbtDF5rKUzJtZOQ,4527 +oslash/state.py,sha256=RPlCS4h5IJ0OZGcomRxxk6maB0hcIxOqbzJid6iHgIo,2277 +oslash/typing/__init__.py,sha256=nQgRsFD6GP-9YHKPEtSgsOiAUTxJ82leBSCWSB27Xew,133 +oslash/typing/__pycache__/__init__.cpython-39.pyc,, +oslash/typing/__pycache__/applicative.cpython-39.pyc,, +oslash/typing/__pycache__/functor.cpython-39.pyc,, +oslash/typing/__pycache__/monad.cpython-39.pyc,, +oslash/typing/__pycache__/monoid.cpython-39.pyc,, +oslash/typing/applicative.py,sha256=OaBvmf0T5YHuRYPL2gPz7gZDsjtppEZN-lMCf3AS8Ec,1869 +oslash/typing/functor.py,sha256=XGTkT4s95mjWBQw0E994AFPujuBdQx9oFhnIxVTMt9k,1141 +oslash/typing/monad.py,sha256=3z9WlsEAPiv1Ls3WpsuyAYx3h45UFXIK-a8oXgmXTvI,2745 +oslash/typing/monoid.py,sha256=bdUo8QGn7Pdc8iVEn7XT_jwdBG75_kG1xRyk7k9k_uk,967 +oslash/util/__init__.py,sha256=YQdehJ8CwGsguuJCQkGjQu7k6jmRXyrXMDPsDVO1Lq8,87 +oslash/util/__pycache__/__init__.cpython-39.pyc,, +oslash/util/__pycache__/basic.cpython-39.pyc,, +oslash/util/__pycache__/fn.cpython-39.pyc,, +oslash/util/__pycache__/numerals.cpython-39.pyc,, +oslash/util/basic.py,sha256=e5AoXonkxyGeyXw40foVdBXDAajI-n2Gcv7xZ-jwbN0,99 +oslash/util/fn.py,sha256=IHRM6JPSCWam9dzZs-uL9rP4bmyhVlsnJXXS8DoTwVw,2405 +oslash/util/numerals.py,sha256=F2Us8AmoEwGdnXROB3OO9xchYcETzW60LNC_Eki7Dg8,1084 +oslash/writer.py,sha256=PHCFzHIPe-txosY1gi53CCC1En01WiGtia0YvUJU96M,3342 diff --git a/.venv/lib/python3.9/site-packages/OSlash-0.6.3.dist-info/WHEEL 
b/.venv/lib/python3.9/site-packages/OSlash-0.6.3.dist-info/WHEEL new file mode 100644 index 0000000..83ff02e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/OSlash-0.6.3.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.35.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/.venv/lib/python3.9/site-packages/OSlash-0.6.3.dist-info/top_level.txt b/.venv/lib/python3.9/site-packages/OSlash-0.6.3.dist-info/top_level.txt new file mode 100644 index 0000000..6b177e1 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/OSlash-0.6.3.dist-info/top_level.txt @@ -0,0 +1 @@ +oslash diff --git a/.venv/lib/python3.9/site-packages/OSlash-0.6.3.dist-info/zip-safe b/.venv/lib/python3.9/site-packages/OSlash-0.6.3.dist-info/zip-safe new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/OSlash-0.6.3.dist-info/zip-safe @@ -0,0 +1 @@ + diff --git a/.venv/lib/python3.9/site-packages/PyNaCl-1.5.0.dist-info/INSTALLER b/.venv/lib/python3.9/site-packages/PyNaCl-1.5.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/PyNaCl-1.5.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.9/site-packages/PyNaCl-1.5.0.dist-info/LICENSE b/.venv/lib/python3.9/site-packages/PyNaCl-1.5.0.dist-info/LICENSE new file mode 100644 index 0000000..91e18a6 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/PyNaCl-1.5.0.dist-info/LICENSE @@ -0,0 +1,174 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the 
following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
diff --git a/.venv/lib/python3.9/site-packages/PyNaCl-1.5.0.dist-info/METADATA b/.venv/lib/python3.9/site-packages/PyNaCl-1.5.0.dist-info/METADATA new file mode 100644 index 0000000..1de4243 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/PyNaCl-1.5.0.dist-info/METADATA @@ -0,0 +1,246 @@ +Metadata-Version: 2.1 +Name: PyNaCl +Version: 1.5.0 +Summary: Python binding to the Networking and Cryptography (NaCl) library +Home-page: https://github.com/pyca/pynacl/ +Author: The PyNaCl developers +Author-email: cryptography-dev@python.org +License: Apache License 2.0 +Platform: UNKNOWN +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Requires-Python: >=3.6 +License-File: LICENSE +Requires-Dist: cffi (>=1.4.1) +Provides-Extra: docs +Requires-Dist: sphinx (>=1.6.5) ; extra == 'docs' +Requires-Dist: sphinx-rtd-theme ; extra == 'docs' +Provides-Extra: tests +Requires-Dist: pytest (!=3.3.0,>=3.2.1) ; extra == 'tests' +Requires-Dist: hypothesis (>=3.27.0) ; extra == 'tests' + +=============================================== +PyNaCl: Python binding to the libsodium library +=============================================== + +.. image:: https://img.shields.io/pypi/v/pynacl.svg + :target: https://pypi.org/project/PyNaCl/ + :alt: Latest Version + +.. image:: https://codecov.io/github/pyca/pynacl/coverage.svg?branch=main + :target: https://codecov.io/github/pyca/pynacl?branch=main + +.. 
image:: https://img.shields.io/pypi/pyversions/pynacl.svg + :target: https://pypi.org/project/PyNaCl/ + :alt: Compatible Python Versions + +PyNaCl is a Python binding to `libsodium`_, which is a fork of the +`Networking and Cryptography library`_. These libraries have a stated goal of +improving usability, security and speed. It supports Python 3.6+ as well as +PyPy 3. + +.. _libsodium: https://github.com/jedisct1/libsodium +.. _Networking and Cryptography library: https://nacl.cr.yp.to/ + +Features +-------- + +* Digital signatures +* Secret-key encryption +* Public-key encryption +* Hashing and message authentication +* Password based key derivation and password hashing + +`Changelog`_ +------------ + +.. _Changelog: https://pynacl.readthedocs.io/en/stable/changelog/ + +Installation +============ + +Binary wheel install +-------------------- + +PyNaCl ships as a binary wheel on macOS, Windows and Linux ``manylinux1`` [#many]_ , +so all dependencies are included. Make sure you have an up-to-date pip +and run: + +.. code-block:: console + + $ pip install pynacl + +Faster wheel build +------------------ + +You can define the environment variable ``LIBSODIUM_MAKE_ARGS`` to pass arguments to ``make`` +and enable `parallelization`_: + +.. code-block:: console + + $ LIBSODIUM_MAKE_ARGS=-j4 pip install pynacl + +Linux source build +------------------ + +PyNaCl relies on `libsodium`_, a portable C library. A copy is bundled +with PyNaCl so to install you can run: + +.. code-block:: console + + $ pip install pynacl + +If you'd prefer to use the version of ``libsodium`` provided by your +distribution, you can disable the bundled copy during install by running: + +.. code-block:: console + + $ SODIUM_INSTALL=system pip install pynacl + +.. warning:: Usage of the legacy ``easy_install`` command provided by setuptools + is generally discouraged, and is completely unsupported in PyNaCl's case. + +.. 
_parallelization: https://www.gnu.org/software/make/manual/html_node/Parallel.html + +.. _libsodium: https://github.com/jedisct1/libsodium + +.. [#many] `manylinux1 wheels `_ + are built on a baseline linux environment based on Centos 5.11 + and should work on most x86 and x86_64 glibc based linux environments. + +Changelog +========= + +1.5.0 (2022-01-07) +------------------ + +* **BACKWARDS INCOMPATIBLE:** Removed support for Python 2.7 and Python 3.5. +* **BACKWARDS INCOMPATIBLE:** We no longer distribute ``manylinux1`` + wheels. +* Added ``manylinux2014``, ``manylinux_2_24``, ``musllinux``, and macOS + ``universal2`` wheels (the latter supports macOS ``arm64``). +* Update ``libsodium`` to 1.0.18-stable (July 25, 2021 release). +* Add inline type hints. + +1.4.0 (2020-05-25) +------------------ + +* Update ``libsodium`` to 1.0.18. +* **BACKWARDS INCOMPATIBLE:** We no longer distribute 32-bit ``manylinux1`` + wheels. Continuing to produce them was a maintenance burden. +* Added support for Python 3.8, and removed support for Python 3.4. +* Add low level bindings for extracting the seed and the public key + from crypto_sign_ed25519 secret key +* Add low level bindings for deterministic random generation. +* Add ``wheel`` and ``setuptools`` setup_requirements in ``setup.py`` (#485) +* Fix checks on very slow builders (#481, #495) +* Add low-level bindings to ed25519 arithmetic functions +* Update low-level blake2b state implementation +* Fix wrong short-input behavior of SealedBox.decrypt() (#517) +* Raise CryptPrefixError exception instead of InvalidkeyError when trying + to check a password against a verifier stored in a unknown format (#519) +* Add support for minimal builds of libsodium. Trying to call functions + not available in a minimal build will raise an UnavailableError + exception. To compile a minimal build of the bundled libsodium, set + the SODIUM_INSTALL_MINIMAL environment variable to any non-empty + string (e.g. 
``SODIUM_INSTALL_MINIMAL=1``) for setup. + +1.3.0 2018-09-26 +---------------- + +* Added support for Python 3.7. +* Update ``libsodium`` to 1.0.16. +* Run and test all code examples in PyNaCl docs through sphinx's + doctest builder. +* Add low-level bindings for chacha20-poly1305 AEAD constructions. +* Add low-level bindings for the chacha20-poly1305 secretstream constructions. +* Add low-level bindings for ed25519ph pre-hashed signing construction. +* Add low-level bindings for constant-time increment and addition + on fixed-precision big integers represented as little-endian + byte sequences. +* Add low-level bindings for the ISO/IEC 7816-4 compatible padding API. +* Add low-level bindings for libsodium's crypto_kx... key exchange + construction. +* Set hypothesis deadline to None in tests/test_pwhash.py to avoid + incorrect test failures on slower processor architectures. GitHub + issue #370 + +1.2.1 - 2017-12-04 +------------------ + +* Update hypothesis minimum allowed version. +* Infrastructure: add proper configuration for readthedocs builder + runtime environment. + +1.2.0 - 2017-11-01 +------------------ + +* Update ``libsodium`` to 1.0.15. +* Infrastructure: add jenkins support for automatic build of + ``manylinux1`` binary wheels +* Added support for ``SealedBox`` construction. +* Added support for ``argon2i`` and ``argon2id`` password hashing constructs + and restructured high-level password hashing implementation to expose + the same interface for all hashers. +* Added support for 128 bit ``siphashx24`` variant of ``siphash24``. +* Added support for ``from_seed`` APIs for X25519 keypair generation. +* Dropped support for Python 3.3. + +1.1.2 - 2017-03-31 +------------------ + +* reorder link time library search path when using bundled + libsodium + +1.1.1 - 2017-03-15 +------------------ + +* Fixed a circular import bug in ``nacl.utils``. + +1.1.0 - 2017-03-14 +------------------ + +* Dropped support for Python 2.6. 
+* Added ``shared_key()`` method on ``Box``. +* You can now pass ``None`` to ``nonce`` when encrypting with ``Box`` or + ``SecretBox`` and it will automatically generate a random nonce. +* Added support for ``siphash24``. +* Added support for ``blake2b``. +* Added support for ``scrypt``. +* Update ``libsodium`` to 1.0.11. +* Default to the bundled ``libsodium`` when compiling. +* All raised exceptions are defined mixing-in + ``nacl.exceptions.CryptoError`` + +1.0.1 - 2016-01-24 +------------------ + +* Fix an issue with absolute paths that prevented the creation of wheels. + +1.0 - 2016-01-23 +---------------- + +* PyNaCl has been ported to use the new APIs available in cffi 1.0+. + Due to this change we no longer support PyPy releases older than 2.6. +* Python 3.2 support has been dropped. +* Functions to convert between Ed25519 and Curve25519 keys have been added. + +0.3.0 - 2015-03-04 +------------------ + +* The low-level API (`nacl.c.*`) has been changed to match the + upstream NaCl C/C++ conventions (as well as those of other NaCl bindings). + The order of arguments and return values has changed significantly. To + avoid silent failures, `nacl.c` has been removed, and replaced with + `nacl.bindings` (with the new argument ordering). If you have code which + calls these functions (e.g. `nacl.c.crypto_box_keypair()`), you must review + the new docstrings and update your code/imports to match the new + conventions. 
+ + diff --git a/.venv/lib/python3.9/site-packages/PyNaCl-1.5.0.dist-info/RECORD b/.venv/lib/python3.9/site-packages/PyNaCl-1.5.0.dist-info/RECORD new file mode 100644 index 0000000..23bfe7c --- /dev/null +++ b/.venv/lib/python3.9/site-packages/PyNaCl-1.5.0.dist-info/RECORD @@ -0,0 +1,68 @@ +PyNaCl-1.5.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +PyNaCl-1.5.0.dist-info/LICENSE,sha256=0xdK1j5yHUydzLitQyCEiZLTFDabxGMZcgtYAskVP-k,9694 +PyNaCl-1.5.0.dist-info/METADATA,sha256=U9PLLkcKk_YC_Tl5OSoMiAmblalKU9qFsRtxiwa-TiM,8656 +PyNaCl-1.5.0.dist-info/RECORD,, +PyNaCl-1.5.0.dist-info/WHEEL,sha256=4Pc_qD0VhjiuSiOgv99UcgTmp8ljuSMnWHBjm4-tsjo,114 +PyNaCl-1.5.0.dist-info/top_level.txt,sha256=wfdEOI_G2RIzmzsMyhpqP17HUh6Jcqi99to9aHLEslo,13 +nacl/__init__.py,sha256=0IUunzBT8_Jn0DUdHacBExOYeAEMggo8slkfjo7O0XM,1116 +nacl/__pycache__/__init__.cpython-39.pyc,, +nacl/__pycache__/encoding.cpython-39.pyc,, +nacl/__pycache__/exceptions.cpython-39.pyc,, +nacl/__pycache__/hash.cpython-39.pyc,, +nacl/__pycache__/hashlib.cpython-39.pyc,, +nacl/__pycache__/public.cpython-39.pyc,, +nacl/__pycache__/secret.cpython-39.pyc,, +nacl/__pycache__/signing.cpython-39.pyc,, +nacl/__pycache__/utils.cpython-39.pyc,, +nacl/_sodium.abi3.so,sha256=eZC39TLe_Z_mLp_OdkxMWrxMsYw6NyDy9voJ2pMwbo0,1021039 +nacl/bindings/__init__.py,sha256=BDlStrds2EuUS4swOL4pnf92PWVS_CHRCptX3KhEX-s,16997 +nacl/bindings/__pycache__/__init__.cpython-39.pyc,, +nacl/bindings/__pycache__/crypto_aead.cpython-39.pyc,, +nacl/bindings/__pycache__/crypto_box.cpython-39.pyc,, +nacl/bindings/__pycache__/crypto_core.cpython-39.pyc,, +nacl/bindings/__pycache__/crypto_generichash.cpython-39.pyc,, +nacl/bindings/__pycache__/crypto_hash.cpython-39.pyc,, +nacl/bindings/__pycache__/crypto_kx.cpython-39.pyc,, +nacl/bindings/__pycache__/crypto_pwhash.cpython-39.pyc,, +nacl/bindings/__pycache__/crypto_scalarmult.cpython-39.pyc,, +nacl/bindings/__pycache__/crypto_secretbox.cpython-39.pyc,, 
+nacl/bindings/__pycache__/crypto_secretstream.cpython-39.pyc,, +nacl/bindings/__pycache__/crypto_shorthash.cpython-39.pyc,, +nacl/bindings/__pycache__/crypto_sign.cpython-39.pyc,, +nacl/bindings/__pycache__/randombytes.cpython-39.pyc,, +nacl/bindings/__pycache__/sodium_core.cpython-39.pyc,, +nacl/bindings/__pycache__/utils.cpython-39.pyc,, +nacl/bindings/crypto_aead.py,sha256=BIw1k_JCfr5ylZk0RF5rCFIM1fhfLkEa-aiWkrfffNE,15597 +nacl/bindings/crypto_box.py,sha256=Ox0NG2t4MsGhBAa7Kgah4o0gc99ULMsqkdX56ofOouY,10139 +nacl/bindings/crypto_core.py,sha256=6u9G3y7H-QrawO785UkFFFtwDoCkeHE63GOUl9p5-eA,13736 +nacl/bindings/crypto_generichash.py,sha256=9mX0DGIIzicr-uXrqFM1nU4tirasbixDwbcdfV7W1fc,8852 +nacl/bindings/crypto_hash.py,sha256=Rg1rsEwE3azhsQT-dNVPA4NB9VogJAKn1EfxYt0pPe0,2175 +nacl/bindings/crypto_kx.py,sha256=oZNVlNgROpHOa1XQ_uZe0tqIkdfuApeJlRnwR23_74k,6723 +nacl/bindings/crypto_pwhash.py,sha256=laVDo4xFUuGyEjtZAU510AklBF6ablBy7Z3HN1WDYjY,18848 +nacl/bindings/crypto_scalarmult.py,sha256=_DX-mst2uCnzjo6fP5HRTnhv1BC95B9gmJc3L_or16g,8244 +nacl/bindings/crypto_secretbox.py,sha256=KgZ1VvkCJDlQ85jtfe9c02VofPvuEgZEhWni-aX3MsM,2914 +nacl/bindings/crypto_secretstream.py,sha256=G0FgZS01qA5RzWzm5Bdms8Yy_lvgdZDoUYYBActPmvQ,11165 +nacl/bindings/crypto_shorthash.py,sha256=PQU7djHTLDGdVs-w_TsivjFHHp5EK5k2Yh6p-6z0T60,2603 +nacl/bindings/crypto_sign.py,sha256=53j2im9E4F79qT_2U8IfCAc3lzg0VMwEjvAPEUccVDg,10342 +nacl/bindings/randombytes.py,sha256=uBK3W4WcjgnjZdWanrX0fjYZpr9KHbBgNMl9rui-Ojc,1563 +nacl/bindings/sodium_core.py,sha256=9Y9CX--sq-TaPaQRPRpx8SWDSS9PJOja_Cqb-yqyJNQ,1039 +nacl/bindings/utils.py,sha256=KDwQnadXeNMbqEA1SmpNyCVo5k8MiUQa07QM66VzfXM,4298 +nacl/encoding.py,sha256=qTAPc2MXSkdh4cqDVY0ra6kHyViHMCmEo_re7cgGk5w,2915 +nacl/exceptions.py,sha256=GZH32aJtZgqCO4uz0LRsev8z0WyvAYuV3YVqT9AAQq4,2451 +nacl/hash.py,sha256=EYBOe6UVc9SUQINEmyuRSa1QGRSvdwdrBzTL1tdFLU8,6392 +nacl/hashlib.py,sha256=L5Fv75St8AMPvb-GhA4YqX5p1mC_Sb4HhC1NxNQMpJA,4400 
+nacl/public.py,sha256=RVGCWQRjIJOmW-8sNrVLtsDjMMGx30i6UyfViGCnQNA,14792 +nacl/pwhash/__init__.py,sha256=XSDXd7wQHNLEHl0mkHfVb5lFQsp6ygHkhen718h0BSM,2675 +nacl/pwhash/__pycache__/__init__.cpython-39.pyc,, +nacl/pwhash/__pycache__/_argon2.cpython-39.pyc,, +nacl/pwhash/__pycache__/argon2i.cpython-39.pyc,, +nacl/pwhash/__pycache__/argon2id.cpython-39.pyc,, +nacl/pwhash/__pycache__/scrypt.cpython-39.pyc,, +nacl/pwhash/_argon2.py,sha256=jL1ChR9biwYh3RSuc-LJ2-W4DlVLHpir-XHGX8cpeJQ,1779 +nacl/pwhash/argon2i.py,sha256=IIvIuO9siKUu5-Wpz0SGiltLQv7Du_mi9BUE8INRK_4,4405 +nacl/pwhash/argon2id.py,sha256=H22i8O4j9Ws4L3JsXl9TRcJzDcyaVumhQRPzINAgJWM,4433 +nacl/pwhash/scrypt.py,sha256=fMr3Qht1a1EY8aebNNntfLRjinIPXtKYKKrrBhY5LDc,6986 +nacl/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +nacl/secret.py,sha256=kauBNuP-0rb3TjU2EMBMu5Vnmzjnscp1bRqMspy5LzU,12108 +nacl/signing.py,sha256=kbTEUyHLUMaNLv1nCjxzGxCs82Qs5w8gxE_CnEwPuIU,8337 +nacl/utils.py,sha256=gmlTD1x9ZNwzHd8LpALH1CHud-Htv8ejRb3y7TyS9f0,2341 diff --git a/.venv/lib/python3.9/site-packages/PyNaCl-1.5.0.dist-info/WHEEL b/.venv/lib/python3.9/site-packages/PyNaCl-1.5.0.dist-info/WHEEL new file mode 100644 index 0000000..b254a18 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/PyNaCl-1.5.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: false +Tag: cp36-abi3-macosx_10_10_universal2 + diff --git a/.venv/lib/python3.9/site-packages/PyNaCl-1.5.0.dist-info/top_level.txt b/.venv/lib/python3.9/site-packages/PyNaCl-1.5.0.dist-info/top_level.txt new file mode 100644 index 0000000..f52507f --- /dev/null +++ b/.venv/lib/python3.9/site-packages/PyNaCl-1.5.0.dist-info/top_level.txt @@ -0,0 +1,2 @@ +_sodium +nacl diff --git a/.venv/lib/python3.9/site-packages/_cffi_backend.cpython-39-darwin.so b/.venv/lib/python3.9/site-packages/_cffi_backend.cpython-39-darwin.so new file mode 100755 index 0000000000000000000000000000000000000000..14dfdccb4023089c72ca959cf09653d5427645fa 
GIT binary patch literal 202488 zcmeFadw3K@*1$cJWFTNdj|v(UG-A-8h(wJNC7OW*dSC{FfZ`=ajYM4)SeOw|$Ym!{ z+IA4{=)SsfU3cTVUT{|t*;Nv53F<1~4MYUIK#$`Ea7_S_`F^LmCl_?zXTR^i?|IDg zOuD)*r%s(Zb?VfqQ`PSt_~uYgm&={%a=Fs^_2H)zQSYiDOyZ%&HKRbD=+WXdEJ zb=W@=yZzbk9^&K&DD_uf9-2BQk-uOHJ3;^72`s@b$Ag zq7H}GRAr(%uVVVN zS)C0&9NwZuHoPH&Yy{-JTYDY6l8^H8idmtVH&46Ki8vhIKVPxotuC=MB;S+eyWvV? zd3ku+?3<_EP=52Y%IWI8d;iw_Xv518#^6uh4~Jo=*#FDRr`}XvIrF;PrY7IJ!&`L7 zhBw20xa8dd-4#qnSop4$3d_rjeUp40(F7bUYLaozdy)MvFq6D2G3DjcZY`fR_qOY& z-&#H+H1j{g6W+J~Qo7+K{PB-u63_DT8>e^n>d)~@ckr9zv{T-@wo~B!vm3lyulv97 zt8v@y+a!aDKY8y8PuMZ8QtW^5$zgogoqpH$wBcntd@S$(H{acGUFf>b3J&j&nm#tX ztXw;fy#L?uLO0Evdfg4>w_i6?M(^S97N6(9bE=g0uI)X#6BQLguGsLdpQVJs;qaC? z@Veq;*I)xW>SypAKu-MO@R}TWE~mYctt)J&+dH5NAM5xW4a`5H-R^rg5Kbg6jQ=oDAekuKNn%Trv#$tKXU_}z46ifbQC zSF=9FHD|QTbp&yx{N%4U@$!?r89cvMvA_J!T9@Mbb613%Y$x!Pde-0e&1q?Op51!a z1=sbsW5IJz<`CzVJiU6mdT6~}rzhVfj=v0k(jF;mxBPfQZr54iSu@YN_2%p4S6O-U z&>Q9wAVu;^yOR0cH@F!Q-SbbJJ1aExwxJ_C3OVI&>Qc^iQt|j! zbM9HAMRd)cKJ%7YXN{UZbL!~n;b}1b&C{n1RYFZt&bn;s%vr?DI?H!h9^jwKPuiP1 zCB?N2It_N-w_ia&5&re>R|@<}fnO=`D+PX~z^@eel>)y~;8zO#N`YS~@GAv=rNFNg z`2PcmGSZj47$e~9ET+-%t`Q8vK zP#{|P>Jti|fN@(>BIqF!%zY9`h6sw^<)kDensc zi9|wAr0V9ZEIqQePr$sbi4wZ;v9*M{CCBNKBkK9bw{F*1v;S^?Vy!=Ns89IGn!=uf zrB-*0fQ%M?+#4jL`L#AWc^j>hZgtva9TEunx00e@ue2sPuy2Jr6up8J>((S0E?1HH z<2thsa>iz?|9?JLV^P z)bvd6faCkU!-QWk4(4iE8*8PG4Sk}!0wr&=d@8><5e*tN_vCx!c4{+l^k~0Fz6a5loE#t7IX+8K<)5*0{0Wg`-RI=W=P>LX zM`1spJ>YwROAz;peixC5r-b zVF!>=HjohxkbmFU9Y|dgNG|~*gV%aC352FNm>gmk2O7B?ATJ&Um*q~&9~Ilnm!$N>nTno})6Y6QqhNgyz}gGtB+5_NzKIt<7wNgx*p5Rn_! 
z$7E47IjsZ8SvHW%93a~(yVGPw637pfU+d{##He9#XB`iTwE9!*0lSjI)_FE;8S7eI z3%McE^9pK<^|hoKp3jf7*{#0!{zM}7v|v;uUClt+<95A)IC5FL>~@CcOTQ=i+~IxR zW#7TYQ}z=8*<#HF^x9EZyIMv~xqbT0H!$n$c^Z%{ql#p(>94s)+M7JOF|JfMN?P@3 zxOF+Lr|>rGs|gASs4Y6yLCr&x;z#H$o;!$@Z-PK1t# z+?nkPd923-XYLlecTHjx2q4 z7;OyAv(muGZ&rDAGrY-4wP~x>MWukz5-`KteQl#lvqKuib=TbVpm}?CzzA;##hrWU z)N<)Ksp4)*&?5+nJf@d9h4D&#ksD0dy@;%oD^#8-CeZ8L9&5r0rSsRr`e`lxD8 zc1L?oev~57euIdmlAqx3qCmwvrX=eO-nmN>3PF3{KimmeK+QQylTDU^;ra*bsyV9dyw(7s_S^j)s62N59a97XElUssHD*yu5L`x>Yt@# zW4cy<1pU>Rrq$oW^I6%}Wm01Ro1=;%`@ATe%z=@8J%k#Udi z-jLYg*WS43Sy#5e{|oSA;yKV+;_r3|PuCJYZRs&i2^JQ#0K zWm9Yc(SFlLHGDjz&!yK`aK!$89pgh=VW)(4c1d|qQYxGxyW(%rbK7M)MA3|_l97JO zgJ^S_AV}r9R9ZJz-Xi-_v^sE=kXx(Y1a@!mh6dB#1$@f2%t2d*ZkhiHLY3kz!>o6u zMtUNee+eT&c=#n^WWeUrJAR|w>yK6s^BdJ!{+2P>Ky(F>&&O@K?UOC@n-KX;N^iCf z?14OWq?R!3s{N)z?)v*>wPAeJ1&3C_L3=fsVFnrETO$YDq3ZgLT73i%sEt?AXn&$- zJm2R)3N4R~CTC-IxEPwOmf5d^Ccy_p{i2$WMRJ#m+P1H=!c}{vLptAM?~+50+^M@X z;~2rtyed1ENq}k(0#~YigF5Cobwu-5->={U`~r%`pIR=2RjN?Dw;dK`Bw)04spM3M zDD`T&t(988)-EZ3yoTzdg-ZYm(cj=<-69l^7H+nqViGmQj_N_v$aORitHh2v!;ZQ@ zqRu92FVpI)DBCZ?pc|a0gyr`n?`ixVqIrMe{SD_SVflr4*G?+xT(3`>=r`K@#=$_e zDnUP2-BDuf^F_A1S!5y+YH21)Zc`BMIFmli2-WUz9dx;B_q$-Y@bN?iLMe7!-bP|S zVOTx1Qg^Qxei|ygIJo7FM8fN`PKOK3((JaC*;>6?)|k2;tZ&oqTS^_VEGVo9958kU zjJ>i}R|99g>f8f?!7K9;c}?-Y$@yIea=~5(0Br(^bLZ9WRCpcgz$-K_#g5BcS9{3D zt66uq3dW?UR_M{$DNajXRPkv}OA>ZVinNDXboaja1A0^+rWhnMpEKVWd3vNdO;4;06zrWpUND*zC^#_RGTLi*31Zr-iQ0t1?E@0$O-eC-to>YJ z{G;F&UP003oxKHP%~iWAfh>!@e35u|#Q&5e#;Q&QNa;^q?JdLo#z%hR-?mt1%*=7b z@j{5}H!n#?qBf)T=mi_JIybcD{WzqZdf!7H-hcaz^S;A=&-==G-(bJL7I)s8?f0Gc zspS$KW=&%`^$&U?5%<#NstXTT>zEmhRci7Y-o^IPh_G4<1etJhz(gjT7vzaFt)`MPyO!5nZd*mQw31idw(9(rx#kn)=32r?bS4oei;c)1NKWssD_G zSbqvIp{BV)lX{FOK(n%-Uoe`2w`fOKcCrz_EEZm3k-nODk>H6zwj1Y4G zh*WArJwKv2gipF-z+Uahl4aIgS0MH@Z^<&nWeUfqk#ef_6h?m1*y0>}a$d)qnw&p^ zjrKAxe-Df%HFJsBj>(c#H%pr0^PZJXMGk)*1-EGaIr7ko+fQEEw$d9q)(=Fia&wB@ z;Ujd@X9q(kGgFLypN!^kH5FJB!A+qhN<$`C(WorH(Ian5EPPNTKPHVB0b>Uf*j&K*6MeM$Sqf6S^%ePIl~U8~-D;XhRP1?$egM{u 
z>_^cMP9}t+A>)G(WA|!7fU((DBi37VAwT5yN2z@!Xrl)PAqMvF+G(R7*2aL}YBWkb0kvrfr`X{!Tr$Dj$PK*fgQX$5R!rMkmWkg27 zbgHT>F)o6Bi?F;%dO5vHx=Lo5=CV^~k}6%ZCAX^{BcwYkmPk;1f>ZqK%Y@lrJN^Y5 ztxGQN+95;81Sd`=(Oc5!Fdc$MCNQNGhXo&$0PXT7cyFcRJv-bi;Uyh>C^f%g34^Rn zBXUXX8i>z8InEh0GMK{lKv5EJgEN3+Fg^SQ1Vj>w#5;MX3Wo=ZNNe+zrZL@1CB%1;UH74#+spjBF6nv*suTPW}N%50gxy#s3ib9Sf4nuVfdx*@BhoZ8mGv zcIzJ4956$OvPLBHtC4sdQL(x7m6g*i-YxMpb{|fL7|F)lrrdUG*)G+1qY!!|NbP#WEza) zGebG0MQv--!Z#!Tdzo$*FR7t)e`H=xkvml0R#cj;)xAqCO7(_yvQ(WZp;Q)K!TN8i zPS4vS`kmkG`zb>~Cg%<9i3E~k!JjA_gF9LD@<4PxXjgY*g1=kzX00|o#HItPL#SZWvSubQZv{pP#_ z81n+%Ca51|V`66tk~d(i(<29vV1s0i#1Lb&M|SlmHjlzp?71ye^wNLSJ9ah#o1k&eIc3dcn%@9!2W5`0)%YaNIaI z4eL#=-@HA`uf2#OAVO&@V*`qxB8@XW84CB|5aVp{wt7tj`Bakp(W+to=y)CUM9sF~;YTGf#I_!qh_os3^qYL)RyZN@k6S87|n zuGHSQh}=u$_e8cy#R3H&di_Jb~6qU8`D0*BC2f_cP-2w$zSzHIYD>dvulV-mJ5qRt^9gLr?jCxjmkyML_;F1!A7aF3j?P94 z?*ZTH@ji6uFgQwYEb^WB%IKI3A4qy+|8sV7>)U*|RH zkwZPT-z}jo|KJUJG%%r0UQ^yi?bXnPy<|T3atZyLQ0Vd$3AU+VngsW$U=Il%P{DKw z9#p~J5(sQNW zZf%%HeR*y1V~9W2rs>)|RJJ`@-SwoHeP60QAd9n^+iICWysp{9Ygf8liG;Df+3KAs z>pTkfto=;ZakX#C3Xk{8Q-H)8(S{UW9y=CL$O0FO3bAcO^DnDc)A5?|NP`~$95CM0 zBcJWoqj&7qF<>`;E2WLyk?pB^Br!~j)QQwe#CFOMPi7rS))u|_JF>PIhax+X0;r81 zXs!=#M~rFTp$}JOxMje(8F1IO@>`D$t2$$h{GT*N4QaRhpX6p>cV-BT$+mM0L*`~< zOga>PIrco2>x0+kB{Xl^T+JJp>-A|zrd9JVQ0>*c#dEVIav1N$)!8a`n8X%IY^jPZ zQn97qYtxJ8Ud#W$+!<0$arL$G2vpCYrXtqf2gCnETS9~NXy{_E-o8l+oKMzl1ao+s^cd^iCG3y{qa!Ylt&!p1#3&Et(6}@qK|o0G-9}o8(75(Z0g$H1 z5E$ciivKoQd@KXIn?8VjS#4mXx7{teusb|bzp!v*tJ@Xo8!%^ItDE`xyjT|93Er5) zJs#feuKsI;l%s;YwyaVUp0UZcV;kG#87q?{bCfKB)78}38akKo4cmxD@yjLLwABvx zCtUZj9rhCT%7!3$(lD500K!1CZZ>I6Pe}`)6|1HQ{+9-LwegQ1Axh$xqQJ@41`7^xT#&fA!p#GC z=`>$B`!{ShB;Jf1=s9~jTTNAhNJYjX-QAqnQM4e8A{b6Ahi%#S+zQmlYA;rPBg|+B z?^f!fHM=9e(g+_U{-AP8u*y5)jqpCA_bCqnOYewAp}hKcppH`n4{Iy5)=qMn;P?-I zU-5g-c}iG*PxCt*f3d$zEcIap?9K(Z2h8egON7>H9e-irS!;oB4?#13ATQPj5Kfkh z%JgFQ@<6+1wYrE9BAR)J)4Q_?e<&7Vdh`~;k8iTW<%CaldVL~2eHAr^2FXs`CIx)X z8Ohx<+memhOy`2>I)p!X_ z$tarpYBmBo?i@#6iDu$@UX`0P>~)T$9%8~?MBy1Hfyh@#>Ap~SnSf&SOc-- 
zM_Q3TI#J~>eiNIBwB%J+tVT+i<8;$qsHWCgRNJR(v&0Z>O%_(MN6c=g4fR79oRqW| zzp9&3!gSN9EWpNx&`k?hN}+<9`3KL5EsBg$*?8GkUd?;|}Z3*Hxik?9WU8bAO7~C;hoYwcPH{KT0^7xvvV05AV+_ zK*4GIza6#}b}qI1P8kWFb68l|d4^c29CkkVfeMF@>!6b+?3{*s3v@DFaXj)eZ)_I$ zzeJ{|K?mXw8SQX3ncq~IZ90`XL=v7jojx9MSnt1@8voZ}QA<UU{wEL; zeu)2G0$}!_b)(*u^qu?FUHlEPf4`#}?_mPhjrr^KNL98g%qnn~^*w7nB<;c@Wt<4# zm^HFem#xl}@cHsc3y+is{ctjQm?N2t`YQ7GJ4YT`@p?p^tfW~3xMDK6gl0lWJNlVZDWVB`_UzH(8O~Iz;+HPeJ%uALXO6GW(-cquJqPRZZ z)*6V;Z=^}Zv>#6m%s2t#tAMfJ`goMmc+T>h^K!D;EpN4!5+lmnS?rrPN^@B42aG+| zq#n|mxlOSf8N+#7ezse{T=Cc&swoQ07QnTdx7Lh(VNp^FmUMkHIybA26qT1JOID1fx@@ zs1^TC>z|AWyIG#W4xZ6`4U<&BI6#@8v0YYSU2Pe$T_dr`7Hkb6m^?qcZnupMzhoJV zwucp1N&)gVcjTRG=lxm6C>gu@KXe%%Ismd1f+D2Wvw7M!W+DsE#c9%aKccSY2BOu{ zD(gr1*>8M-ihm?)%iaFLs{_16^PjjAmr0n9rX{RT$W-5?8)b_E#{9*hV$$m+o$gfW z|5WLrvxyH$Ji87m{!ey%cBQW+EhmSlUE&=(gV;FKo>d<2&Hrl<&(T?BGJLNmg|X0( z41;`=46PEg^4ya8FSLb+SYctzUxv9Yt0eCm-3Ts;_p4mwOSF_2KQUmIloWiaMK_Ua zO!V9#B}Tii`MZ-!-0i*%34aBf;Au6w*}qxX*9;ij4qN4P@$@b7rB z#5m|co3}J(0L~XV#DFYYqAk3Ww_;--18`Z;yfp_FIk#qPMe+CjLAG&h#k}vct@%DS zhgZP238h^R6r_#KXv$-w#e1^J*AN-*(iSpIr7?|q^LCHly@p<*t&LE*$!`X;e6I2!ZNlLJ_~K=XiF? 
zm|`RNw(0qBkK0vffy_Kn?x*}C$&4)^g0LtoqFYMT_)at#o;6%T%TKXYSFf4RJu^D_jKBTj@%0*zHA_oTxaz2$%C{tl*_ECyr&QUU|*jNYFsWMz- z&D*G`S=NN4&Khf^?e`~I{E;=;VA-HoP!&E~Qo?P=$dA?Vk_koPxA3X8h0gQU+&Ezcn>ihBrsFBk1Qfc7l!=E}MfniL zvh67A1lCfqFL+wHQcL}o@Uu2Y#YCkm)|T$+0Mk=4n4TH0$zwP}@cG$pICQVb|BI50 z7<+RuW zyqc}D=)@_3idmD!Mg!+uF>-D3IhP5rfdVX=e+7W3|Eof`OC>RPW#oj@Wn52z&ETsh z=w;&)_E^O#&NEdt!KLHNA3lmPrbnl;Oz!aGw}O%_TKLrMs*aakNQM$h=ox>*I0!|Sa+!(kp(0!E)RK~#&Yb)y-z z1Z|pW%bWxJeY*A{Q5-CAubp$O{+QmL0HbiG0(pMb34Y`JxOk&q@hYe(Hfgv6`c;h( zUfC^d+~IgYpRd<1?6-oPp=0SX7`SVCtj`6CZ1I*3{ifUetk-^xMGT48uD8_d(op_d z>gU-3B(-p`tEHqt9>RD5RP;c!?EOIW70DEczAmK#(Pz?mz)Tx!K{?v5oTYTY^lVxR zYl%B&Qy>~0Mvi3y$9Ji3BIi*q_f4|?0n>r+FcAu6z478Id7m-9&f9pjR?=#;Mp}#7 znv1oPW~1c&#`JKX2+M@3BO|TpyvMzu1yi`LYxu~jm&QnwT{7E zbtxJpf}%c~+&*T0Mc|f_Is%L%5^f?PZ(ZKT5_x-K25+(5q|u=buOvJ49gwh|r<&Le zRFMSJ#|A%!RE(Xy8(T6bMY76yTIP%m&qLN#JkC~}!&O6O!ZqJ`|hML1HO?Fx-j@*;dy z{DuzxXIWRb6Qw6I|7E<+J|RSY2(VbcP<3f&s2+J;p&HJKy-u<@qGCy1R6`L3 zIfPXV+STeT?u!6f<5CXeLzQc%rXp%hf9^x+lhbE~Vvmfx+9>^bBPb8%>ezC=mnUNPop@-QAM64!@m<4Vt_#arLw!o>#Mh!+>kuhY$kMV@H)vR=Xp`=(y-mhI~Vl|PW)GE3@wgV)4%s2%`ExMmF4 za9*tujGRFmnuQX*ObKw&>%U?Ux5`$e(pwX+2&Y)1hEV~-iV020z87c;OO0IRLf;_; zI){HrW5AqIl(!MJD4^+k1F2!;fg}E6pRs}BYbz;j3f&bn!>pvSx$cw}M4!l|vQ7nb z5rRQtq$#TZ{pS2^nZ=?Hdx_Sg(JVo3V7@ZT)&)T-Q1FVxg+I{ES0oQRx?bgno*HZ| z(N>Jk=HK{%_!*DRIYJErr`@I2wP$s2;#;Ea(?kvg*eDi(qEJqG;VMjpO=8mR9c}v$ z9riW5LPA#Eczrn8i{1N;*5-Yw`eK+BGog|Pz)~Ib=kpZ1RF}v8hf;a#6qh^+6Tj~t z{EpxKq5tHyut+pb4adB7Qr7sGVIs)rhGK}~!TXGox&Z3H;7t%+s#KuI*`T$BBZ1uf zd1~?CABzXa+tfHs^J&Ff6(g|JN7aAXA&hQDr92Gbu)^@6Hl0XNBHysLs{V&T`g4wm zeZ@A6?{)O_MC?kqwu4_pv^%nqV<+|`5^MTN_8uOV8BAs@^9fNS>bJ}pY)@lbl2cfa zo{%ZbXKan*v-NQ@LyZS8G^3q zIfK`LIgAxylXBda5#olO=*LFC?V)fltEyN84n!-?fKAm=4qMHxk|jknbJra*7kOsi z&N9Ggwzce-+=NC2Z?Y&lyn+TSO-yQY1@L6yd(-puFJzlbwk~+wO?HvC#>eQ4%*~gb zP4QFh;y3(4@e8^XSN&<%*cHUl0^)PKX?ZN#x-{)+Su;d4ud)7m7pxs4Yo`dSL6EcTI(4YZs%7JEq`iC9 zxe<135}2bZoQmUTLo)prDj027a7Z9@A=Md_6NWkrVOhttTZ<@TAJ?c|E&6Vkg$iqw 
zkH8hww(+oTC16`(2?fwKOx2yZ=QO`|a6fkr4DKo{_@SnAlVq!;Nj;r57L@YprwiIR5 z^Ys5F8{L;iV@`l^pEwJy8zNO<{RmX-8E5uyhiqE?I2n_isz!OOHwnkm$mGM*qhc)< z5_ZSCXQxX#vcD$PexUR{&n{J`T77EuJ0(sE8s9;RZtKw4p$sual(2pdXY0-TS-*_% zkE;kAN~d|PVvo`4GJreEoOdXLo%?fUdujg7+oeTm`)F;rj};g2qHA6u0W{iiX7M5K zJ?V99&F+>tGSP&2MrN-X{^J8uvww*4w0}=qnoeTfx^O(UPUbBRqIX(@wyD~cL8ZeY zv`3n_SJcc7qmjxMx`lHV+Z;~QqUS(3;iQ*{qS205_vc8~MCKF3DQy1=7Av53q5j$7JBmz{9Uh3=R91FgU#C@AlSraP{8z*cGkIq6sfb&u2V7a0c*Od zb8xcG^96Nb;`y#{CS2Se>S=RvJ5(LF49p5f;g|X#umUW9{n5*&1S_g0jf(~@x?1_KdTj$+9rA|``j0l7X+r8~{xiQ7VZF;9Y{Q0h zT5dE>L!q)_2#!6&)AU5QCI!?q#;W!giYozCH8n}zJ!ddhYsf5vvMwY6wyML5eL4+b z!J*0krqxYU;N>WRmB)afQEJ6_u^nvWQ+)Dw7R{7lH`efDPs(bKDSAzdSinV_bRLrTP@|pCb^7&xCyXs-jir{ z#-lGFG^ll`U6`ekKa1F?q32=6mEQDqEAyrvYP?b+S?+!wuAQA~hv4*Gm?R`R07=St}{) zXB$RNieqXo7#=>7z+!(v{}wgO{6_zlK*2cHSLaJD(wX-W7;9YJ4jZox7VKu72623m zFWmx>9)t+5ArxFz3l?LE6W|F=tCMr=Vg}Ue#BW1M>y-pzmjO^F(rwnmaE+qTD23K) ziK~@3EbA(M0P*ZVV3Q6n!A@QS5I9z3pti6tOb+f#(3h9#(c)IGZn8yy`{aBvM~}QG z?UcHv&@uLUaKw;MhCi`-q_$LIaWb)Y{K}H`w}LFdoKf7KRbqyRDGQu9dMunv zG6F9hycTn&HbeFwij5zO3%;K}#urUH5pQ(acksDeoP7rj{IbRgm_ytmMA#z31Kzp| zmS8qWA5}AN5Z0s$V{TTJ#*ZjA4kcp%V10sFOZ6crvXQ~Qsu^q? 
zYZULv8XLX(6t`>?UKp;?>Qm{OfbpYsN3J4l$gA{yHtw)E#O|UMl!jm>>|~5!gDzox zBNT`<^;26x#uxT(kgw+AobZXhd&am&+*w2r}r#XB|jGafw zzs)*Rk`r1uCa7ieLb6sTx~puJd~1CG8I-IDxxG-KWUi^n2vcVe5z34 zH0olz%=G;EDM=p=!u_sQ1x%*^!wVyNLh9}?v+v=J22o7=qZD(j>T)|yH*IE>)lN?Q zp>!-#ck}yS=lQPl-0VCLInSZkuO;77=P6tpOf?Erl(4^Lq@fQDSMD{ z03AuY`zgp}yvM|KR-@oMVw3eD2S3;^iTzc`8!aqflDz5QC+Q-Q^!F;l|5vvi2(7x` zc92>Dl$QT++W}MxZ(nYA>^AG5S78z#7&-7$XmsSjq0ofLfkb#VER?qiC~TL)Lh-T4 z8!=6L=3!U~80T)HvG+GfYlQk%c%&??m8D%BVK4mQBULt)(Gl+htS{~N24GoV=1Bjw z0pXHP2%$GDzqH--{O%9(5MDT!(o%1eHIE`Bozx}&!uUJM^@ejd0#`72eo}>{19rEK zDa{7qVt@dvKptB0ZribLC#_awu7k{4uICXHaP7#VqL!jkQe1iK+D4T&VKZ#r;l;d; zx!at`2|E`uJ(N+q&gIH>b&U&Ww5?1F;jeQihgEu!4h%7o8)X)>wj5fwSc}xOk$Hu} z(RMgF4x(k@m4ms&%q;3m2<@^ipj*4_|2X$i4X2S=9gSR1Bfa4P#D$(rQtdLKaqTIx zpo)o|$Z!0_u6PzV0Tj9BTKhG%CJ~yA zp{NM=jiB}-5^=l!m|yL^gs!K?ubxQOm?s=^FHs3QN-bcc`QKeDIIwU7;6DbEB}An^ z64_Cs>e~Qy`CD2<5pkAY@l-hzjTeTLZs%s2^M!CM`OfhtO5nbmvIEAoMcg^TQoB?z zXPRtaHf*F!gX&;83_g%Cre=SSrOfd+O%pS$YL4sB;ayBCr=Z+`m;V+MWa>bQ(WO_ZFGe&BE{ASL6&=GuCH4mmKRy z3RBBimn#Cqgw|3djbY^4xBBoL=DZJAPa5?H=*T9l*n^(X6?r8-SMvGR zj=9>Ui+1N4?GND8IA&77D7*z6I3OO=1*K@tUi7MEXk(aiZ{3@L!8tu#X6pr0m!O&D zhM#WhBV0wYeL*+a`01n`2OT}&|zh73txFJ}JA)-Q{0FZ)!sCfSc@duvF zo^$h;J$$umii#+CSU;2)(@T?mz_gCDHx1rNTNiPZB?iwow^O70Cu33$D6s z%+pOpdPVVGtaoGdWr-DuL%vhSlxEV2Zl5uxG-%-1!ZM{7`K|hs>;}|h)uD9Z-5UYV z0on}dnSoFP%SAtV;PT}+GjgEOx-uE;m}RYD6poD)5}AdUQIK&iXoc^DL?1qzWX=$6 zl_6L4UovKo?EkNfxp9MH%x+|8Bd23uoDt()}7yuIE-;cKlow#Z)8+{>K|ltX9l+ZrF$D3scFwW3s99^xEldU`xY zP_C>|G}}gk-&pTg-WdA_2%4H`V+m^-trZz%dhS&vI#iH-hpQkVa&}tC9C@zpbD2Tu z@lI*<1!OdiFQ4it^+wrZz4>zwVse2mG3i8%FC_~&R1+$|nKqP(Ud={18|;grM*x{{ z+=(D9%N$Q)v7m3y$a8E#H^X+uk7gcJjm5FHh+>YY7o+LFV28MP-p78Hfz%g;)6BwC z*VA~j?};!{3)$2rbpy#?^p|AM-JSJ@7Fe~E5xgFQVzCMY)X&=A#h)uzHq-5$^7Vn} zfXS@51Kh?$MSm20%*-v1+VxFlkCVg3(iirQxb?iW8>6cX`J+Tm$U4uQ!{B1|dEp!c z%B#XLX32W??ow?JVJk&2mJ*-BRp_(Uo)esi6)Iw_Kk=@%KsgTsW~_PjKJ?R)rt#*j z`&bs1sKt?eVweMemr(>cwO%GPRshynE9|Y$NyHPUJ;k{*<#Op?0XJVEcA 
zN(ZR6egqvD$5QQPf0d0^S?>uXr+m!K*e^1!MbOpA9q?eC&8=`a>8i; zg{W)?Bc>3MfwF@lYr>;|9x(3AmJZRpMMc_?ql$2Lg8|0nzUHdoQzkegj4LfRW=)EZ zrj37mSkRK@r%D@NJYGcAH=#a?SZYmV(U%=<+M-^PZTc5)wh22Tch__{+Q#=L}5TXxc5`}*DP+9yM{y!#9~?ZrdAKPy2Q&WTdV&c>6w<2MrZ^F zS7&ilS{qza!+6Fu>qhLP`AaXSLDBg&%A;~e(AaI1z3s!s!xvQ)f3RZ#fo@~A3fDmX zv45h~-v}LrUAd>>zoq@1PzpKOs009Kbg{uw`|!hSXAk%75`0|m!K5tgiNKddgHj=6 z@sLhKT1J(+h-NdJy|iWFixgA0=`AbaP*^nBWCh43Qv^~;O!{juR?=zKiY$97ej6ax zbG*>=xiDD+9Fbz(tuP#wty(5VLA`}w$!`W$XK{!;JH!^AKRf=Bx+w{v-#&#} zqWK@l1H}cL3v8T4YfY-F8DLDxiz-D4_fT#Oq-FVRU!-^|Z!nnYF+1~U_3eT=MyKxU zQ2ag;I2!0UPDDq1aGKD7#ZaCv!RQa8Tg{=2vWkrHV%Qj)++`)jafoP)LymKL@zMx`*!j z`FaI)24&eZ)yd9K3PjIrd0hHnz)2H?FGL_(g^&|OPQ@vbmCBkdED&K?;ZMC^su|7= zMb-<)2*qPx&LmnXYUE<|rJ3 z64uo~GDfj9xsBPr%l(VL5jra#8>Xn`=vKBmlhs9*TUj(pgY~XQS^O)%jm}fT^4r4i zJAOy=Gx=?Eo)VUy+$Y?zKZt*&$o+QVR~!Ls=hl0VR%Fxa9snt^ej({1El4Brl(mY4 zsAl1WH8zcZZ)Xl2t)|+HoX~JlO__6VieE&w)9q}sk!g?k{D zPta(O!|uDRBj^FAJc@t2^&?CYKe5va7U#WV^81t*n1|cLIyOH4!~B-zGd@rrq-uS_ z?6lr`Pin$>mt~f)&uaC$dL;frZhPU>^aMCoP=%7$^-lk^6w#M-)&H?t-9tNR1F}D> zBz5|vV&j%cI>!$D6_>LT!u>NLZKz;*8Xc0VN9U!oy~UunO~{{7QS}mk?-YD3WCw_LHl+KEeMFIXaH!;9`#)pvuH&kI%_NY5@lzQ>E2{f*m#)vi#-9wrbjWapj)cQr;_N_iL44lJu}O>*JpB{JX84~; zoV9yL6RW(D(`qQ#U=}xLM7H)$PPDWNyb195L~hZgsBeSYwK}F4U7;TU>))* zmQWij9W;yWB^`5CWVPO9tTOd45oHi-fA^G_`EAz)!q56|IGaprU;YRD$84Rd0VS%z z(W2*L%@E~9)GkpktmQ{KYKnn$H0Wu&+5bS z8Yy`qQ*nSJ7c9I`U1@b2K7Up(xEc5T@G;^+s>p)hrAoCdxQBAN@YUz{fPmsFnFFkS znh1wo*2mB)?y*+zip|Zw$-S!)D#=eB@+fJ4;(){Iph%v{xLGk*?i2SW-MA#%ADzK? 
znZe+hp3dd7xFz6bi>AtNd1gmKEdI5*sf$;CYki>kbZ^2F{kDbcFK zFZ0r-oF2*+kNqQkYkOp~{m-~vD+M{ng1)XNz#93Vu>K5Cy<(GOy$Z49NW$}o^{19W z@G=_D9B6+bd|*Ap#4V?GKIu8N$GSyFv= ztcAW%mAaWf97y4%@pT=DAiA8~+QP@l3_*j&q4=?44_39SMzRYQD45zBx|66z98tET z{Ix9xd)cwB7k75{Q$nW+n#Z%p(_;wD1|RfOK`7qG-cM7NMqDY%7Ygb4XZR~nv0IFz z579WqhI_5^h)t3j3GP%Ku8+)L?P9bo!C)#VSFsC}2+g3jp51GcS|q6q+g-{~Ngq1Y z!6sF(AuXI6=MTH86)$U1I?gcC6 z*{>3%X%iD zU19~@KR`|d@A~RK@p)_;I&W1oVDb1_ovN_%uN-Uese;Iwp8m*}`-BMNjazpQlU>Nv zGlB(&I3TL51fkto>dNFaj${bZSUD$3gW09td;}eaxIA01L8osDKc}{r4~19bswiVi zmUrjVg~@8fz#Ti42-U_|e?px|{Y*X}6zVs^+P@1>PjtWnd1xh@J>Dn=f=2U z+rHH{lo2q`Ou3txt_M+QI`tbv#~97EJM5he63>*xm{8ihqzFf+s?m)_;Skp>EVztF zrE62yA7DLp-5-;OR*dNP{HtEErRa)izo%UyQtlfTy6nKm9;75$HriPzo}p09-#JcD zErz@;Q|#Q>EFS0$0y0%|6B|cg`-%YB$L4?t`4p;8_KIh># ztFx^ODUQ?+M#tm|T^Cv;^^ZVdIp`OVLoG@j~r_mDm4g-B6SB~kpwxjI}Z z867EF%o3hTccLnpF{U=fPhlS%A=sp>uVxT+3E%3(T9)OxsviWJ%vwxrr2W?G`zH}D zHJ-BVaAzMZ3TQ7@LjT*Z==OTrFVekg0p~&n!Ab|ywBP+N zKsm;y)r!5^X(EkQ=b@)F;3 zk<0!?>VJd!zeN4#4jSUutN+{8e=c&VPJ_`}*bwBZ7fd4?LxTctd?SKrA{qPj=FdEN zZIV^)e5p?3!d0$$DcBqSI#{rpJJC>|g6^LYEU6zLkG}FJ)&vTyFqfp_U(i9v`el@i zkB9DYzYrh46Puojl1$?c&FB=N+27f|w?_1I2-e2#KRY(yNEb-Id>&T)w4%kcdN<&YtY$@WtAl)csBV$eMFM#t$ddNZ6*zXBFvnNojlYyKGX0`gNvOE;3pNxr&G!*xt=4=8e+h9yKuPo2o_`%^PC1J0|mItQ`ENv7vUy zIs7$$l~Vhe1l&Y5bA2808hG`8ZGzgB|NH zUS%2kW14JEtwtw6&(k>w={jz|xmBM)zQxM;?l`vo`M90q?|V`wFOJ7=tINqC184r;_!l3hT+ z>h|RxoP$cC7WNAMK$6cKpKZJu`K*8B05MTw;J)w;oOjUUw^@Dq#n0aC23on_-gaPCxBtF<*31+6A0&n+Ciht~@W#qhMWQUSzO?q?!;_M@C%|e_dZ_V1M z)%_1m;npl}h~l6f*U$9}_rX}FO&>7deh#YV$(>PELmrX;p-1F0ssHx&JPH&);+1lN zNB&>kpSLm{DS0S-w{w5qM!C@H5`i;G>YqMfeWoC3U!eEC3U^$OH~q}_RD^SZ9%cGI zVrT8h@*i%{do$jb1yJpXg>s0dNPBdx?%u1;;kAZ$#pL*OS9UDw;Kz>dfr&ds>a;A3 zMU`?n2N!30=M>9+0c`$TbX2_aMJ(C&1}tW>FI(#7&iv5>;ZxhU+-UZ{_l3XxeM~mN zg6&nsw!@loO=dvSw_S!u?BhQr5>Y+z1`Ob`r$Y+k8|ZJ}=1;th{w z?mC7e-rLE=dcJ$cXA|?M;t>)Z#(r!vWIt;iWR0*jOR*a0FAN#l9`P8kYt zv{(+eSq+jyoJJ8is&=sx18srCNAq|1B{#mGvVY4IWqMz$tI!kiyJGe|8nc*T3g zK6x!Ds7rd)eXdMHtPy3yHfg?p9>9m~COxjAzK{*3w)-X?*Ub=Pm 
zD)Gk=4?Q_Y6r0FD#Qwaq6%p>`Vf_es7hMXM{)M!!`MuygB`m+E_;qryJ-?{=*7}W* zD{|*BR|wpgToxNj-!9!X8<-gj(J@M7RZQTcYk9F0%p$EbqiBp9mc< z4en>%3L^17)>P?kJ0-kR?H!PX(;ixAVB4q(ZgfZU&RlCNLk%mJ)N4^T6xw}>wL~#46R+#n1MD0s_#0pE0 z%@$-kqf*q;HQl+6tCK<<`bZK=CVO95Qj;_S&ng?xKw`6%UMEe=AuGoJ_e~=M!S+#xbtDGCDQdv$P)jv zZQrmo61%_;H;4rqmBL~8I`V?D{ap&k*Rhk@rN3lJrar)OLM7hUQP1@CS&qx!ag@cGFi!HWavek=e zznpl1q*U22XI>~TzqMbES1&j4V)MTk9?N2rI^J=b8z!7NCJSqbsC1s4w+kD~%>2?A z_WIa+A{0ABYU;kFz?enjlr( zD^<~L^hY+{dEPftXl7aLXxh|Ge&Ax}4r3Vdd>2ZhI#kLBC@=2F)!AyYwceL@Gn9~L zbfrMH`ALY5`6_UQokh=99Az&Ql6o45y*rHL_QNLlP( z`?_AOqwF0bA3Nc_F)K;1GJ$tO7kD7|CkNQq3RsN*Q~clgeJA?ZW93O3k~2Z985o@u z>5^aFNT8;w#biMcqqR%%y;951CzCGnNc=Y^a#zP@A~thP9`Ix4ow++y8GSo9g0ofoR;ZPtyT607BfWdesv zA{@Yy)&t4`KUP8gXO@86^wSVS`A1eqMNP$rejy=^0rAY88P`Dup4v(R6O#*!aL3h;3_+8(LS za)pNZx$l6%^;?;spW+f8dAXKSEl#OuzDMSf_enzWQ0FpvK-yy|K&j5vo z?RHTww7_ft>7Mve$nPeu z9$8|4g^P8BR<8`O2Dfr^uax*zqo#}YEc^WCoNQ}y8(_KTML^d*3utyXmVk9TXG+ z3pQzWSChtlWZN)0gcteUTp>{}P<3nj%^L2ZkV>uSlXm;q_G76yzbSUC(4%1A z{9&-mRED99?)>309`QGu*1=44I%kRK_uqYwh)G4^r-_U0{%EnrIJ`8NVK4iX%%97& zh4@pjU7Y;rRVw#hFi`W>#lGbMyx8j);;f5+CoeN-v(ppe=OOz9*k~68-CtT|WTAJL zAt*-w30IP18C}KrbUjL~#l~>Dsew!8>CyR3O0C#oeME{ihkvHaNEKwPqXmW;UM$-X zJww^j)gu>S6Es#sh1S?RHaS`ala+*(QYa~WmqC(@?;4A=`UZm55fqF4UPY>}XR&7z z!rKYAFeB6o5}|j8IWO7pSBhTnW_W9mah3^xNWK)^m_$DKk~XoxoZlEMcvp*xYQ;yd z!*_`kC8Y^zxgHS{hz`n&NH>%f_#VU4cpkR0C zRhw|G*kpQ6jRLSRNw_+f}$%yQfM z=f}d?;xkCM8lKnfbgllk@J)Q4GKR{P*6cuJ$I4WoI?MWgkK!&`C(O54c`r_GG@7j! 
zwhQCnWx&9NCg{9n4Y7Qz<#wAPf1ImMGS9zNT3nzEz($O+^x7qdMG|J zU=FIHfLIpe>DCebMY61xZPa6s|9^fs_3EQ4Y#twa?<9r5dKtB0Ko6Ef>en(#BU@;OCYzmbo zmu?fqc`WfAJ)lG}*DD6)IE?!pzF|}f&v4}F#!AjSY4vGv8Jvv@j!Sa4t1j<~a@H+6 z=tam9!yA_2vCX>v`2?4ieH434itQxPZ(h!Hmp@#L zhqfgMyT5$T6UP+HKuG1ou%ZSi#07O>p~ z)G=8B^QNK|!>Gb~4YG-5$dRY{z+~q6y~K#5mdws6o}aqdj60UL#;DwwY?uiRIQL`T zUYc0P_d94=EwSpOmY^vLgp674IKPh_wZsmLM;FDmkb|ba2)&m5DT^ei@xfk5LP?{_AshE^^+WN_Svc)GT1n?yg1WxXXfP7Um zwumaARyK)6^B72q4phR>N3p>NX$DRaIES|sWs|du`%u?KE#JJ1_mzJo7;i3$_mWRt z#tVGrMfd1f0Gb$Y3H`n8#s>Yp@AUTdT=T@YRRG6+LwA085`_5}X17v@*8KydUHW^U zQ`83G-UbML@HPj^(T7194V1&8&XlNV;mz3)lG{soSe9^FwD5R4Dp#UDAd0al{%s>c zS?+zWa-PY1;AO*EzK$ll%dhg2ONHc_{2fI&dA_dec;n-I@|DG39CEm@+SZZ+>HmAb z1}{2bqUffaz?jEgyxf(0;IJk=4oYlXPrR7Pkk!UEtJnYHs${HTY=y4$MSgOJnR4tw zRbYb*KTrN*$v+4YdissRRI+xzizw@E^4RxZ$*G0`QR1+efDkoXGrti_$Ql-|T*4^( zo?SaXU>1H_$;ia4?*a&@Z?X2oZ3azHP#UvNS@P35~xmSx5tc%NtFb8~13zRWNt9yzmo?AESALEG|wS9VFw9Hw~XV9XVpNfb; z@t%uPeSxaKm8E|)Q?84V`0EJYbB7(CEa$>i;dh0NqnS_Hg|)gB_WOQ?DBHm=+V8K~ zK(x9)*zZ0Y5a;Z94;o*`*#F>MtSq{$K?`64kx^oUW;$Uz1l6?2t}h@>i)Q8}>lPHjgOPfsn3Je1&uJ8@%$|7@w**V-U3a9 z%H&Q|n(e0i*`}3dZ^A*ZWYeCq)87#LkKC$xzx_TsiAt@&l+zyRq7t13E^;Nax*j3} zCPG#PjW0Vh^dJM6+h4^MLHPrjVuD!YfJeJ~nLuMDDVORRIDzD^mFg<`KEj?x;xj?u zfhMj75EH7{5ft1d@jojAcOOOtn;6#9s@jhkNWE?VX$F#cl(U?<2STe`d46J6kiuI1 zbZ8a3o__-T5rBu!hv{ylB%k~y>LID`oA^KpKP=%LT+K=NQOTMSe^0^>N<3=Z09`6f z+Jm}|L}b?}>S*AHp**pI-&_1X;-~fpSzj_qw2j0pcQT02sjv~Z7hwl4lr@u_8XgVW z@lsiiMGJEnY1T7rUCSsg5XD_8o|0{^fyN@5IElh-=r}si+m~X``0HxrR*_$IRdrC z<5%OYS-y_e?-|9=8|13~er`4HHZaph`>hd!6ZZ(B)AXAl<~}~8#I!J;m0WY^LV(<= z3N~|pdeqaZMoBZ<4^hv(s$es>Iz>HMs$g^Ih~;#6qAa!mSqBXHEJAjOPVqxcbOb;+yN`*JsvQh})Wr4JK*+(3{(;A zcQk}WZL+@w&POle!M5h~a6d4nh2e3^YQV7J>M1c|UY;dQ!7e4u;**S_E7Zq88|j?y zHE)W1s5?Fu+f2Ntd}Sa7BMkzyqx2^h0_PCshPv?td{hI9el>%bD z0|YkRa4fWDzmA7hDx4lIJY+{5kwsLY9d!y}a-A(vrxVqN9^#{?`Ust1ytpcrQ*Nl&gP2R{qjG2hB-D#`Rj$;V&#waW9sOnJRhkYuKcnZQ*msO z-6P{e>*-p1z?L&?WaiN?^enrE^9A)^xG2gPwpf%R zTXMIS?Gw7}Q$=HQ{~zYwJwD3fivLd_3qc4^w4(6>Mg#?~L5UI}nuP>*A>k${Ubxr* 
z^@4)1s{y%$CQ;UP)wZ@;wG~@!>BUNGp`aBLP!jM$@QOExSDx)wMN|S}$?yG{d3HBJ z+wb@L-;dWT?DNbsbLPyMGiT16Idg_IUZra1PK{sB8~Bg1O`4=8-3GPVW>$+nnWBkr*AMewzn!ni!-T4>aq-;jDpQXK(Es_SQ~=P;*S?q(O4XIZVtW z+{y2o?sw{{ftw&){B`+gKZwhDC9K;Qp5@=b@Zbb_*~EO}zD0efbpVnF5W`>lS|~j{ zGW9jx#|n*hV6So~c&s%*2p92*)Xj=a6z^MEcU{lAYeYVE{FilCdW35} zO-=4n?;pHUtxqj(!0qee=Ci zX%c6X5~9E^S7ubJ7*WTRJjmkIV&SnCqMyNx)Ov2ZR;TU>($H(nriB)eL(;p4@%572 z9?^^GJ3mAJm%p`cxc| zi+0HS`<6UZ$*HSNE8cdGfPC?1XqCI}z4j>Abq3}5S4d0Q)G@t01zBK&Rne01+~eTX z%8k`t)5j?W#04E#eC!kmdo(@-J$Z2bbhFsEnw+{y88>dH_7!rdD*zwgRBFc3y9aOPSuIJ-1Pu{Z%68y#F#KTWlh`jA?Stk2{ zcH-<&-1mX6aN8@i?j)}AY8}6beBQ=s;Uq{7f~*Vw9XIy6D0xyPe=p34n|fK24qZhD zVsjv3y1h%IXNZ1S`twD$x-Iky-RGTo8BX)Skn$@`c|)wwmGLZ%jt{Yi(He8tp!}*V znd?}o3Ow=9sp0qJNiX$I02lYqD%QHA>^OlPlm-h8sJoPnjt;E936sT#K*vQ;rfKaU z#`}>mEdW#g2ik+(V{8$+TKB29bN}pmfIf2P!IvAYfVQv>zTTf>) zS%7%bY!|wYrZB6G=Q(4JeJ&-&P+o>&NBLf#Y};&0zNL~y?#oPT5GC{rRw+NkDpWP7 z9MN6QwiMEP2=|yQ9+sFQYf&L)KyMAYh@xilSx8~sfyR_t*quc8*CYVwI1m~@*(H6L z2N=*(xSRHgUBS-N81#~9tSik8dVfq&_+`GUFiHSe$_ z$64I_$s?&HYN0o>mwB5>dETU;inGic>iZ#05B&%lJTl4BAXa$#K-zzU`!cGb+W(x9 ztwVi7$QizpoVTQMKF=6o!hY1t`Db#n$K~s(oP)?&k;=JVIgitQSp)YhIfGBx9-gR3$0E7!8yn1j4;cQ zvc$)dS`nS813%~M_qYMKIabL0Zi*;K$?#BRyrs&&~*6!FAcQNi`62!*lnQOeA9uUTLPtE)pX3?!9j+u_0-KMxfXAB46TnGGCwqD^frf34``k z4;$2J!MQpXz;43c_~+m&iTL1sz(Tqd$`dnSAyZ-;l zpScl#=KAz~;CP98_kpYOxxF(DUe=x2XP#5O%5%@kG70JXz}NrZc{6WIoRyxDRi}g&9d*R)$DX(Qr ztne(E0GWtdUxx)C*A$tmB3I$0K+w&6dKi`PqY%*rcjLrY5adiCtBFH8-MR@Fnh$)7 z#wD5QkEg=6M0xf7D3!E{BsavhJXRE@tX>`b!90Amtr066O9`0|UqND3iZL}PFU;g2 z>1uhCQe%Z@kWs#v{XEpUrdGRdV~(;ds(jT>7V;N5NINv9FQBB^j(2~_R~7yfs8PWR zK5*R$#ozC5p}x!qQ}sXj;+X{(*_y0XnTZI9V<^bv8H#LTfq{8(lyHNNS81M#G~$%? 
z0x7Be*0q8VEByVD;27&Q572OryWtgEsNA+-qD28)qpMq1K`(N?q&E82xwInDhkBn7 zpr~(Wg`vHhp~m7pbd>bH34BXn-)?oE<x!B`h^L%lFDg!7#%2p`#mL9A(NyoOAe5mN69 zcxQ|h`b6x)QhwZF(DS?FP4dl{SbO!6y%F;%$E|t|*U;cu=GK>I+2?-H{DE?wSrEqx zmw=pmUkbH=45S62cLQ=b?AShIplCq)N@bG=)yFcK^E{vDn+))#a5)}ZX!)at1R6&b zz#7QpQA5lxR=>qaARA)`@mhqO=kWN&m?WaF^#jq* znv*nKR^Xn+3)#<4XP-yaRaD(E=Ak=Gctm|U$&9wY{wtnS-LVUayDef#y=aImtXzO@y-ZNq;KIQs1Fl!1vh|kqD99Wj)DK$PN5qj zTgxMH7QJ*Y~I-9SOC4N1D=ut|(PN2*- zR=#qZStsco9$){mtOKRipH6EJj4oOxPxOui$hp42oYk|Z5!M(2C0#rn)$?5!Pq$S) zRdXxW{TCA3)B6<+kdKXhkZlT_4%uqexX~8G?CE0j?co6tZa6w%`L;V_Z9pK3_GaBI z@TtIrGyyNTR~DYl;^zpIA{4$$OA`5*hR_Pfm*{%AOOsndmrMsnLn8jponibSofXFF()4>2^2*T`gFW+m7Kt|g8G|^yr5v8~~!Z5$F z!SwS0aDkvx$Erhu8=4PjX9s!zI)t=m2PyoLXU$w2$=c5NbW%Ix(<6UqZ+voU?-SJj zX@Htaui81{JsD@1w=Y%-gJs(P*#HF(g>jxN)P_VO>~u2DElBqQe%$VqPD`_-y)ETm3#;3v&9P1M9Bsy+ zoAomGJ#QRLlR=kpaHht=Td8p{=CE;q&zumDcwOnS;&u6IM6ZYWNf0Dq1T~dF?0SU`#)I@YuXM>_rSG$)7unL+ z>nG`Cu|l7;>)BII%B}9dcu(E-VxmragS9V%%50|W+0GzH4_x+g4;#2kPtw3Oc~1vg zQvbzLf{e&zYf&pU80T1iVGK4Zjp$e?t_gQXj>@1019fKusjZpDVS<*LknS;W|I|#K zwx{}=p3?3jvuCtw05jeIh|4JQ4o>y)6b-)S98(4Xh;SD{VxfM1ECTO+!Oo+H^E&}V zk*Le^z4Kj_cz>q3@EHnuT-bI`32hMtS2dYm)BX#|o@zg$nwFw?%$ve)Yc@-`P`-JH z25sDflj-3}5RZ99g6~t+6)XO#kClTvP1WNANK_Os0`Ge1n@u|MGgkbGm-Hk_X=-`= zZR(CLq~5krPj$wBqi!lc8pS(}9$8Yx|Fk^Nc>v8pB2jsC*^qW*8n&f9nSNm~GzZm^ zWWwUN&rWVKBgOyR+TOgDwik6gk2XZ8rw^a$JX%4=r2m`qX!%yWWD2R&d5NFUX=#dC z{*wN&d>Oq z74GyS6SvdTjJ3h2_aoRd`-U_w@qnjMx9&lQA-KUWY?d?JL3qTF=MD12F*Wd9_yEsZ zrrcrjEPRFyS&kO*1rOoani5tzdmAJ!^8M0-aBjE_u~&x|KKH#ycc zW!CKP%;Bw~gQ4z&wh(BmGtOCxF7r{aXqVr42w#r3gHHLI@`VjVO)c(04Zos;CdvyJ zQohJ^UDiZV-z}xOw>H*mvCv9JEhLEK7U$RXk%q*n=J8%Vo?sq-u1Cy3WO!JQzE;8* zoK&|(o5sCgV6@|89_{@$52hJ^Wkw6co>5EP$MkGnaF#!}lKN6BWcv1La^U#dc0Yf= z9Mv()^GA3))V;yk8p2c902Ggu8zuTvQSBeR!g(&dInA%Ex?FSRH56QIZAeDO@vZ(&>X z0?KQNgC3YO4f`q2VfIW%$_pDG9*K4|uCaaIM9!X336J<)IpoS$KgA z%zP`+Hzk~!xLQ9al}8r^^nhI%g+iGK5|hkTRa_CB&E`?@g=`aYh+0@n256yry)5mkFa54Qj*jxG z5hvU_GZ*FFFI8!-_Bo202^=2l^@1$mJYU80X|huDj6JMZLN+CyOL@L!f_)x>#96_u 
za7c@OP|R*T!pAF&;vQSFVuK$#yBET0;_lxW?wx#pg2{h^+}8?Nf8(|kP=wybvU~U} zI~A-{;RL=77QGjA#=eLAAZpdY@_`2fMX!cWsOU^wh^RetQf~(Gk`?Z*df4SU2KNg0 zH69^hco-NOBY}>ExbwllVwXT<`bK$Q?e5Gk$pH=X(s@oYpduF3@hMOb3Djw1vl~%U z(rXV*F128vrnh$|$Cui2Le)6Wkbtui(c(LX%E$Hst$ZFKRqF|Ds4!)|Q&r4p%uAG* zNoS~U2RTKdocnmt`2^eN(}b@^!0dXOUFzP#4`Yhx3R3SfH&x}c_n*Lgs~hZV@X+@L7p(e*sh^&;(WB>Wy- ztI6tlLux32Vac2!tId!uF$OsWCi|EkDL^U;iB=^R-vH zBI`IfI5Aj9(IIhtFG&3L2D*vh&^gUT?YnUc-{{0g_-d=NTtz1&tqAIfdOD(sj$p3X z<#wfA$w5}d%?5a(MET<7az7AZYnC);>ToY5i59_BULN_v-N%6vLfj-Cg5;wO77X3O z-NNoNVOQIcfAP+jN`;T11fJXuHgh{UzCq7)6mN&i7wMR8Ct;!GUs9o$_LrwOFLc5n zT0!uD)744#%VD7tva4?$pAr6HqPyFLJmzIGIj59Iub)vKt(?aqy{bI=WOW7rREay3 zHptv#tTAVsmyh(a+dbL5wCSbIJ=(nN(@U${m6t&D>D5UfvdBQTr`LH2y`_BSxpMAK z9O*7GjY@tDe@MnvhnKAHSaP&2Il`9oKAMu~JqB@4%{Of@8JXIxDHTNA`&C1~6pLmJ?l1o?t`7gUfT3T%C3jMe45_dN2jAWdV z98ylL(}e+6y<`GcwlPu)K*0*9cTQuZob@!j7oXzQ#9f8{&1DlZ8GO~K%`0$FY~va~ z&U$4t_~Tp25{NBtdkch$+d5@r%$M0ZT4;gUgOQBBzXDU^|0$r}ry^W3PMoWyivkUa zP6SHFxrDiMHkBe#o!VJ8O9(49{ux`6^Nq9i+=fqZj9io%XxN<_sN5Imc8Ir5yk!Tv zwK{7jb`Es=Mwfo599I1-bMXyv&5K?LZLH}l)Etgy`gZ zwBuZRDSe9vM=txEOOGLv9xEYktXBZN&I^uU3vElHgumI`1rivD`B>pDuiBdz=*(6$ z5BbT50m+8~V@HaWcmEc{Y8JC*_+{ve5jr+$Z{DDNX8jt74%=%MyRq^jwxrnR-V;u6 zHLGu2?9A%Xu@ebouuIW^&i2StdzRTp*Jha9@?R4DVp-hFQPw0qE3)6YYc6CaPT*(K z!{{3hD7SISejssa-C!G6pKfw$*?kam+%q;hQvsa`q`Zs)-+wCClr^1Qdc9C*gw_VD^#krfgC4TT51hsx6ci$$h*&Dk+ za^k{l(2tf~ZR}$dg`aAZ-cSL{jkvAy2pJPei%;~|i(fq-*bbkf$m==^-= zN(o*Lik-<^M-2G8u|6T{LqwDs`eD@hPm!=OM=ie<967V8I0Ve z$vhajO|^3uy_peoJx1L%qCW){{g-U@Hd_-vQr3D)wc3w`*EY^QjYo20*gldctCdx? 
z)5_XU$W}kS^b+lu?`ylSK;uNx`c0t_+2sE0?_L6wA>l3L&Xho-KfoIkq`$=b9pe2K zc)tU@-+b>k&-=~wezWbbI31`ptMt?8pJ!A3*Lc4(yx%GIcSOGT>p>e~p+&HQ$9%6b zq2eZ6&^ThqY6R+R-FyPAb|#2A;`tdaqz%d4Z0yZxds=&a0R5afxYb>4iRj#=MIZiF zNR6xdvDWle!m$C#ly!XI{mW6_JH`>cj~VH`Ut!;WTk5@!w(pf#pGb9iGeamt| z=t<3zW}#Gtu-)li`!~^8LFE+AX9+s~PL|T^`zKSK+BcYDQXQ}#Fs#8u#WjYCjZUp; z_0ZzpXR%b22kg;2xEGUAT=)zmu|ZLCF&_Qayp7C@KV%*d;)YdEU~p*Zf-{#nX>r27 zjV<9Y#5fsHR{goVJ8QGg5QO_S?s00d$wRRNofeOO8ctE`i7%JbM{t_@PcS-o*PvGf z%(;*K7WQ;1n%&A%C?%xi$ywg{29_;jG7&R}f*CV(pB1_@FX*kb}%m*_l*F>sQHxxMWL8jH5 zwcK*22mWM|ky217U&>pFTlrVP#xJM1g0nMi$0As90D71jBYFO|OOceD+@J6kMIz}U z#@+52l%T9r%ibK!R{T7+D)%C?82sTT;mYxAj@n`8N*}+D(-d**GiM}eeCZL;_zuqc z58|@s8Jwd*AENU{(%U)P%ou2rD@4v2BPfAc54&Q{?Pe@A$u~P^jI3myx$jNc2Ql~c zq&!=x6WB_v6FG?20>7wF6-Y2vcpP=oeX#2ZtI%Sm^C}k*)P7jManUndbx&e=zBvWK)xLG?XlKM%e+$9ey2Ss) zFI-K~Q9U4i`!EpVKjj5%2s;w(y$1Tj>bdT1B5>g?n@&8)+8IWuC>qu;+;d#fnXS5uy)0u~P7@4WopxysfD^MZ zq7D$~cKI;Kqga#N-h2>WM7zp?;ux4W80iY*=E1=2nU_?jZYhQ1$AMC6|Kh2x5YwI5 zTisI6sAtGL_Iu;*lP!~GH(Do4Vk*Dnv+Pl5IESD}1Kzj+ZNx`ppe7N>Ce+tungybHPAXd2NNP`KEHAvPLx*v>I{>AjmK$_#KIcWVE?m21> zXyU4b-wkYX*GyAy`tBHe+$~*QdVO#9GpMtb3Wv+5FV<@nQ!^BRBdG8kTOleSToj#B zfP@HLmU3T;R^_=1|3qbc)xjSP^4o2RbKP1Vj88`Ym#2v)#J_+osOLS?Fa{%o+`6UL z0K>%2>%nT-+o+-QQU?DtzTWv8=2c6j_Lj3ock(<#38(gF&_yC1PbLEPxfiQqD5R6e z?k9h=FPAgiIpsy4t!1ubgvZrgH^+@zvwR54n7Rn=)1<|tCA|sHk zgn?pz{DQ>A5(Uwzui3-y7mD?CfoY`_(04W&@#W0;23p*Idus~`oxYyDqnp7(LZAR!MIAlkP zZ(7k1)!zQ_j94du8(|nRAm^>(2c~9bL?{2-?R%3j+?3{}!&x^af7oxe^H^7Jjv5VS zl`^X{UtPrWv$DbRjN4yM%?0-PRGuA~DoNK-z`sG!$a0PE z;#@C}wd+qd+ytv$L|%rVtm8DQU!Z14#?nE65148L=7i-G91J)RFQ*zC=G=Ews=`SC zO2lA?kRfW}twP`?IMdtzHTtg^FYtLQ0;vgsgnr}8G2-RNoj8O$3t-eC8iF%KiAqn0TxfcrkqIPLc!Pjv0~`Wz-&$CtBq0nw)E~y_L?jHtP2cc9&Ld`+&#(af_ z$T^$XC1rJn7+-0(O4F?S7g{0leT21SKMu1h|z7jqrf$^AJ$b!}6v8ixhY#}ZvOPVnq zWfI2WpNZ}=^W85EF0#$N7m`rLB~;;mR;3dsx09zyc%nQ97-A#Za3(p6o)ezuOL1~G zEY53lCwe$-6Heu~4J$qQfXy6!b+rtQ&3AOGK5%;INc5xQ*^f*ngg^J+5$ZR-r23Qm zlKRhlk-gP>GxM8=WrlOB4_xW|d}H;20US}!Z)TL%W4Ul6;pg}{q;>xknzY(|gL;|7 
zM#lzMm2(qadFE$<%$>wpTSXHIyc&JXP}Cl~lNOZ8zBZ8@qO(>7D<}1v1dMfb!D@G~ z;8b##Y4HJDU>xOIB>IE)^n42DqdK8v19YPd zmO?qZ>m8}8cY)UAtW$5V!dJ@f^KZ`T!#=hm#md#;`rjLYp4PfvA;F&d8Paai{@?g7 z&+^|JOzud88^;?p)2!soo=ckPiU2F7I~}HD6Cxi}!@b-mANA<-hEer#bKne=L3+0v z)i=}8PVHWX5Zf2Ds|>app)*aR^R)}LTwKIG;?%E*C}$<%-Du!7y3G~{bH)ZzGTZ^I zr!|uQrgX2OzP6C2d7_sM&DnF3ApcB|HK}kD{cdT5uGZqPom0!2Rh`aq@p-a(;cqvR z<12NyhjKSlUi4==e%pQhx2F9o)Dok+qmdg-eh5@eX)7tKUU2HV@+JO_u&qwA54IWu zI950}N0jdM+8iY`y2Al$aEg&bT6DDVxQ;l@5L_*yys2|9?^2t8q(MOA&b0uK<_A=9 zDW)BNRJ~RKV~WMY-{A31=sbh@0<)~#<*p(%KART|`iaSggK*&g+`Wf%f`&ll%}`cL z65UK_BL##!NI9uh`434Uk4W$q*geq7~J~1G}&b zkj*}FA7wLB-{IzY3T$`M#g`79>icWLG|MFMU^D`HwsKu~*csW^<>WH^b9G z1D@EHV!e+^L^@M%n_FN%tTP|j)E2n=&6i{BmwFOp@pEdwpja6f1`ZwcPFlFizakjD zy_#>B#izLQlf1;Df)C{ixRrIBjJd(Km%^_j)mE0(?OS@SjDCH*c)0+5n%o)Aiq}*K zm)c<4%b}6PgPb=ey10Kv^~L={aW~IG4qXaUjfj@^=axW(^5W;$$%<3?h%A)Piy{6h zrkOzI#P1>brO$Q-CUR`0Lf1qm-_OLlitSh!G-?`XRtU!>9A%+%`I_Rh9;z|AcQ^Cx&a><@UK}|JxS;fD>};=qA6{`cue8F)%(=LEZ14lY z{ovP14No@S8t?D|WpppGRTDqM;Ac)lU7Jc8g-mm2Kg_6SG?C53P%6O6CVaHV-_8+* zcAyr}4uV-;4^mGKvPezm+3^r(A2H|EGi|!UUQ9pRK66HQ%@9`XPf<&T+s#A1S+&HT z(FcovUxU9>`*Tyh`H;v@VilTcBeR&~Kx3WU)yM~)xZE&+-6q)11LyAim7Z~bd0h|D z-aFaAlYVwR6uCmrDQB@ie=YS44jf43=2n&qJAA)W-|OA(`r0=Mewl&l%k^@`3V%v2 zx3?&+N@wuds(j0zs|Q81g!RT(lD~*oW7qE^1SA5F#e6R?Q9!j|(Bm)G@f97!+w*i4 z;*EW=a7$`&azziSB{fe%kD27cmw0C;`OR$owp))JF3eKvk#_pJ#pwxz4>&dP*W6h4 zu6m87)Vtm=*lW?;h>6cgV6=NCo_0&AJ_b$8)(R2Ih-*fBzZ|IHQ_6?-AJ2nX!)(L5 z#4_h!f$6^ckm!~e%kAIqy4}pnvsu)Ql>k!|hvb8idir9z=uCr8)s!78z5+aw=9Ik; znl`7T#v96-v?F*Y&eCH&>5pdYLE||Ngg4S_*iZpl%MbhwA zx)O1DVhC@sWX(1m<}Vu$Okr$v<_HC!DfnR(Axx-zR(zX}NfP4qE>!#~%}Cr}!-Vg$ zs(k1OveW3x0b$(riCxI^WK&;5=xmkOO&IPmV z!3_&|AiNK+ym|cUhiM{1rsQyvfC)wGaX@IF?8+juJb^3$AzE{p3+g|D&Sl z@oaZ0dg3oAYn;5@_a3$-M;P_rclHmUnXZ>_b$hFSVuioyY{)0~IxPq~1JQuMgrOF? zA+q5yUdk&!_@ZX4%3^-xT)b2Jlxk&TD10Ty(DKaWXVw<+o;Dk{CS^12QntZ{Uh535 zQ(K^!HW{Abb+*4|*v%PUbV>O*d>O{BwKM*RvJ!7ZW|im=@3PG!JdD2q-AVV+P!@^? 
z)t5vKvj}kNjD`yOYv8o_?PPG@Q+pMUwblLIFAzWYStfCo3{Ff1n<_4m0WmJ-dM8u{ zPQS>|smP6TME!yeP7Lzm7=iENn>#_in=`{-3gT=gd8YEljbc?k{vwNy6EnkKCFQ$Y zZ;`V?G|#hrceE!VJD>6i)ob`)-JNZ_G*NC0I75pEU@0NccC_#$8D2&MkxXadgq;hw z=H`cgV9Y_WoIOTe!OnGLqBn9cmb1%C2=z46cFLMM=pEUUJ`HjWoc4IaAG%+ln;0l; z^jB0aBS!iutZ`o?-7C4Q<6 zmw{wu5;3#f2c^jBDRQ{UIZDZ5UhUSyp5;ZSgmcCf4gNO7mP(g#k-^_WS&Xq4(Z?bL zZDrdJ;};Bm0YyajH`M>9^U~3uv19xzEZUmtjhS>`29?HgMzWEH?C6SF%j|m)fF~(D z{D2fz@CZ$kwZm$S3U>q;2J&oij$%O6X2R!z=oNWfLXj0(L|_SxWH`P`f(MRwYHRFM zX804H~9hogW} zhhF8QXtdKTs>{Ipl9GTNJK49vd1=KOaAhs3XyNwqBF=bT5(vabBU!eFyOq~{;nbc2 zw*Bc!YWaw%o3K3onir2i+2<YkTY9RW)1FGSR`m3z%XwX=V#M<%GVu&$NSiRMboQyI}xXr`Cfgoh*~AU zM@O>a1Uzcig`h)kXmVm-u$cGPe0#(#Tj|vQkZW{h)+2$;rnVMEvUWeGlEgCvrglHp z)00HHcHhy{9~8seZPw$j75UnIUXMQ`@U{E2o+8S*iKqBtFye~7+%6`n;YCWimZZH( za?jE06}%D!o{3av0Np`quaJJ;Fi=IkInOqQwMHNnjN8N!1cffcGDAlyN>es4Ae#f( zbautphX;hYi99Alh`FUCt{zr&vuMt3IpQ2SG3R;*_iGP0dQ}9thAoZ;se?Kf1;ISlsaWC@ipX;{gX|4 z&#Oq`BWr%GVG`O`lC z8T_5kpP#=m{7vESdj1yiCy#{h= mZaOZpZ{H?5207^YeRD9h9=!H#>V(ZL=3B{; zlwi?F+Kp;XYF22H^1_10^rv66;zgyBoVihUbG@?&();FDqoxmQ#0N!fbQ5H=>?6|YwnUsDQ)vSbPvsBhKXDUt0uZrRBB-U4@~3Z zJ&_iN^RN8Fxj>g;&hh&c--a)fnX(z^UN^76c*_-p<1vCn)Y9k}mS_GFVY-+&VvWH_ z7lkrfSYiU7|IK9a?a4M>0Qr5N!%7IajJj9CM*t>Q;fb%*{8t~Ehs%CGs{jjG_%4c3 zgLq4uSF>xX@-Z3x#)4dv0(t6Z4E!=W`ZuYhiAp-xy*pXF+)BsgsSG`p0qeK###B=D zQ)Z%DDWU6Qo8XCTPsh^S^83;9mRQR6i{k1Za^JtxB3Ab!3;FY|rh)o&E+7(5>wugV zCXges+U;Ty2|r4W-`cFu;!wnj_$sc(SW?NTQ^zo9* zN5ux8SsMAWA#qY^=9k+K1}Znn@wz-ZER%j`7F~_yXqD?SO0b)Cr`Rfg^)M$RbQJx) z#vggt1GM-biJlz9r6c>_ib-OH$9$|o_YsIOP@&gqq}+RV(DhIG zBfEkPyM1G#F7UvF=!+3(ykZ%k}(5$eEAT>(&;b$cLle_IQ9H(@*elz4Dv zvvO*)8Ap0<%iyMYxhRkZtAK=hJ}=M^&n~Zge{6K{hf=?ty1VHYtAd*@sk^%~WH;(l z)K_ef9|FFkEWTiQ0a(V5G#oB|+hrDLI*VuD%&MnH@(t2uxKHpumpO#sA4e)VIH=1I$)X<-W32 zg8Lqy+QIE<;5I2ya8EuA?#CWnoVHfQKQ*u)O2T$xSTPN*5d(87OC*gwZQTAtp|5jp zN`l>r=Nx;%m}xMh1V#>|ev$+$e{2nqXNkJGFPnqcFO(gvoK~;4w9e zi!cM#ZU>NCMMDglC&7V623StdX2x%W-H`}eO@+nxLO@Qv$d2|#i=88nSCtggba>rQ38asCPYXL&JgTFV1=1KPx?jE7(?4#n6#=%Do7J4it}+^KGe6<(Rf82AqZ 
zUrjU6@0GXX1+)=Q%TEj~vpsLiPax*`2LH0`5GIhR9{vl2|9QgyJcIutTgP9wCh@=6 z;(y+y;IHlJo~8kxyMGYZ96F!|eCpJG&BM58s0gjH_%hUT(-)Qj@O(Aso3)$nwl)r% z+-e;u@0VKqb{Naz?=hFqzeVf!q7jh4Jh~u{J6@(#R4$z~CRTpY zZEtATQ zlQlk>NXR}zQjF~XB)j3C^Jx9JC_Kkhu7t3?<2ky(%GxuH$hS*?5B5_wRvgqa_BxeU zm!dm}Gu0_b@AAksDBI?0Da>6ql8fob7ePoiHRcwnfl2T$F$X4z%Bg3TkZ7{|+*2*a zrsPjm68&C|5|ri`4!+i$p&?nD^Iy`N@#}bT7u4`{5kO#5Q*w&Q(&{!V%NSm;r`^x@ zufFH+Xg>GluYkWzd{@3DJU{8Z=R&57`1_7N7n%H`+0kHCj4pSd$YNBtsr9ANKQSb< zP3_$EPac?^>H{&q)E<@t{u%tP1J%R{?e{PIZH;A-Z_A4Q?Zi}gMC_{mX#TtW4WFOn z&)ntT-WIHEG>7L9E9KF_=y9#6HxQM1F`vpkJ#IC6T+fV9PxQFQ%Od{)-r~O}F047& zwz#mALpM5o2Y@?DGjoTX6w2n1Tiq&y;gVIQxRiF#ed3oBFoj9GzC5~mHJRjVF|#x} z>qQJJGj#ymxBpu;ZU|g@O9Miy*N@SDhHIZDW9d+T+jClvy^~yzaPQSl%7mLsQ5c@k z1PsQfyPK99b_(~@wSq&ZF22>i>*r z*>iq#V^GEFFP*uWSjLti6YzPdKa)dkA{uL^vHAYUv;l!g-4r!~+f_EVLp|~h!+aG- zqdc?^a>Wu{JSY%-M%e>x+vs-RJL&`zmpoG)#3he7ezJCJrJi~rOYDa~*3#{@BM%nTpt^=O@rbH<=f_*U z*Qv$a!+d;mc4$d_IFIm8)OQ4R7n`bMqW}$aB0VZrlxMyV1!(paxzegWI*0M#8PWgc z9y1nF)HZ`8$RX9jtIdTU6_Kw}9Y`l<#4ki#h_$ukwLKfYG2WjE~ z_#3so7_9j=)45C993Dcu_b5(4+`VcUqUeKhMQ~NJQ938mbzDR#v7E)&34%rAvO^ev zrvDesrCwx;6}C1cQLI~RP&As06Fy$Ho3CKGL=NJZ8%Mdaa}ny2A&;-~e&KFif@TmF zJrJ99P=nf@Y<1lc>e~i%SsBc3r?NEhojQdgj6cdp_g8%B z=uaVei_Ho7zAto%4btI0NG{8y?nCBLyEfq~+!|ic0*Ko2(%IscHuqsVgQj2prgn>r z9c(Z0z7YwGvSqi@wWuJkg`x0xL;v$umVg={IGgp|j8hxr@)2&APmFt6_uEy&MI zKtr2_UGZ0R|IIx()c5QLxQ|UnOo7u%0txYI8FdvQ_Zo$`V8_!H)*>=)Gsr}>>x+gA`xpk{L? 
zdxP(uP$`=MM_*7Ho7fV&qSe1XpR&*Dzv_dX=KG$#m?wYrH`AO^Z>~6!oRcodj9t+Z zThv-o-PURTw?($me@S99qP0};0z>Z_#wd>xxZuu z_rkrz5!_&GNvF-3N0=I3~k_(D&2>V8KP8b{@`{|geMa@X^V8L+*$EkRZ1 z_TO=NYJGNOV;bh}Vf{iM4x1j@mDN0~+s$wJoejA)8y~6p=2+);bH8xRw-xEnU4r)n zGBCgD&K?YpR3GP1Tq`i-d{I;_}sbv^9=G?HDAo>*5K6TlMm#u zjAcyS6r0do{Y^LLwtC*UlCm}%*wqKl=2#vQ-d|mn$>SpX*onuvJW>`Lj%gTii1R1_ z98mB%wc5Hf$kaW}fb=gPQhCkQ^X(NGltR$hoLw-}^L^dSx#1rghHSs59^ zPiHI1jWylo&T{H*u+V1R7`}mpgZL%HoGW$&31adf>q&kL2c3H+}Mz&XP$*(?8<<=G)D4=Ug4ScFrvGO{NzhW9;CYvtdhQT)u{pP$x;)OrKsklls=roK-o; ze_o)os1~rLaNsf!r;nI>!@N1Ostm+4?Ke%M!esjN1)-UhSI-IBk-_Z%U(5*zMwZPd z&C1YIJ1NPkshLDGmSgjq5miUbcap=FD?hg|;W4ID^3xmKM`x4SX5*_V^)Vvj=9c*V z6hN=Km@0qHWAbk^?|b;`L)Tu!Uo!pLcF$s*WcuO1YrSs?e)D1J{{Xv?zt?!yeSY72 ze=r_#*90TqCXPzWC>Fr3%Q~}+GtT+`>KNS8Y;Qm(EH53N2^QY zV6TEtP|)7>Ym3Kj-%Hgs`+c|EfT6i2kh!C+MFmzY2N+igcXDFC))U+1l)j15iOp$L z%zUn0rn+kSKsm+e1f*{)Z% zoBxm5{jFcC9~ zg(`MKfD~({aq5ocF@?MKV$dQ`LrF6p=N0wQz+hRy#`pj%Z#L`$gX$AA{&$OhBvcVCfla+0WvbAkiluRt(*wwu)^SJ|H5kO@g}8#_roz|Gc9-RBG+ z!*ZYSAdv620&+tpeL}ug=xxDj{v#PQ4A|=q1KY)9*HXQG$DrD2p`7ZNrf6W8I!3cE z9m9&WY#si$M`42@z>H{6^nw%n7sx@%`qJ3oJeobei`|#rLKCvV+kcgg!AF#XozQzI~^g zX(>%HH)YUh%EdoqMDo}(Bjf4JLA<~2jnK#Ub!5{YQO;vijobFrsQ0K54Ga+xvLFI+ zZFELrGL0z77d0egj?$LmPScX2mz~(VmK;qZW5WhPC9}17&~7cpvPwK+cuZ9fF`C`T zf)p{Dlf<~k664XxTmGV#7e5idO~lxcNjE;7S%B7-%?nME?!VjJ@imQFjy%*v0#Vr= zoTK!5(iLsS$PISUwJBTl@&YcbXG6a{GI&oQHj*$f?A|BN-Jp?dfN1=N`nIF|VG^g1 z5ryz!?%VB71EItj?ecz)gvxx$<~PCuYt=RFR$1*zeoz0N`XW7d5V=ERjr55hBP2Iu z(&cb_H~Z4vynL&H_CLAhB0+oHLc=5jfF_mMkcsCsFGe-oklDk$$isyV-L}QKWsyTf z>&Xt*yxc2(7pZdQ>f-$PSVoXWySlx4apobk>#UARZ6T-b3nS5VWU(ZxrhZ$^{t?dY z?XH|oP)J0sQ;q=valy2$GnPyz_wfV-FtncRviCseg#-|H4Y) zqsTwe%g-X?R!InWXZer?*Djjn)GiZ8n0Fd6#HoAQmO7GBC#JD}j8wJqhW7n`b(O)o zZiOv!HbvqnRcdHAr*6KA%nQw*DKT?ir}Z2qdXy+Cya3`b@MmVXf%!Z)?=NDHM8Q0AwrJqQ$!zNXsH4Odrr)wWnM zb`Ayn=>poK>reoky#g{XLUCM(;<;V{DE>8^B+t8C8_-8&P%VhgK+$J`0M<^ zwzi#oq(OOzrL=BxVGBLE^=6j0|1nc4)O)=`)VC9Niv)f2ue zCOL@8AT=rPJ8Dua=ea#BAyV2Vc%dcDyWS`U_?RTMq!3|Ps(#N7Rnex!e~Uk>rZY=9 
zcdp?{`&QUZffIuUb)5UD{k@DEXaKfS8@dJ2ihDfYzNHr?^vyXjwywg6O&f{|XcqWfG^Qcjk zYfWu7#(UH}t~Ll49Ow&1xGm&j1MF@C*>fn%s*$Am1Hp0FhkKTAPxm zP_h*@&#S)ru#(=Ou|k}X*~vj8cV9}MMCtWjX=MKJbZLY9_saatRI? zAFYSAdk6Cx^`8Zhq9hQ`XF)@n|FkAOOT+IrpOLTT-)8~z2f+TXQrW*ta^%cmRZq1g z2U2naC6Cj1>UnzgdeEMyo9{uTg z7~h@&-dC_tKgK~2;VYk+h2C$7G`;`-@$XR&x3>?&@gLy6+rwXY=ODV1_{)c(w%z+0 z@A(t&_ZZ5F2J`6ZHT+!zb1q`?eBAr}oHV`v|MBlEa7>$+z8g>A8h_*~wq&q#mB)&= zv5xT*jW1U7AC@LH`;WdQKZwI*xf~|7ANA~RhFSaXH@DOrJUtu^MiuF2~_00j?jLJJSB9XBrxz;pX0=DC46p! z72WovLYzJ7Hvh9^!_Zq+&kt%wS$)mH0Z!eOot3i;e}`Ews-o^5&XFB8Er*I_HVvPk z^e;bATSl@q{vHdF%uvt7OznR}cIYiUrC4jdU9fAh z?1Z{ieQ7~3dJK+N0|J$;Yz+sBb~!aW_!el)I)KK_{s%tXOAF%vB2DnwllCwFk1Vg8 z_I$2bChhPUrm>y5c_54J^Ll4(wzuK3R~s&$Rv#E>V_%eja#`j_$~|bypjQk&&V~Yt z)EpYHtQQ-9gLnx4yKQ5_P5OV-92ywPsX26J_>G!xGPxl#@p9YNgIIs)h~Phlv<^ad0O}e95A>otoKMdgX*^V-`Dy>as-?URt2jP@hu=2Mr_N z6urFkMeL*MnVS6ro!Wnsu~-{1p^$;6t{d<##-m8iZv5Z|b6y#XvoF+x|8qip`M+!U z*g(T)*@4QaU*{Owc7M&%{4>KLW0h@n-}wl>SiaP3{)B&` z>b59Fo}d|ttM4)9$k0jOWmE3{j`^--<1!E#o{znc1B%4vp!gj~2QXEYsB3=C_SGC1 z7`iAPfl;ZmJ$lf~-|$g3hrAp*c|fp|-NSy`emCwvwj>^CrrogjpYZeBJ*>op2l`8gK8``uVVs{(K7e;dPWG3&X3TTQ_f`O>jU234KYp06q zyC{E@GgC#|of;PyYBlW9gl$4QhmsiaLr~tB)Iw8=weiDYQV8Hirn5&!Hp5f7xZM$^ z;^#AX!xKfaBRZBVBZ#tIvD&FSLItK!K%Ryx`tnvYq_0ya59s*Oyx|{YOgZLF8#f0{_yO0-a{%_Zth>~$P7ZQq0H(J~n(EAH_ZZF6FKM3_j$J;P%s z7D>`He2P){J^%Lq1S_9UdWDro`(e!7%}9iwC$Dt3fT-tIwi36p9$a;bviiHyNF0!h z{~CV?n0S^scm9kAQWbq1r0_1wPI;G^6OTd^O}sR610FHYR53)At!+|vc6s!1y|ORe z6OyRsLPy011I~MMR@@-iIGw4R)pO6iuQAtN%%5O}U}IJTL)Us_jzkvrWD-aW)BqCu zwI3)q!+JMEwVU9(!jh*&mz(mLFPxh0eZ9&tWVjT@mP=PR;Co7XBW49Yw>**R4y*i5u^oj2F^By(a)D%ZqRE^kUs z=lpUVInS2M#r5hiD;qKOcAUb~`ST1K{(M92!%eqItUmbf6isj(gl?=~%<1zME`bEc(IJ9Z@Or|0VmejlJu2gc_s?z9hwU{=CkuT^5IB1b{Z>>iTt#bWT?#Uq? 
zQ^LJ<407l%Ws!daZtSZh7bLT z$R7E|URW0Sj0L!)SCejyAZXseaDQ4(tg5!qb$E8+F1Ic`OW()F78m5YVPFQMPGTZ{ zFh}YnVyv)|u4Yq;?KBAd>ZR1_mH&Y}!+N;GDU$Hvpd31yY$j;Rly+MN2)6{Kr?RJX zhV@N6FfKatKlmm-ml^J^ppH19h36zTMSXj4?J%B!i*?3+{&rP01PtE4TNqILWNJ@5 zB=^AQ$ts}6`|1;U;(ghlM&~@PbkXT1-sFOXi(u}!BI*luQ(faCJfcXv)IvLp*q#cF z!uJmc_wV&pchEwqf8>K+{jX0>x~l6?5T5o_!psuik2Wt?y4r)l7k4(QcrVkT!r$>#WZ6jseGNuFhSrLAW7<+Omr$?fpf^5wV=ZolkEYqPBCNf2J z_{QxAGCG}9kr689$a1KxPGoLgI3XSeVk(^6Z!cqR9zb4Bp~R=!FHYS=9;bx%vmu|B{;82hg{n%)?>161Dn*Xx+BQU@sfBaG9nK;_@gN%GtT7u zMoq{x@k^i3Ruo;Tt7Kz^6>GKY&uPE6jW1dmPWL*}`MMWj|JzIKSbi4Dl@BdUP&Sk+ za(w80XwZkq4eMBmyhhByd~cJlp5GE8@pSe7ljRmiZpXDyi+ry?aUY_nVUJaO(a@GQ zQ}cYj@yPqcz1wg%*T@X*wOG|1Yxh9wqZO~YI`aTyt*BPXSaB}M+Y>kty~()u?xCB{ z0Rk{Nnh*|X-1g@^aS_D(JA0orooFW6-Zf|Q*YHU}`E@U``NV3>-hj50Pr|w0tps5? zJJxiJe;|)Wy4**mM2lNb@9OD|B>gYF#C>fNKs6AS3t!zQ@eA8_g#uPB~Ck?0mD z&=}R{o(dFLD-iLOm`}vc6}2CmsAS{VIMjVqFL2`N=15cFv1BBVPW1kQFPQt(aCDGe z?wP`=Zr{SL?p=H_g29B%O7aIWb9M3@-~6Q|U6e$NfQTIL`Jo zvfvR7sO4?Q;2m>~Kl=5dw!qNdufIg5J`iyd4^~>-`ga<0wa|H%;n|ibm)IU<0k}6% z%%&r1&lRHYWHDrQLEZ82LkkhvTAng&L(}U&JcRRUTjB`!s|mKl^t=rZE%jlabQY-FaNvhKAi3<>Y2*cF_ksZ^G|qO zen66exeE1FJ`GKXmS+DtXp`aIqxFLN_!aWdt7gHz;RrrGymOeb@cgJ|_HU`0)!N^zc6(Gzp-B|ILhp@C93ll`L-HT~G zkuyy<7!ncm?hCgY++V7bUKJEM!Of>+B+p8flw0?A<%#h>ch^%UDS+FmR|4N%OPkCE zF@F%P>C;=tV~uz58{tQHncmSCm+*^}!2kOe_L9nr`mf-JgFX~XQv|&)LbCBzaUWDa zMSU|&_Lk5k#KF(Ep#sL|1tJsp8_h^7$pZpB=N+t(RFQEHNEcJky+%Ml>wNEbFu#fW z>D+j;is5gqT*FOO#l7_0LW}Gnx=`3XQXeGbenoCgt+C?OU(>4f$@DrwL7E+g2dDP( zxq){89Q^ER4dTx=O?^Ga^hj!&ssjq;^$mNKLgGp!)^c+(!YPnjlhL3BnUV06b>#;~q!OG`TzSll&)%-^Z3-o{nR& z=2&Si7a$5$1C3x*xOj1Gg3Dn3-%>T<*Z8?9^s(OCh%G=)`F{b%dUqN?XePn+pagPCh3L3{3ZS z=t7*10u+S5KaWML_9wK2@VMyYOLW=dYA#z`$MUhB7BDW|)}TJ?QHgE_vnJCmizG#m zoDT$IvoMdN7}mYQ`jrlPPwbm~2}a|50+Di*G$JuBS2@4_zgU?a7`7Fv$h>zEB{_XU#UH){CB`j35F9KEXpTR3-rEXgn5`}AUvG6;HzN}N%3b1+)LeA4U5 zcgRz;Jv5kS=WY9pQ0sO54*Pr}&*!{upL2PB`VI4JPmcg27oUgmtqlTg0~Ono<;4eI z18u`~jJvwq_#AJ+EL^g$5%Zt57@GQDC7Uqf2_HzxmI^}SUp5FxAt&^+U+cRB5ArOOnM 
z(u8y{HyiE3liR3pb)Wg5M$eJdDTr@^RG_h5%ZK=De1RyyP(8rFknhyZ1RgWv&U*b2 zsnG-~DEX78wCs&vha3Y>!uj2*I=~c7G2GMrQ<+yqNL9p&!>>~BXEaXr)=no8y_=Ja z_CBq7nqBj$HX}ZYPZiKFh&9mmtv_0kAKBXQuTC`|bY<@evpqQNYsh!%-ZNJ9vH%-h z@h+ebp$S*r|5`ezyr{*g#YRHiISr=wnw;8JD&YM%)49Cww(oDfLb8GzkWb&QHk`ma zbVTtg&5&tEmqTK5oi;^slO|5=vyg)^Ev5jMt$^x}K z>bsBLPLpxQAC>3r_{RbU>VWu*VRowSV^QytM7^hdpJoOC-%-9)WuGMKpV{}{L1xpG zoxCSe-=}BpS#bU_(uN4M0S>v^I?oNie+cuD4HK}%G-HjzHcE&sf}-jyIb<32|3%;! zv$z=ZA^M50EACaH`{Tc$D{8*!%(n&XuqJ6~*KU`!;%k*>M2$1MYr5#!n zI?uG`_butx926X81~ugg@~hu!-}l;5_Z|-(%`X&8c?46dn*hDn zIkL_~-emqX>y0}~m6&T1^>H5kJKH(icRI>LICoJ`x6KbgiGwD>QgUOj}xPL&+aKWpKi69H*B%n0v}IpWMwTwSmw zk)v+^dx;@OSvAcz-evq&tdL@12y+wdaXTd&{{moBjUP5V+4z6HY#ZN)6w~cHBh}7H z{$x8}V-`^-pHGTu=hIN~f3@>LzWbw>mS_?AESKxXV@lvgkz0fP;Y3Mf7>IV2|}8gkN{b08>a6i`oNl(x22 zYb|~jc~_I|g$-|zPY&dl2L zT(f4)nl)=?&zA7>5#GbyQEvii+FATog5LNY@VvjVEuF4q!1Fu=r8GFPugUjA zYRs*3kcr-dNG)oV$1fUGbhO_NTz`z;pv0l{audVVjySeUu2;i%9dkp8xj7rk+#NZ` zb@hN8P_@fsc1^gjB_}s-?CFO)tMQG`#HqKtiE0>_F`qg;Nu-3Z2`#0!a;;2gb~am7 zfxCG;F5QuZ={U8o@&aNP5jl?g6*u){wQ2(|8)VizK=mg{r8qL20dlUC&a60le zF+F|keGvrhF&E5b-G)4z#5>1-<7VW(cQRfR!hqP_b2k#R`sYpciAzoJ4G4ZYf7|=7 z_q@6_=LSfvje|DkY}nRw8Nx9Jaa-V-PwmKgY_s&yGw7q91{TzjHPx@^6jt;VTajQ^ zjx;@CREq&Op&1x;j-L+CTZYE;ZGVD&0@t1F+%z4UAkuGwyv89B7~b$8S0S#xmmn{I znEa(lsM}nIjsq&VSU~yXO->xtDtXWgJTckuM9yj+Lc;U6p3(S^SXAebxtsO*ZS(x{ zQ~C^*099rE{62bE`HVjpO~f_`?CAJ{#q<}T6kRaIbW0hF2Gbsa=b5);^HyHDnL+AJlQ4ldr;v?=}`$XUX)EUxqJ$xj1`v=+- ze#TbNS%4O(oOrJYNz*$D00`&i7VOjR=>EHCNMKMqJ{=$iNOB+JY0(L zDLx`lgv-#_2yD}*?q$tcb0fk^VNl0h>v$glH*`YIYBn3JKK8fMaoNYWaW3w48rL2` zG-9u8MYmxzJyOa%|9P?@K;7m~&mpJ>sSfg+orh+7M>FbU6yUv!s(bzfh(miW9+3Dg z*s%?b>QS8c0h2t2Bs_)><6R4s#m_(YyU%N=I`FR9lS#uGc74Nv;;YG2Xjpx z{VSA;1NnI9@uZyEl9`*Hq--7eE?7YN5i5=rnVXa}Kr%jYDaegBs2#U}Skont8 zb_$Y_k@`V7WbLEy%;o|6{eXS^1WYd_xOf@1$}ZR}VgKaT;1({E@Y$Sqy*vJF@_j7r z!VYQ)_G8^<+CBr+yz*!st1>i|Lptd zaqN=ADzd4lbk#~+7rYuA0S4IZ{RWly@BxRoA*M;-N_OH;I7tL-ve|??jaM zAoxdmgOM~UVK0~(5WW+EJ7qri3#k>!uS}pQn0g}Wv*f%2GMxw~wLCEU 
zDY8Dm`g;eK18zmf9Ps=2pP)2UG;uo?diX98X3>cs0DK0UEI4g*Y7y?yo}6_t5=~LG57^9Y;2}|3|r4{z&_ns{F67hkJvF(%1;jU{(1tis2%Ek0H17vC*R)u z@mQe!<#q=8`u)e=FA>VVgOLi~YpQ*`V&YbOM+x)Xod}od{e05(BqY;UhO4~&{Ri}M zqQqN;+EDolc(S{Bs9=Sy&GS=ebY#sziFmc1A0C#yiBs_G9fuTntvI8J#82Ay%pbIU zuBqY(cx>N6se=9X9iHAk+V4O1UMos9&(-D~kLHcTlEyTz10Cm`iDWpzF^;J@SK$Pp z80jDPZUaN1ZR_Ak+a?M|Xd5|AN!*bYV{tg~WHJeV$s)710+%e~U~vBm9D~QLG8?;b zbHX7Qu7`J&4;sX;)BF7)`wiaHz+lxy6L5(o###}jQIE)oTRWyVFg z3pIzQZ0wH0rO51UWrvzXiI=^n%@b25Ui2QuQ}1t#KE}Hqz0r*BEN&z7@|*4pd-4#H zzTXHs{zY~oE*a|mvJj(E?gLI7fq@A@j2*iJZ;?}UB6G~jH|vYg&&|m>TpLWyqdQ~d z&g?jMgbQH+?)i(9iQ74fUwxgujX47hJqCCrhnDW1$FCGT?zwpEfNr=S#lbI-gS+^$ zp5vzkFbE9|e9_Re?HO9sU-!OZx-@5XH{`1kQa}72p-tX%D3kLZhBMXaZ%gp&-u)UZ zE56S0S7`7$4L+6SbdG=RJQni0m(<`=6MVj=Zk6Ep;YhhigQ`u+HD@BFchhLdiH?zz ze+=pVk}Si%w5c03gRYwEeRqshaS@2geN41XWXKBSk;R`^0|@gZ4&1a2!u{Y6p+v)b zsrK`}C2xYC4*UlZhA-560}&=&^&4bDUxJQTpGyojz9}cghR9L3iM=_iX9!-?x1827 zCExoZaU!+8DcLq*$qTAK>HTDsgmVt&b%$Vw-9Io=ty;8$&$yv}6B6Ml)htfd>o{Ya zfYN%u<6VbvP~lIvp82$0&eVKUla_}qLKlh?mu+R7@! 
zoNxFVUz^Rk?^a;fF_bpwo$ZhS+)uiSUD0{y*W1vf4#FK-^2K2hr8Q6ty; zJ!VkpN6(3I%{!G{p5)fte_coDbxcTOkX=kp>3N_nd}-=y>;y96fy@Y_8}vHEg}uL$ z^URi>yz4MPONuMZ*E>sh{~pxDd@7&52n1oGgV0J#)z9|rd8O59^3d1MXirGaf1xzF z6Kltc+Sh=I_qXHNxqlLdTn|!G&rB}d%u96SI}y{9A5Bm0T($BA2Zv>Gwt4K!NX@$| zxr|3+Rz2>{ocLP&4D6beAC)_0So}{@^5Q)>0dbT!4lIar<8%1!pINnk;^&oce!KS> zl+&MA`j(SAQ*QJIYrm>k3kx$J2 z_McFycN&o}af!FQDR}CAHFch9)q5__KH|(Fw4ZJ)VBYiY#@pz`8n-ZVO1S546!I64 zkYPBA`8ZDOKK#S$Ly7E|{$7&DdRKOXu@b|3kAwcE4~O=CiS#-Iwfcefz`)%s zQ_i7hp5997S43_6S>Ki1o%4Be_Y~>OIF44V-0mHQ&q#1aky5au|9F}`O!N=rFGPL_ zgiG%+#mr(cyM*9c%qjgdi;?-)|JGyY_}$-;BBuI9K*RJ<%-yCK7GU7LtV39~9H+fG3 zi6_g>bvBhPSc>}wA#A7BF-2JkbobpA4?5Fms%6@*KqKL&a}%DK_#h7^AB-d&Lk0*g zG;42%Fz4cL;p31m9YcC=Mn@Z;xePy#`vzbq-?s7eeZlq*T22*;S+-Bno&W-o4rY$>iE~cg$gFrwk(y%$m|S&oGvKhpaWDgji%AS~Aq$NLNU0pDIG zEkStOz=b_Omt@*brCQ#>2cnAheq@Yq^=P0Wb_%`j5J>FM>I>m73<6Tg;VXvo1%3kGb1K5om+tK|lW)%~ zz-wIZRn*R%2W_7pVf(%C8%$EQ3+7dZdLbpcAnExDs$U|eGW6!0&M7C#&QEOx#zcJ3 zf|&)+Ns1lb_n5*IvjAK}!G-VBa&ZOpxxarfwU~^Xsvkkh^!dTcRJ3-*HPYHUeyptp zBTer{Yrlj{pl=g8Y%7nkxQSy>)yr7;pu^PMU5>mouF~3l+tKL2sW4A>ql+E}<9mNa z*RhN8OczPtBryQBe{h{Vx(uAX$@}h?k=LKrVCmI0g!GriHO2q$TrDVne7mAd{)hZE z{Etde=NFX!A>}{xX1tvtEz|Tn(BM_DFnat}6!>UAj4|3z?gvi3{eO%H{EzFF{|x3TF5I@%^yJwLGWyEcBaqAaKHCTO~@UVhL<7OPFj* zI5|~9nUug6tKtV?NzleO#$M-I51&z!o%qL)m2X=8F=M&48fY8&Z}rtDI}Lf}Q!LCu zHctD;hOqFQ{voV{hc7z@=$x}3TRw!RiBflpbr#PFmu*!6jgyQ_z#Q52Dq17Sw{Iq3)qqqBRxCgKp1d1k~$}b`vq=2NE#|hJjA5L-55n zX(Dx=VEw`MI_Dj9QiAD?eDq>p1WZ=?9YzYiwvM+%aLL-=LeQ{st`dg0^lwf99y%XK zmvTp%?<1CWOs`zk9l}i>zuL^6Mj9xc4|NmHnN6vCH<-qI7!7;00fF8WTg|gfvI#Rj zy^=xj=;x|GRDGAeh?7kvMH)5%gvGTRB_;Y?p?;P63)HU>Ke_3VkNTtH|2)!2b7=gL zol7?JR4nJL zei(Us+E~HY5zFa+qkvF^rBr>c8r8@_KT zPZ2!PP}`BU3TmtEyEPky{@oeVD$1AJvS0TummXa`V!br>b#JLWPp_HT@vsSFSFb06 z{M6EIQreRB!1e*M5yO6soG@&8FC5N9NIjRSDTH`*>fS5C9vvX*%#N$1z|)fD1yEh& zwuAT~UUO!wmP$MTy2T4F$x@Mku;&P1mfPneU(jAER<=bfTQqyyx3{fT(n2 zJ-?P7#1_6q*l_?@fBgAZkhQo${N|ygvFS8?@=@+h$TqBT8gkFvWcnDbMzWPPXsxwE 
z9WT+-YZ%q@Am2jq*LNk(XC*n0JV_yI)@QYwDni@&X!KFY#FItkFE{fyPpR$5S+Egd zqJP+Do|Q$IZ@sbk=T>>ofNeU4u6jJj+XzTsFSPj|7>bu0`|p>`I^}(|yfbS8ykLDL zIU6=@{Bv$-Q_iEC`YONT()YHYRrtj9u{i~D@H3~t9N{gP-=0%|*N_S#>v9T~tj{S( zbmbJR+)|OekD|9C`E6Q!Dw3VF2vjEJV_%iY&IJtY$Qma707jU;tD5_A2z3_Who^3QS#kotp18fdV}5sOa#ty45?(7-h8LDF4Su2_1K{D91R8K13* z-+==2mmPz`DpQ4Z41V;CM+bho=BdQ6-eGpWl$p14`JjsAO3zOa@0a`yQHmXuUPAAX zwI=;nk$%1x+SM`fK`C-~{EFmXdsm@_-po#1S(=>Xm39nX^VCC(&6)muXbUV~A5iJl zCaf+7xx7DuTZ8PN{KNjt`PuG`f6N`ctFQh{?m}$E#y`3heaGX7Ecw9a-{fC&e)d%I z>71Ya9y_2eP7e>>oVGsvOGbJ=qlWsNi*bdJ)?(>J`-H^j57~&RKIhEMIfszZGvoh~ zb5<7$*qBzMKR!mCI-vi4{&;$MNiO)~3!QUzCwHyt?!WE*Re$Xt%ii{W@BD4=Z+ogY z{_57`#)7|Y%UX{L5%To5r&2_F^`&iHTM_cg=GS`u9<*xXfNf7Fe*NlJvfQg%S@_yl zw;|V%Ggz{ev3oOv6W*)aQsWypEY72ulALm%DfI1eHThBQ`7PvVr@ z!k$0o1bNu3w`}@vrKdO7AS~>;7g>V*LVcC%v(LQG9y2drPhCXdsv^?gJt` zTyro@EZB%+a zs`UEXfd;o6?*l-eLoe#zL|WPYa#B2@9^y&2p()CHNiFkY8 z&-v5X%JfwMC=Td>67ajr0c!Sj1S>= zoU%0hH5g1!dTsRF_>ta3MJd-C1L$+9wZahAQmcnr5cDEYtD7;M;~zVN-jL@m4p3|3 z0SL3yI*CXHsAc5a!k(p%eiqPw1kHs#VKAGKH>=*m6u%M@VdXCGZ$Ab}VVI}5RQZn4 zj1@4F$YGVvl{kL2!}||>Y!#Mjya_Sg>pf9U5RT%xf4)8%AC3cG=RZm;KlUy|v%#1- zUKTKAK+nl|5yoszV|O9#0G7em%8%Xr#zf$78TG5qGJO7Y7harSHD$N6`~$C&%}4R$ zK=Cy%w-qK|mY40nrong_Z8r4s0o!&zv;3`I>=f%k#vvdmPE5(CMD3k6Dg6(PExB?f zROXetO_#ovjaq~#9Ip#|j)e$G&Br88a^9k?Kul$4j2nXgBQ>V)8|q z&LOgY0ef^2;Jd$%41oEP-H;m%JQVQr*BO{E*)6<5;6yfeuKAK(;tSpvU?agBLDP4= z3dKz-L?qr~%B(!qpO#;}^YE!1Qg460KAOlb-4N3VO?oIlL|JZyY8JnF4P z5;-Dv*e`g5BZ-yGB|ORbJn4M)!?-yScUR#WR_{Dw!^@TmSOcz+m(7my-_Zf4r{HKtnVmG7#Kj3mn?yQa| zPuAjN-HY)KyT1mx4d~$Xe`)fS;5+-@KqwCm=!xPTUfVBmUp3CvEJC5Z&3YB<38fE{ zje19TzlP#n`Y^4ay}S_1`>K@s7QUy4(NX#^=WmFJk;JGYt8O(t+;fHVQCUCx25z?P zsO-kb9v{1inz#3`RK5%^8h-4#fUzvx_@JHyJPgl!-h>iZ-k=Z3|K6DpCpq_X?c>Xj z`sB*bFaSO?1AxeS#Uxh70Fd-D7xVpp^rjhK_XzU+$w)=8G~^>YvbGI%64-PfvHxK?`K`-PEG>NH_;%T<;uBEf?gTcGvc|DK5bCt#s1Y0A%#{mubMAk3FupOl;l;sE zC+CeAP?3B`>e;;ow(nl<;(heGAc>7fB6+6a6_0@!;2TQs58-QBiQ`ev&k?peQHA$< z08HF^$>9^XuE_1k`Z1o$Cw5;l2+qopJQClL^#N2bS+?QAs`uK3K8xnehGLl+tz 
zJPl7`P6R&nuL(Z4AaBQ^i}0{MH1VZN9-8>likTgUo`L7`iSJ(W6F4g-P8@O?p21eV z-@|v%@hbmskO@T`V~Xfx5ktN#MU-Rz^%~s2up0mwwTE`@jv)i3ybp4&q3OGP;s-0< zo>;u|RVlNl96{)UKjRTyaGi7kxXqgl<37H_)`Qb*jKAN+^J@oVcP4-BjX`v>r{mDu z5Kh!{u3_)>OhIWFKApu^nDqIQ-s3l?lCO6RSuUvs%V(KV9l%hB%K4>)#rM`|{z-Yd|4c;c++qm2NAoNbM)JK7r`(2BM#0G&mauNg)0vm>LN#3naK*6W}m2 z592cI8)2najg zWz3C%Hq^Tq5Z@l+FhuGr^H`F^+^T7Ey&6L3qH{##(~gSd*!!4=Jp8hum&XK?QAEy~ zH&8rfOxz~GcincsC`fKX6ncoY;k~u7Pw}8;4py!>8QGf+>2W9GOLyb7lTDQ!O*{w< z^Q5tpYGBT6v$-iZkvb)kIB zzN`fl8swdXP(!P?Fg(crE9eO+YVeWc^lN;)RcT{rZPnP_9;eHk{nyH06^7-{L>01aR-V!NZf334-of2adX8zSlmOzJxtu8;vOmPQQ{sgt}E_vaYu-IthoHb z`|gp)i91T%LUD`49VhOI;+`b#1aT*cJ4M{7;+BXzP26&EXNX%N?rGvyiaT4}DsfL2 zcb>Rsh`T`Cv&B7E-1Ee(5qFWeb>h~G+aT`w;x>ufEN+Xqt>VVTT`F#yxEG4MLfnhR zZ5Q`4aaW6bxwuz~`*m@z68CCx*NXd1ajzBkJK|m^?hWGJDDF+-t`qkbaepZ8t>WG; z?j7RZDehh3t{3+naqks(gSZcf`;fQ~i~ESUUE)3_?i1oZDeh)*pBDF7ai0@+i@3iP z_XTlZ6nCq*FNyn#xUY%ZE$$oQz9sHE;(Fq~EAD&ZzAx@}ad(LOp}0H6-6ifP;(jXb zXW}{+3;&BdK-_`i4iYz8+ylftP~2Q`4;J?jaSs!BsJKUpdz83Gi|dLzT-*`j9xHCX zxW|b*O58%Yj?*v*9m14qSWqj2t7tT@iWADkG}m)Jp%H{?2u&b#2B9+vRT8>@&@@84Vy0&Tq3;nYBytkbZNkV|M4p~DE>Naz4U_Y&$yXbYiTAWlOsq4x>x zkNLXiEka`my+r6VLR$zeB=jVqRzh4SH++*2$=>i&LbnrolF&_r{y^wjLLU?QI-w(= zyY{g28mRO3EF~mIZkhvR8HvkgeDRCm{1X+!?CXEq5ju! z5}~6AeU;F`gw7{4h)_GB&%k;O-y!rNp?e6uOXz7r-GsUcy-4W42t7;aXsqXYsQ)z- z6XMFV;dDZG5o#gyLqb;&x`EIS2(2ab3qrhpu3;;o6@+#W;@kfXxmYvzG!QBzbRMBu zgys?A!JQrszlO^Rl@Pj#&`E^uC&ag_8=fIFoDi4gJwpk7M2N$=;UJhDa|nHh zP?XSBgw_zcjL>>QP<+9OgqjKMAXGc#hC6rHSf8ug9p(6+#LTDtR zY(ggzatQ4r#4p@8420>w=RHD)5qg6VkI3|FB{Y^0*R2hw5Q0H=cf%|~4-q<-(0W1_ z5W1Dng@kS-lqB>`Le~?zlF;phE+%vzp*W%E37t>qJwi2v1|05 zfBh`~K0F06?ZF??xATWE`NM!q^C3gz52Tkr7f&Vlwd05NjRVo-48V`+Km+`>mx!lM zeSKr$qC|atq&2^`E*uY^Gs<1GJRYg$r+OC_Ov*kz5>K=?yJ5FA60RG)thF&7aZP$R zy6F5!ZQPAFgyV=s36bX7NDR*rw>7-XEpu63c1sjrD2>#)@hA$G$i@*4FTHq%8|KGrA=li%SZBy! 
z_{3D6XpQ&@M*;QPL~9Ieglc0|Z8ba8u*r<|XVo4k~sP zM;MT+mwpVlE>1AEFneYkO%eVE6A7cSR>RR{ZhU!5L^wL^j}{Os9>$Y6K`$wLIPsZFc>}C`Xln?du}hizChCDx|AND}~h}ZQ$c(ceu$o zJTH}18xaF_ghYkeW#OhK;>?U?sf5cKo0=Hb3<4~TUSPU70}=GnoEDB|QccMco8;z^ z_IWNi(U28YX|<{HN4bsl+5F%)#ZcU>Z;dXI)EPBQhA^pf^CGRS(bhZzN6}h7Wn%!= zVcagGJd4(n!%${lg+RPD#z0i`Riv#pA{b}q$0DXpbvZ&3Ie_3IKDis z1(_CwYcH^36r>KP`p#VvzJNrGC2AWuLemo$W^YMb(myd-&$C`Alkc#>yS!umxR z@(V4qV-T5*7=|E8i||A>laZVA-27z?ji`JHMkXb0Yji0FCIk^gXCx(7F%YtsHim<^ z7of(OOIo6>;LZ3XA)sHm&KXwBLnbFs;qvSYKt&Fer4b<=+sjvj!wz6p1g32y@3Hiz8qi^ruc?GSb7%nEcoqGD=fUSyL3kE9|N)0yltZ znr;0Q=5RBTP{;MIej+9A;$mgC=?8timY_vID`5C+Qm3N(7Bd6c_a@8 zLVbP##xiDz=4d<{vgZOMkp6E8x5gW5W#ZQMk8;nahSAgr0aWHTK&L@pkzbpouQgTn zGDw(aP^Kvwy}*}(n9RYfZhoSf$kY|MvE@q^MVo*hl^|f<=nJOCR!nZKsK|6spws>F zZ)awz3Fj=#n`#;@EW-|!5~)C0Mm20`#96i(Mx!9BWQej!u|x}poa9pljfz)meHinD z3-VgJ3j2U@K`eJk1VfayVT32BQK9GAltqcgrZ}c>B|9b~@-t?CnIQ6)M-yhILNbUB zZ34Og)3_*NO^vmU(76gw5M(JSiGmH%7%@<9z~k(6bS8A3uO)y5MxL1ysXI!;QGF~* zl8V5=T(l${A0=IvooDnxaiImZy3L6tP}mFt0czuOV;G)b+C08E?H-rsipUk_(3!zt z1q#hl)l=wj9Faaw$zc_um}bFTh9yk7}k2;T35E<768hOU0iVOM4c> zDJ~Z0L}^4}KHI+x;x^(&nqm>^I_w?D;>E4uC6ov=Fp{Yop+)u4rl#mJCV)U`O*Da; zGT3Wd!?6aRCO-cGeO3GvDH(L>{3o+I`=T(r5(+^p84fZ|IB1XjzWFfK$OG^WK6y+R_z1PYB{LW#B{N-_ey)+Vkb=7M6FCxkiK<=IG5wY-W^ z(8J=9CJby6`&4LN)CdUs{iYIAv5BmxYU`yHWE zfZrdg@S&|KAhQZ6jZ{CgDGk8U(0;K5FnWb)R6$CwkimtDO*f_5 zU64XMTc+~-i$gx;S+SD%XIGb{J$2PK*F z5CwY3d5otD1X&BLr*h)DEjy$fGBjPd~TFr#2&Oc2I0 z^CmE~EhM&0x|#-CV$6eXP~)?WfaRRl5QhEM}U7U zF6t!J*4U)%e9=TA_0{q`%G6UGt*wyrF*O(rFj~N{Q#C;5?va}r>Gv~##(EU ztpJs`a7GTSR*eKRIwexECCq~EtfW50ZV#+MY^e|tjM2sw!Bvr*QnC798vcLyfXg(SRT{sT|h!eyz>P1ktVvq)P9KSL>pat`rrHMwx zSrWmrybhfv{DC2Al{yyDlwL0isclNsVH9SwEVUPKR8XsqM~o^odh}?rFDPo%in=I! 
z6wIvJB9y>p%+?SYixbfVM2J`dV*!3Hg;GI@&tYM?m0E4Im313tM(zGOQiVIwP)V9* zPil-?N@eD-OF~^=_Gti|XAsa;pipvwF!?uG}7&9aqW{jO80^@d}ye!&!0XjJvgW_u#*KdPPlIZs` zuWt0t@AxQofpQLZuS z-53TPw84O^!NgS$saeOF4$BY3w7Tde%%DtU7zM*Iqs#{I2;?-@g$<1@C^(ym{pl>I zlA#zQu6(Uom>ZPhx{9?}y)nEjTb49I0;DYykVmH*Kq+I0p|A`Mj@m_$Y@r=BGMEft zA>jmrWhd&L#_0^4nZV*<5lJGoi@r{2r2TJ~GIro9%cQR}M!Atfs3n+P!G@gcQHz;S zAie^!fiuYIc7K5D0p-~!77C4pKq{P?d!WionL} z=1I9~u49j;?2k0;+GtSDEF%Is77eZ-v^6G@6U3-*rNr#{m6gz;vS*2MKAna@a=NHO zS81~8bhY}4ysZ7w8VA(cXKj?-gYOk21Sph=1!l2TH2N+8**@?wX z7j6ZU&3Ih~wcahGs$|x(%nNChV!{;vVJ2g5+C?oax757+g}*SMDpWFEK%@Oy0_!%r zb7hBwLXO3ytw?YgZsuqF9JF(uQq@w5(7?zwzw_=wZ$RGO)Rb z1aY{AWafP`6b$p#IBmaFTVKy^GD1{TL0Cw+9m9!x@0%)SQnRhK)36cSWNEuzql&3` zlO#+4wtx~BZql6`Q9@A}i6U#KJvL+l`DON#LSVazv^Lga`oWS5MhmfNLBYTXYl(t~ zDTVwEB4|f}3jDc|eJxCJdIT(j+M#NzlsBf@7>s?O1Q;~6eWDUq7FoVD8kJ3OaV?ja zt+JcO_-dmWi4}HUW-kTgvka(g89mEj_!RNP@>&gAW{&4QBW~z;tNovCoxk4hH12D=} zev660!U}hwdM`W?O2n`2V{fc>ICX9=XiyLG!x4yAWiiCud<**V1 z@~_^lB~m^&`|}C;`CqVG=GPatV_VHB6i#aHRMon`u2B5VY7x`iiGhJkYz@A?N6zte zHh&*j~V+`%-PucU2rqN!;##4UFEWMyekKG$%DfgGnv(#jGKPq_&*R zss~*FZPd+M5^jU0h*gf=krsk1@9#uhMrR)k=$I^Rq&l;e`LJ*~p zB?qRoK0Gj5ciRI+3#IdB&!07I`srezNae$TQq{DTTl~y6R5VYFGfwdC~ zgSp&FLh5iCCO*4oEYdV!#P_?_Zg-&rX^3Mdnu>MM&2MhuCZsP&F(l2w4Lgq*{7lD> zQgLEd4JwZe)?<{uV`!q-le#{G01w$Oz)B~@7J(J4?1HD3sqrZ0Mq^Pj<~t4+85*E2 z{-;@rCW+`)NiE!og_m$_0giYWEvGN0B~HpgX=r7#Or9xB#(HouZ}Y|`gTBOyl97Cn zCc!?RC{QmT*wMi>^ap7}dZI6~QS{+HNxh)uq#B%5z!y^s>tI zDhkUg%}EqgpgWK*ah&RKo3z^M`VRHyYHMeAW=@Pb8 z@WIX_&b=+h?t3-o^gaV&7Qu$NH9T{Gr;;V0X7`ahE%@{D*Q3|s8C9pkOips%`w(Z~|GyAppKwvvY!HDIKG(`h#H?DBZ{yJ@jOkdK z1}typ^pT8^d=89}7uFRxqIYr(nE3=|BknLbfo!5q$)w6c<@gQlOljo=RLBCyPLH2k zs7-{4Y9EVDM=+?EnDNcUr|jD5V%|Nstw=^U{INb#zOI|5gUQ@rWB_Lp@&FT#J*gU} zgo@fmwmB%=4sM{_O8o-H3ed^lX;SfyS(meoieZWlO=E_k8CjXZeT;rICl4da$R!+M z#4a0G)=)OF%}4~T_z2XL5(<3=?jb3Hz05w9ZDQ5K0!MC@N9wIzc`U5a>%Y^wYCw80DF2J|(izIo$LTEpMpan@R?bb>wIWHivPAS#G2)^^Eu*4x^g3qN74`s4f={q%w#HLo<`LsG6~qV1x>SffiTK+y 
zT*RUy!w=n@05v4DtFSHvC@m&bqZ0MuafHaT000ltE0acgrFbdv7v{XDiPg0#bZSD|h`VRDpvWL=aV9%*_z@ ziDq9(l(9$C55-qa*q(` z>G;7&;s~O-Q>vy`6~BlrtL1k>OkI+ zpeY!sgm6G9y@onirKxY=T#iGkyZO+8nwr`Q_*CZ*W1g&*bv33w9%9Hd+JLN|p&Vh8 z11ic$SmjAbX5e{T%$Bk6V(e4JoYx-JIo^Vu7HpOB*rgo25!S`J zT3FBV2PLek{s30l4u<#T{32TWrO=dtPD>SC#LUsy9+np>VxWuc+ohP@?^XxL$}w$o zn(~tprtZIZLh|I$v2g?mH|6rlkp{b=-Xpf!1Ts#lB^hAbkQCJuanF?b5cSnauE zK=LnX#JEBt(LG?!lAT=9PI+P%wc)(T@+CYeV#bG2V`4roc{CM-iWs)X%g>?`pg zeQ>4K6vli{W`TtD35$&xLq)$VQ-sOB2#ZdGlijCMlBX<%=J<1;xx?nee0DOL!{Mst4YW4}@UXuiET#L7-~_yH5`h75&af3+$}~>0f=(An)zQ{!GynFb zOK_Z|ma_ImKfy{?Lel@H^v32%BKY}Ep`$A)^uCp_0Zz%#Lv$4uHGAo7gk6WFmm-vh zSdvgfIu12%bZTCt@Q=l1px}>gkWdt^)NU%?UA3~#@gXJ>+)<{V7h#h|)$_hR0R%TFkoQw7&zKdEpLj^P8KQABONWiWs_zQLndzy%qrPLets1fEV6Liq1OnU_RiqnFe zZCldVMm-07mC8UB4zr#d$i~d`IV}N?sfpR3fY%9V?a&otE06wQn#7wbwGE?3W0gu3 zRgEKjH+}KvmAxmG)c%&fiB&d_nxWYZ7Rpb>&J*5s$F37z9IbO|WhbjljsZC$1?@^K z9yO-}gdd~zj8*(+q1AzLpp>b}s*khe-Me=hi7Lv=K?3THV^+3?Pk{)chKp zQ_@FqVc86xm~zNajuGS(!#E0B9~XcUtiCCXhNt&Oby+zqg458`&O8i3juGJle|aLK zK@!=sIn*7&7}z8%G~tk``a-81lP)#K#?9V{i04Kq=Y~J?@j^^2u_(p&6vI3TYF{GY zJmjAl!fW~HJKPlj54#1mCeT5F**l3T!7Vj{iDCK_Kywm3XX=eWn}o+nw1ljq3T zRL5u#j?P(lqaWIRY9xyS8dl?7P%sPs=}8joU z&1=S3cFO3*@h0;&uFQYSqDDytSJ=(@FqWPAQjN}%a`}MZfv$rZovuKFl!5xig7bIM znB*)RoFoI(9@YYxF*gX!p@#C1JC7t8R_vd{kul1TWh9 zCoOr1!}f_W%A^X~13j5VWMh&tKNDNCyM@;{n%p*?=ufAHnSvy<**1%T7+`h(eg$+; zitWwbH~C)@ks31T3ezU!piC+Wch=;sI9bQ`l{QEn%oyY-YeTn0;WLIxfErdxi>WF= z6+6UyahvK47J@bjLtGG9&J*sK?94(h<_OVD4`b?;*OoF-xon7!nXhS>AC2*d=!BXVZ^fP7>xIkBs2 zH@;VAC!{^nY8qpnuoehlC%UGY>2T^18*ITo;v59~T{sR)5v*CN^QHR04q&4dl4il& zDe;!Vlw*GqM+Wom;yETshOG2sd+QZq8< z{xUi9kU6in89InUfa@0mJ;|3+#Kwy;EIYN(_|ncbRr`3JT4s9LmR0qQY_;M2Xlq8aLD*5e&D6?5wYK(}JwU?3d zMp|XQ=;xbXZ6hcUxEW#P0jna<344gLF{~J;s8Ym$;+QhF>%veMce-p5`RRye1*=c< zB&DaXbM%EbC+q;;W`l%#MY6mVA#UbLvl_%pClto%{SCF+@$bQMWQmW(4h&v6VNcy$ zxS5gwKrHUmHOc8T_`Ge!AxG(%e*>FQ;4H4C5x^LlxImP*x}mbjzM^@NSxyDWHb>0C zB*Qmrp-(L1FD1p~Ny{|L3?QeXN{EF=thzvZS&HDv=pU&jK?>Sg-Jh|?`jdN*FU$;P 
z)^KJ<9Ao&yjHrD{I>xFcngxf)KsK4yU}uI+5=)-I)n0tWJ_D6;ngwK@$7IO`Htfl~ zbIT9)Csn=2ZB!~F_&DQ=Eq$paRbr|RV+;-I+cb>vZXID4)xTuF(a9{;mS^`0F|$zj zF{5;kcU@`SG|PTY1omPBWB?nmd0-nH!lVG}B4lmjpZJE|K|0Sc~i|Eee>}+{kD)1h*2uE1$fErlSQXll9zOyKBE_2_Hd1kgk z^hZ(yHV||5oGZ&mWBgH`2`YdN8y;jlW09t7$GtL5U`{glQ8S?p6n-pT$0-R1aE+=0 ztpq%W;lo3i7%&PfjpEWjoYivpid!oVD#`0PP$kWEyP(Wph^? zvAx?9ZH~X1#O)ufT<)aFvl!4SW_A{#HZ9^?9^8FXUgg0mwH3%gnNe<IPHMoz!wKW`OpKo1VAx3~N*FwoT29pv z#&HmH28Ivj2#ODx;v^7d+IWtp38!jY3c%*h9M6C}nS zt}zehd`logTc>AT0ufq)L;wj^Vv5pYS<-~maLFsk3q0hf*vlLmbo{e}y22-dX%}#$ z7XOajsi!H;e{mQ?*^2MHm~K|vu7&!7i^I~-)7ZoTtQHb081pFE6&Iaw&92htoo7iYC{WsAWag5`A)by% z2M1PJzNGA|z%e;F5s1Tbb1nqTwM;5=K1DKYgjmn!8GWa(%NvF2a!SF}praS9urrpi z-&>gsCCicrX()T;LJT9|)3z}7o=jFodD;w^@YzeMDxyhqe6`HzoYW+Oh5&h;SC5)% zxEQL{dZ4d(sESwJvK%WO&J```mm4~BgnV`_7@#z^CJ{Bs#^CxeQF3JU#9J_A#eKZF zS;xNqEG;7WZ7#kgdKO>rwJ$*8>LOXXLF$8tmqU@}<&>B$xmQNpHl7->H$4NqC^jV= z)RgPaAce8D9*MXO@pwyY(wH&Sq3|+k82ft>UNl+Q7#$O79>ZI0v7ID`fC?MpOPY?c zxL{3-4PX0Yh#C;VzQV`}IY?+c3U?TzJoE-CZ|2A|x8ec^Tx{9oz-lz81+S1tdC>`N z)O=Y2tMpbm6pgbDSXxAA{FTE*u7TV84dzd1jK! 
zgs}3*eG*XjGz;Yoo<}nuteK>nxjzZr)C>*OF;BSgl26rqtG@a|PR|jjwn#n6wUhQd zxpoPhfOK;foj-bN96DM&<24~IT#Iw?j(n@Ra{(vJ^=vm$)m;#=K!A*8YLp;xEgfBK zKvMCCeun!<2`FuFtpm<5`Ztc!y6TT3N0=7M51PXfW#7;+g2}A@0x_vzc4q1+ExQ?& z_Fjt5Nhe0WKp{Haj4>x2tthawkOmv7$SHh1Nk36-ps1waX`Hmj;?krxb}jav9f)8Ut51UmQs?SbqYjiK@_rO9m&TYQpP6-u zH6B<7k}W?)AB!9g5!gBF{PjFL67QWXfw*(?{Us3Ie1gnT?rh0b4?5R6i!gk#UN`YF69%3^R@}rZ*Aap=5GFz& zU8bo0S_%?2*83+4^_~z297tOpS`Um~kdxvh9c#p!^AGy!q3I)8WTm|XmvJ*8sGqta zk!M$6B7u>i3s~iuc$Sn8iv9P9GoeGHo?n(9J4|RUf`Bmy^~wbv00+&;65&=8AUe3zM(I& zh~j>> z_Ngc9RtbgOClYbY@!>shMWF)LD5qK_v8ZXvujDCv@e1cqGP&N0Vp=1Kt<|S5JI*+N zT8=q=8NWcI6vZ5@OJEEH?|^ln3e#p37&#^cwSaG+Xxqn0pt6JK*D)6JKau+~h0W2avu zf{jsxM?HXw?8orEBmrZwyn8(3|HPiwsSQFrGJLo8~~RfbN^>5KsYM($k!86Puhd* zGBKXY0?%BG$Ed-HjMFPfF{MKzT|6BL|VzOl{Iz$OsPM z-m_Kr0#Zt9a~dZ}KS+QW)hp}|=-9YqMO4$^xP=}f+t8U~7qh54vZc^m$eSDj)APnQi8^w*TAZXznUHrNG%E zr~eiYr8LwaJ(8d|^75>Z6bp?BR;j&K6KhDs>#%JKZjL1)s3*gC0=nu(1-b*`-&AI{ z10Z=__jEb6z>X-w7q?EIS6N0hTtFfbUW$;_F6BOU{8Wnp z528!75TgmQiy-n~b24WRW%pQ@3=m>bnw#a^8)n5=6)X`nUCe1cgP%odj@y7U%-tQO z@k$=(b;gvoz|1{UEresHam(cNXf!^i((F)-DU2#o&eIjvIu^5W zZgH_&RarW3#+=hj#KtI9-+S01wPIY)GUi-!w#U>as(~ER2D4S7g zPbtjCnI5NXMmc0&ne^G2Pz0y(voEuwWw?$JaaFie8COv(uf(CXR2t^!Gjax2I16S~ zYW!T$^HSN$On_f_In)9ek6^JlH4;CA7ptLAfb$?`XO;st>H7@PvXJO>oKNA2#xZln zW|it3T)I6s(z=wpzZ$3gTZq@y(7mk=T33fdEgI~MtSglN{3Mlg6n?zVWdeTuqtE+b zo?no~=uim%hdKR1;ZXn3LHiFtSg2po;f_;HTQ zK~6t>jxV%skTYP*V5fik{!YL3`#S@=a-6Joe2~i-=JYQa4pfeD2CN<7go;KYJwEcb zuE@!9@!7YkVkfJ+80Azr{kvv5{nnl4I4xC9|CX;J@7ajMC*)cdBM(06wYJe2;Nq&B z?q;Xo+Ncv+AH_8`El$6h7Uch$6RK&&r?+EHs6FoV+Y-lf0)E07P_@kIpNkLGtzQFN zE_bqWuRz|U(|>IT@VUxy)^<9f?oO2ZO=m#vwZQG$@UM6J6N-*@`2yUB5Ce&F=$`T^qCInI`wQT`8|P}dLP{|IS*gtp#__*ZFms)J%r~+k?v8X=|XvrIaytgA?$Id zf6Wsp>sP?%N#Oj9)8Bp8ah&Iz{@b@WS=*m?22}kPH2s|uTK59#|Gksdy%o5-Ef5{0Ur)GR8tLo%X zsO994vwd=?|N1GRe%q%YUva2k)zna^eJY;e=bjRBipoL*)|SDa9_rUJJ=8yUMyQ`t z0iKx|%Gxp$@K-{iEtSYO3-!$oIqsa$fGu;tV^yJ0RaIz!^VLwllDQ#g?SfEf`+`uv 
z+_T^>3=L=xheBOo%A!S~0qYlqLThV71B&W^V6dI)~m{RSU0sNenv3?R!p{Ei_1LKmLygxYdLxd-ewh~IVG zTlfg1<@nikvAH)1ABQ%7r3dCtJ8)>}f$pjSSNFfXAJdfMS9OU=$FCFy|4zh19eyPm zKLmBkpDAbBf$p>e^GgpbDnGDf)xfK=t{!j=3Pl2zdn0~B+fBI~$HBiTcz77UK^p&j zU*%3au&DIF3FQY?t=jLZft{(sk&pR5#qah@P5I+F!1))%%jRDf+M|G@kaP;_d|oQz`nRgT zUj2vEe?tA|)ZeQ98|uHO{!aBBeuo%;+3Fvn{?Y2^t3OWtDeBKqze@eH)vr^(S^YNk zFH`?2^}nP3I`!{R|6cVUQGc`gzg7Pg_1{r{yZWD~KcK_3=Roy`sy|%)QR<(heu?_0 zsXtHs^VDxpzg7Je>R+z@TJ>*G|A*?|rTzozKc@b(>c6OdxBBm@|DpPysXyo{)1HIX zKT7>$)h|+ilKSQ9&sKkd`isbvS6r~ZlRPgTD{{nOPySN(eRThzZ${nhGUt^RfD-=hAV>TgiL zOZ}(Se?k4%)c4fiq5h}p54_s6Cs+L=)gPgLq52clpQe7L`e&%WLH#cEpH}|`^0!CPW5Xv|M}|2 z)xTK%E7kv|`Zub7tNQEJe@Oi&)PGL>t?Iv_{(I{0RNqm#o2~vK>L0CszWU?TpQ8Q@ z^{doBTm3rqo7ETL2&er(QEQx`{PsHj)*od2iZ2+y8!8b0+72^*?oi|3euVK$zG(cd z{LnW3T8=jUAlLZoh8bTX1I|;An*RNb`Y)*8t^WJ!f2e-{E|Y#g^$$`12=(*SAFF=3 z`lqS?RrMFDA5p(a{kZxo)L)~1r~2Pf|0eZsQ-8hs52^oz`p>BUJN3KOe_#Di)!%=k zY1a|z=c|8``qS0_s`}@te}VeT)xTW*Yt_G5{hz7-fclTA|GfHtQ2$-^ZGR9e{&>eH z|7?G-H~BMIJ6LcTi0SyV;kJMH9b^7%xb3IRaNGY(XMX@4%MUipa+Bo?i;oSr{9*C2 z;g(Nqy*AwPOJ=y`A6pLN`Dgjbrnljizo@kGhovl&JC@!|Cm|--vGg}-ZKUH^{_)5{uU~2i^hSTSt9S1ht(#MVqTdz%T$B7NM>Fv0&_}KJz z9NBQ2-i|9vPn+J3GaGKx+wo-awCU~mvf(zp9dCT*&!)HI&xYIdc05`Tp>FxNp;WoXM2NqA8-pU6XZqr+NVez!- zt^BazHocX5Hr%GSa?pm`^j7}ZaGT!BBO7khTRCdOZF(zLZMaQu<*W_2=|QSK{|>m% zq!_0D`5&9-b?R?cf4^NOd~fAlvBAXTXVlX2<&1#6Jg(ozbbHC(M=*Q#pZ{wgVJZ60 zm+`O6xwn3KXp4zipvc&CxtH%7c3u0JZr@5ns*?16>DKa#u1GJTDE-lYa!023OHIbJ zpYVSPVCkKS&cVMmF`4aL3)|_x=P$iq{#hJK^3wa~pDpKqI^W*< zH~)(A)9FxEV4mBrHt{9b7~i_xu#xkx>N?}6T+=VLidQP!K<(EX-_?lj8x>HmMe9uu z{onhvw|*JD7KI$)IQMFEaxEMUSD3erfr?gvbAue(An9 zosfGQF9TjNF`06#>;Dox(y`c=r+-c#+x~CEcVF7H_cVL!mt*$QFYY6IFYEJS@^!v% z`v>R!wKlzum)vg{j9V0swd!}NpQ)#O9!~$AxR$Q}PVT>-cW?dj@~b9hOGXhbn=%6a zRgdNA*wz+M|wi{ zjD1U&o(pQ1=EGT&^e7!KHn8UGjCdQd7k~TMf2<#DF+(lzFZTD=x{AM;n9TX5=G?vI z_@72ReZ2fn*Y@u&e_yFQLhYCqUvmm)_wJ%`{}>;-2~*>G^{bYe=Wg}emzn3>Hch8~ zm-Qzb47TX=cJ*tPoAf;6#=jc%yEVLfxCvjc6HvRxI~Qtw>X)eBrT&&XO+H+BCx0~; 
zne@&|&8PldN}nxiuhmp06WoU_{f3#K%=o8m{=LL+)A;q;j(?O(UsikPTD2$sqxs*| z{4H1dO%lL{+fYx_YT|En448ui)jNKe<2k#3DX*JPx-IwRf9`n*0P-H$TT-D96K>}tQT;fl^v z`dpQf?spmK(y`zBQ=Zz<25ORA^)vZ&l&1TF#@qQMGu;Upb~$%1ZKZJG~#O>Dr$#AzOZB{E`fP|7*G~Sf;+8Ux7bqc+2x9z_xe&Hx2Z$ z#&^GH;sr`n$ht;)iN{$+q

u>P zYI^+In@s#yG~Vq_k6-r#6F*<$yWU8TuesU8*J^y#nH?( z<+{!PzQ)^p)*tj|lW&OnHr)D_|LpTwn(jRH7iHwL^)1Z^xA{kIH}xH_ev$e%{t|t5 zb$r?R!^Ye8{w^b5X1vwcZMgNH*YW$R`tPX!C-rx#pS8o_d4T$dt8dG(zOR3rW77Yl z+__lo4(rs;U`ny2J5F5VOBR}#%=jxbzI`w8*J*s$UgB@l_?q+j#{a(m)83f}#&Oj7 zek30WE!?7$`iBw%gyLTB>uL|8SASym zLHyMpsh>Z8`s(jsOqR<)+&=4kReBJAosTemRJY zBmHJe|DgVw1$eOji1g1f{jjAUZ-3VOYvvzs|911ocs&=T|Et(jshauE<>&W*x%ro_ z^8RuB&6wt2TQ{^SwQ1w3`8^N29xj2yC2+U|4wt~;5;$A}hfCma3H+B!;P}&P3x+?{ z*7Lko!xyZoE$b+qeQ9s0qtsd2)mCb>xwXqTtu7r|nkMXR<9~Q>>KW|Wv5@(BN?nL~R~s|#V;NdC)H_7N#{6ggl{$ykZdx_i z1MM1QYqp(YMp-(Nze?b8X4EJ3U-|2I@zcB013On0i{tM5G^uA_edLsL!4x_@ZlW{J zWRr4ky=!esCLh$ft=Tb}?$L*w*0|Y=68AngA6D$hkD^6x&&utLTsB}z&T{P7U#ucZ z|24u~Hd1UHj2$oo7{A6IIdAea97nEx-kr#iTOK)W#uUq!Wy+jn+LsBp-%a}y1-v3x zF0RgR$A<)qg}`eqW)G_d#m6~`0^ESPe8zKu!$$WRuk>2ZkXf63f-72c!xx2C=f+dD zJ&Ws83+nriZt=@u_VWBFW*y?bN!9lMz+Mk%YqTTqt`ptQp2eYNFgHHd$0@sNv%ghn z=Re4=7WzEZc73tMYiJU?BgigGEWpi$JZB~gT+(YXYm_hS)qd8~HTVWQ=Tl>GvL+_= zE;mua%ukqGkDfKwN_eN&X2vTy%&CMak4%x~1jVsSSW#O2hRTyk=;OQ7oP?{&TxW}u zoHw}EfrJuAH?cC)NnH5F3JV%K+bnr4K{3HHKe`YGqZ2+(5Iff`YG)${ z?sFiya=FADrmN%I(<7Kq4ErflO4W94V;)^?t=-X#E4vdPYN=Ew@y!-a{*O8~^R2|! 
zw?kLg7M$3f?p?bIU%nX6q2}6*R{fLi^KnzDt9LDEwpE;@Ydv;F3hq?vdi`BhScnD!xhH{>wu zY&PAG$#&to*IY5h8|R=-yAO}iqalw{g@-pbZop5;NzZ+)o)*`Px%v;Tq4wHQSK@SR8~8*f{DYNW`Is zVsAQc!X%}b9g1S_>)~vWwmcw zA!&pj+{C8WKJT@zZ~Y)A{ZFsK=WBey%tSt@lKecNPtFBqZuUo^g5!DCd-?82+W+`7 zErs`*FZ(6ULv~KCI3xP0EPb|+I1c~{5BMPN6pmRIbNCoETx*NnzH+X1MC=Nju>aio zltHJ=9qkwna5ZPU4|0`Hy~Ssc6Mm;%PWH5TY_{()Q6}m1UktG!Q`yE0?QIElDb5rF z=sm!r5qNO7DcXJWxXW)Q@(c32Kk^E5%;aYOg1@4_HwqKvcFud@K=mCb#CV|Tw8725 z$z*CrxLswPuAXeRuTaG)0%tu;(+dAxv43!kwRrjIA#q+uOXofOSqpnL*y?(Vm$qY% zcePGkPlX+%)w#-OIbY%zX%OXfONd@%pRRika#imfQT6bH;Rw`d4a9T7Wx33@O}R1W zy|2aFbuuBEX>qk~CfxVO?nC${w5vC>vCrm0;&*SWUu(w{vChrskE6tOT;YEp@^>E1 zMy+<-<}q~ zX1$tjao?0Hx!t(8)ipo5m1p#fZ{r#d=E*BFrnRpnfR#_P7-m|%{ZYH$JNo{6Pxh+G zO0}<$^{0o=X=e0XNCQgW{sco?m?^DJOv*(*bm(a%>T;>YY%gKC=?=M z`s|w5N6qV0=$p=rUOkyF=lo2$(C%*a_0z#Nz3VOB?2snPJuP1LDagck+I-!G4f3hZ z&)&#AX~Y|$hc)>7Lbm}sNM=GubtLzjBdVW;MZ3f^dF&jyD0G3gijRt zzMOZ|C09>nbMN2<41M4rkD;}=B*2b&xZ5#Otj1Tcvbpv6W?o?#y?3>Gv)Nypbe5(L z_Iwt4_UIzJd2_jg4i3?*uJs4=4Eor6TEk#ptj`s;RNb*U-Rg&S8y^~F4x35roi9jV z0h1Arx@y^I_1B)GE3PxqfTjoV8S9+8!bPso;pO+fD{1bzA(qd~yg{4(`^}%dw#!<( zRvRkr!e!P%@KgZZmI9-_*Bq@yV{@;Dy&3o1!GY{jB+1@^)*xucZ4+xp`At-l={zur zwA<&cex{f**YFVDTyWDn=dyMz@3t^$H^a2MBht)nU7oA$^T^oFJ#t%%iI~SxNX0U@ zlew6d7n|nbN@?#=ylhS5^9z{HIbE*0@@ja;k!x|oh2njlbN*Hoc9Z7`baC);E4S(*XFe&Yt(hGAiJ!io18SBsObI?XuV zS-~eum~^Twom!M?_utvT*8@h8#*+qivLDs@&A?RGg4Fbq)PhuF3Ge(&FX@=>=zX>Uv0(1lsg6c_qYXDI+h!Kj7StBj+iMT6Eto!Ks%`e9+Jf1(x%Sy3Ywc5u zW)@E`0<|?3)Y_*P&9yfkHr0-QiyDij+d2n^XWQy+9bMfs?cLo20|VV%-8D>lS3hcQ z@pSv_;;FWowvKu`qCzsW?E|yz^&_Tnb98QTXXi9h=)_}#;hDCsMq8t8c&d%bBGm9e zt*xtbc%Z9mptEaw!PJ6|?(P~w&mf|%x%OJy)UmbpxraklYsZs~xwdI+)pUDpQGF3I zzhHJDVroaUrK$G0MKkTwM>ZDK7R;PDy+RSA@^srUwg_d^ zHuLZfES*@U7j$-Z;2-{?jHcSUJG;83P+T&Eg1WP#s|LsV0{+$zuDxS+L1S?T_uK7f zrBdCgg{l2Z{qMpjr&68ZlhUcw5Ps)M-u)*xdiSGud*1gZ%xMk(kJP>U?zj2h^$qWS z^aH8Xx$y63Z*%cKvZF0k1*cALOWoAIFg1nugBkxWyeBk_-;0r(WARFm=I`n?);wM7 zjWrJJO5@b&+zROAZ~S8EALspFYpn5V?=YUj_H+5H!k_K=>&61TL;qqxWOyNqX6dH$?;o#*cw>%GL^ 
z8`p-s|FI|d{B~^ie2#Gm`Phu`k~Z;}5yJnu2qIId3^Ydq9G`QPTl&l-2X*Yg9$ z8V7sQDSmr3&h~6$jkjHCtnsy17U%$4jZRF=Xsm4#?MuaHID8&V~wYKg~H$I^*0-9{M@^ZHID8R#u`ue zd1K9|ahI{?wfLd2=AHPJ!q52h4v6pae8i)Cd1+kTNyZvScc!t%%`F%2_xh(AYg}BP zvBtq|HrBYeF=LH$n>5yY(l0R9IJTD?Yuwt+#u}%#$5`Xi_8My(+MUK4clH%yjWhe9 z*v5_h##rOX{%D+x8+)`bFO3sB$ynpUPL;on13Sl9o zU_5~S8JCwR{6f!fF)lsK^ZSfD9`5+Qxz1Wt>{) z!~a&{`CS|=NB))1f9aW?Pd1)f@A-+wwSLdfFs>V~H|~74_s@w3Jzr<6ae1$j|B&~8 zld;AzyvJDM7CvTNvvCYxHP*Z=-#6C$EB6>{UX|Y(C-bX3)^D%oQ#sbSafQ#%qr^6D z=y3{fv@B*#_{YC5BvOl(pckgzG|#-HQzSY zIGP_yKjQVjFxEJj2aGlDM=W^_+zIaUTa)89yFdd9yXpao-m#>zSg*-;?sYrai{T3 z#tq|Jjc1JCZ#-P}`cE0xjrU1!{4L`-((6&?|&Zn)V5k`>ITna=3h6yg}gA8(mZUp8EYPa&lqdofPXgDJOOtb zYhHi{x(#_d1HSmX4!8Eahrb;cTp|61|e ze16_(ta0`qGuF8JJB>At{=3E+H-De_b{}8rOuu~^7k|95#=)O%ta0zlj5W@^NB$r7 z@jctP%f_i^jWuq)LOv>$8a|@k)sNR3&lvADE5czLK01|ruVwM$ zJ?pzIk1*EvTh27ryp;H6qSI?$*Hy;)uFIvy`o2rvSl@ZsVXW`H>@wDOUv4(m_g~&_ ztna|Q&sg7sxx;wH8Gie}A%Ek$jWsX*ea4y(y&d<>SRR@W{S;%($9&eHYQoD^`OFlN0 znl`?jd>q_z~y$?VGyNhkv~BO~y|% z-ff&Vt{aaVzr%Q^@gC!s8BZI(#dxprUU1g6l>X0APip1IfX5cJr7k{mEW;3Ng_qzD zzcb6_t?)lH;P-)V@LujO!=njx-{zqMnU_Mit?m{wz$={qBR$kflE-^k?_ z(~G6=;tCf2^y5#+)a!Z4?WPygi`#Fva}s^XF8$LH2+L=czOnSP&bOI<$kb0i&Ih-o|JrWZ@!5!J_#sn_$Ujiwh%-_4aE{gA0It@Qq5rWZ@! z@ZL%OLw4yu*$22z=^INw6{R0C^?E+`X48wMuSe;JOue3`eZcf$>1Vitg+KlH6SB+y z)17zf3(CK-^tDJIGWB|1_dU~#rBC_hJV`%fr|~jQ;V9+_(FE^t@Hj}iqBa38dtFJhx(AI*Yn5qrWZ3j)9*S8f3Qd& zGWB|1xz+Sy>8HFr(T7aEo^RGnFP47z;XVQ~^B*$xdLH^F(~G4Kx0m{msh=D6@qftl zV(DkScar~*sh`RC0G~I#So#uIu<(cJhfKYm&wj)7Vuok=4==5i`dOcRQhp&*ujkhrOfQyxF4Bigy`FbxO)tI?e1mEEqxc$JG498D z$c#_V%U^5piKTZD1^SSke$qvsx>b5(>BI74`XN)V=ke2~7fT=3U+P1qUeE78X?n5r zji^3{Oue4>?=!tv`VOvO;SbXfnR>k+@NLtJ8J_hy3_qW~Q9~dsKQ_Hs`l+ZshRpQ! 
z{=xq;y;%B2qz{>Ty{~Xmmp>lG(og&BCEFh|_4*#$*`^mu-x=vcre5zutTeq?`hiFv zGWB|YqTlpl>4zhI$kgk7i(%7?rJs%TAycpSGfJixOJ9%lAyeP+G9Tc2(~G6=i1Z;- zulGM*V|ualaCQs1AAdroe!$Xyi|NJEcYArV|3jvJ%Jd&Fy;%AIu3+I0(+`mcA7AM?$7v?=wB$^kV6|xPpa0-2RZ=_TTKhQ?JnW8%sYNr601(|C_x2Ey};K z^i$qDNk3%j^?ucDrWZ>;6X`>yUhiY=Grd^)jz}Le^?HBn>!ue=-^mp${OQM^kX`=Y z>bz4wQ2vdjFGc#0sh_QT{jW_gmVP$UhfKZRCp%zzvGf*wip>0nOugPeTeRG_A7bf; zxq^j1)Q9ZSe~0rrwp;*`>1#oF>O*Gw zdOz|M(~G4aj`Sf@ulFg>Fuhp%nMfZp^?Ltusp-YicSZV;UHbNZ<|?IcEPW%=hfKZR z?;J3__)ztFA9S1P#nKN)>4(hp_5Nth^kV6!B7Mlz>wVMLnO-b?jVoCA)6eh$Q?K_^ z-(z}l1AGI8wlFB`<7~i;FJ#82_glYi@rk7$<_Z@6P#-e&dLQ;@rWZ3j(;t9wkv?SV z_5SRiO)r*y*r}WPkg3=EwkKWSk0-J8VS3De$S(h%_6eM0`fg+CYf<_kQ?K`VpJsZo z^j(oYWS4(SZ%Fx%((jD)AycpSg}0hstn`N?eaO`7{o-p)FP1(muYNd&OufDj`6_My zXMBRvhucei$kfkS_`0_LKdIOI&eNtBe;PdB@et;JzU|Tb(qFdt#M++Ds60dF_UQfU z{iYX7A8!AA<)Qbn?>4`zc1GWB}jJ9VKy zp2YN`z5~XEOnu1IcYMxyr;akcSo&`7o#;cRe!%o6nO=OT`YF?&YI?EswJ80NUHr5`fW*LaFQ znO-dYbfgcNdX2w0>LS#KfTf>{^dVEP@fwdXy;%A%J#K%<)N6dl*`^muKO3bVGW8k{ zaf(sywM3xAk?$kc26$kR9pTiT&Z=^Ste%Sdow?Aa+HJ<0srWZ>;6X`>yUgLj` zx!501V(CkfK4j`OUg(jg7fav46)gPe$Dfd?*Z86{O)qA6?!UQc|Ap+*zYl@1EK&N# z(hqP23xAk?$kc26(vwXuW_YIGwGe->NFOrw8t=5)^kV57QT{`wUgM)SnqDk@jVoCA z!~BO#y~a~zO)qA6=6~j4_=83Ikg3=BtLscJmVTNmfj(rH|Ajul8BI9M^B*$x z8sGJP(~G6AyVRTc51D$62m6fa#nMl41q*+ee#q2o{Mc7aFJ^dG|Dydr7clk7cr(+B z?*-q$gl$oL9VdDRGV>F%%a4s$JD~g+OFt0p&ycCt__oC>kpF;B2hW$E>FEAv$Syx! 
zOgWWWX7SaHm7lOaa(hCiUgPQdO)r+d?qYB5pOC57_`96x#nR74`jB1z&u2)CFI4`G zr4P54`45?TjqiJ%>BZ7_x!9Ze51D$62Yj#T#nOlEDbo*`dW|3awCTmt&vFF|f2a?c zdW|>SXL>Qip9w2P`#)srH9ql2rWYrA#KDEk^yB<%JmUe=i=_|iFZCfaeT{!?!+V0P z&k19d`R}G17XDBlGWFdyUh)jniy5B!uBd$unR<<{Tw!{#^qrADWa>2@bB*c6($8@P z3xAmZkg2cH5zBK-FQzN=-wESFX3$Oo7Bcm9(-%!Ira$$yDE}c-ukoQTFuhp%RHP5t z=?57ym3o8p#?m*OZ!`ZPQ?K!-?>D_z`tC>{vP*x+r~etHZ!CR1(uYjF#BaPS^_PLMkg1Q`AC0&Dv+2dshsP`R zAv1lA&pjIN)$;fgOW(=Fv5=|nBw!&^ukpO6m|jeO=6@!tKOs}E@xN!7UM&3pSFrGh z>4!|c#tU;^9Ohrl@JxRg#)VA%FaZmhdW|oBy6MIAr``p$AL>JPdK-`2C%v)sbx637 zsjm~Tkf|TG@ynZ(|KPiZOurk(Mfz?>O})lDZ#BJ`;i<0&;paBZ95oo{pdLw4nNrPqJN^cue{{Y<0}nR<=y{(|Yn(uez- zz9~bEPXA~hfKZ3<3HW>V(DiheaO^n{C>ab#nQWs zBMx#u|IqBm>219K719UajpY8DcA=a4kg3=F0C{b{eV375hf;qupTG{&i>2?3(hr&G zYyN?km|iS>m|t#x$kc1Tg144!naH>mcAoMkNFRodd;_Rx9P=)s@MDs_nBUNsCv!k@NcFUAF5st zKo&pMw?AU(JEQFnncLr4V#rkL1k;NTRo`v;Q%x^ERQ<5&&oRCDQ1vy_Uu1f*^wV6y z!XNJckh%R`Hvhyb(~B9N^|uqog-m@X0SlS>0n=|Zy_o)oqA!{LxuzFOAJ$i8PiXhUVNzfIn%${ z^x{L+Yrc`&OfQ!Hp|)T1lYHFtV(IH{>zmsjGWVb6Gx@yf#nL}i`kMdbF4K#ppNi5C zndxi3lpmO0EPZ#R51D$+ukuUNi>2?3^dYSYG{b44LU`ewedNFP6R@erZ->untx`E>BZ8A=}{jt z)7N}8n@um4{-M&>{5Cn$i=_|Kn=k!on-6Ew^kV5dQhp&*A2Rc=`EzbGz4%b|GnW1v zOfQzcGfF>Xrax!;_nKZTeJRq1Ougpwxx@5g>1&ZbWa>5l&wkU3rSFXNAycpUf_`aw zvGiei_roz{>NUU6pG_~8zAH*UWa>2^(a9Ls%j+kx^tDJIGW89c-{?Hki>0qe`jDv~ zw)u{pVR~_*=L#17^y5!GVCpqL(z8u3rWeouVR=y>GWD8IDQ|kQ^kI2XA2Ritf9b`h z7fWA59=MRHuMx12sn>i;H=ABef9BtY<0g9X*AV`!g~vWyYFs=}pF(Z|S{cjP2HZOJ6qDd`ol2ns4dhJwCkVTRP2n_*j2K@=O&M#xrFR-@zNL>EYrdt=8*9F$uNiB;rJoyXzNOzAYrdrgy?%Q&-_j|@nr~^T zvF2M^YpnT}E;H7AOIJy64~%Y*-uNbC&A0SkW6ig8r?KW+`kt}oTl$T0%H~IEU+weX zX?&`2xA6tW!^Z23YsOa?*NscYns4cEj5XiVl(FVpdYiH4TlzTpnC5&-UnC!k{^CXL zDK0-DABXdo@xnEpHQ&++#+q;GTw~3*^i*Tbx3tz+^DPyOHQ&;Uj5XiVO~#sU>8-|^ zZ>eFd`If$HtofFHXsr2`{?%CXEiG8gvfAJHR?HUH6BV0bUE;HhrUe-b}C;wvM*I^tas-xBfd5&vVv---C% zi2p6(4rCtxll-0*@r4m@h?tL6@ITRSjhNlo(0^CNZ;1HTh~FRaXCwYb#6ODozK9oN zhr%hz?}-tg9`S_{uZcJvaWUfSB7R-Ow?_P-i0_Q}u88lA_)ifZkD3{7?-L?^a>VN+ 
zes09Yh+iJ@%@Myp;?G6=&4}-b_vo(;{9LabLvgh_8)!D&qG= z{JDsK5b+-(UW{wOF#o4Se1T`2rTlV9$9Ycn z-xDKV9`VYEpAqreh&M#MIpXI@-*>>~z@UU}wP2ggqX17VHVIvtjH9o&$Rl>|EG+uqCjiurAm#*mBtU zunS-p!dAo9z}CSof%U8Fne`GT7y?=fJLj{Wa{l zu;;<9gr#A_uncSz#{OpxHU`@Q+X~CWu7YiYjl(8j1y~VQf?W+O!z!>UY!b$P>ou?) zuxnw@hrIxH9qf8o4faCVi(oH?-2h|X_9d{F!gj%4275W|6|h&rUJZKfc-t}bFe#MpND+`_7AXsgzbZU5%wk6mtkLl{S)ka zuD2FCG5$tr@)>Hdm8NNuxG%!VLh-_uwK|&*Z^z< z_BXIs!Cnh{Gwdy}x5C~Admn5X_CeT(V4r|}686uqufk?vcfq~}`#S6!uy4Y?1^YH^ zKkPfO@5257I~sNj>^N8l?0DFTu#;h@z#aj6B9#(_B5cVS2 zi(xmwZiKxA_EOj`*vnuqhrI&!O4zGmuYpa$UI%+U>?YV7U~h!|Eo?XJX4oyTzk|IA zR)^gRdpqnMuy?}V1$#Ga59~d#+hFg7-41&{>;te5!#)E0DC}dfkHhxDJ`MW}?6a^t zU=7&c!#)SQ6ZU!77hwMY`$yP5*cV}6f_)kG71%$)z6bk0>|bC%fc+3Q3;Pl5ZrG1u zKY{%eb`R`luzO)Yhy4QfOW3bqzlPlh`weUk_OGz}VZVhv0Q(*6_pr_sKI1;m*N5$6 z#P)^abnn_#lZEVf4xcfQ%+!c$#08=LgyohoPONoP)`+v0OR5 zHa|YzR~Rb>PJNZ$VxgMfGFhCg1kRgs_y{XF7=uE)|!U|Aak0InPhI zQ{J0=|6PH5?PKr!K05>8JJ*?!@m#uhYi@L#cPUjS;ef5u_tRZC?zL_P$njNFLQWZOK13$o?a|zOYZ4OAegJO9i|;ljU+QE9ItKrCKbz-9{m@&vL|S!Br4*>o4L%^p+b0**J1l zZZydd9GaU;!DX3pVa*POzpR|E<~A3~nS6!&+&OR0jHfN-p{?#)>5QDNgPHBQz-h;5 zx~Ez#=SL>1IYeD_`w|Rf#Ra>tg?%BlIQ18+h-y4HktF`_Ew|=mispisa3YAH}vlAy&`=PD*iZ62) zihC&W4%z@;14e;kuML@sYm6-VON;sJ zK-JYa=MpxoEeV@HtmFT;G63v^dJB{Ju!{WV0xI zwB(g)rr?U8>Crr5&8-(lT{X8nkgKfy=>(-6Y=yZgdp>yS+43#tf1kwz%xp0tUu8=F| zM^O>kmV^;_4^+E@Je*6jSyS}6>MC~@ZHa{EjWF6vG8NZ$n~3N1Abek?&+Q>!%`HrG zXPU#NTq%rlQ+{+S&v_Yt^eRx(m8bq7k-^;fSd=T<%7eBE4ivfr4|V9O$x0Q6G>$^I zE4;JnhNsPil9f7|I-at9b{I?0j5JSlp(FMnnua(i;+V%t&4$;fGox2e=F7P>YP~y5 z<;=r?ja%eChGQjmEBhWz(|Hn2Kes?<{vkOe?T~k|R?5SGZM`S;dOWG5_=|{L#p}_5 z>KywjgOejw>ytT`O}R|gow^+AwOEo=5Um4?j5`!av55?B6L}uW6<@PYQC^6 zbZ7grHt9VmM!G&LU&x}%wmMVI#DZoG2?gFY=;g^1rMEs;*iv;>UXJc8P+&p_$L9U| z3IpRAe+)=ug)CG-u*aJ7C)L%tvCQOnbxi@A>rS0$5u6Lo!@2dS20SI9?9jlZ2QuYK z&g~Rr8a;t)isfu2&Al}=Pnf`CRnphZO_ZuT(-rhTc$!AMI18tv{>+Ao_i|^1L7Zw_ zKs!L$465A=QV4vlW8BP^`Jrud#eqayTrOpIw&|!a;UTuUfS%;&wp`X7RS272HJKmJ zy8cCw2^5{3K^SChX1tQazMABj+B;M;HiiduV9q0dlxvKca+ 
zsi6N;O_!@It@OyX=;&}I=X-eR^aRetW$aQqR!S)T>R5VoD~@_#6-OT-J(kI%$qQY! zOlHdHS9j&Dzo6pK~t#0+?(TD%4ugNlOI0>9%$Uf*Q%l@j~k>0)6tmmY(| zxO~PfPucMzx-Imn?8G%gb`p_|S)j2j{mxw7v+1(F{`8v5hX(uMIhJ?H*cuW-&ns=| zI!|+AJLLnR*HlW6=8-=AMd|I_ z>0l*T$>mT(#;Q4XJ#hKs%3~roF}fAo!`}()Gyd`-5Wf?}?I?JxRCxB8m|*=VRo&%C zdP}ZaDdn&)@+ea|Zp-aVkGg(S(+%2Yz;f;yG^?pJ97iUx8?EYMr81f+(4kt!smlK{ zH)Q4pzph5%cbv5hzXg|FtW=AW*q+MH%4iW$WOt5d;XOH$9zjyvQ`zx!R!1_{QHQ)E zc6T;ChGUnPmo6Afi?u25ufq`d8eW^VI=MQX9?y^9=qMI&%~MTRi|L|W@ueqr;<#4} z&Gx-_Yo^Tlp}=EsOM=TW<8B?ac=?!ItLZ!TzH{kwRL)$JMh(bq!TpM?J4Lkw5Bt(y zmL!k7jv_}m@|Cu^EW3!81+nY-%jv41C~dTVs8+!yB%W#riYQ`tZJi#^TnHRFX)md%94>9h4Ea=V&1E6ZvX> zJFa(Jm!XimCT+)W;NpwkhX^8l(0pteFOJ|k9HozR(u^}#n4AbARFw*x;+){<;pSaE zsjQ7M{VXpOT#Us6>J+MfdW^R@(zzXIatcWFpiGU8xMmQ&UTYUo?cKH)G81ex4c*!p z?rT||m_ByVmIANpkxV9w9m3s`;(_h5;EM$Zo9nn}=gqV4f~+TpS>Wamx4;4C85}e` zs|1dt{x($LlFg%zI%bZxxn?85QNMW!;~>R3C&47wELF-~w$WWrLS)@FU7`0HND(WmjDEduTsHqkqBI8=F!)pW1$CI|Kr+<2B} z_11yQ(n-T>(pO#VIo_UQ&!~V_xG>I>1u9sJg9^hAA2!$5@Xk`w3=iyN7uv4ZnGtq- zP;1~ensK*SwGqBrx()PA7H;GD(&7Cv7QFMz?cgP?Ti-HTLQFV%-7U^m@{RVIgFDTp zwQRQ5BXbO@kGaGp?wz-cMinn{J}3qR4USH3ND@{DC>_~!>rE}dlEPZc5yR%u-SbB??jleXMnK=DV8vBN=gS(L zIJ@fXZ^kiDv$lh*s;mCP1(n_Pc-ZGlw$pV0ljabFUIY#aRviPEF9yTWhN@mETHn_- z-hymy%uW$b=X%v6?h>5J(M-k1*X*>@S^agcbB9<-@SNswYlaU|h}aQ6UTlMFAU!dk z?s`C2Wuv3C#(5``ttnc=k!EL-sU_RQbEoTtX576o^Ghpba&c@7O&&KCd(PjR@X7Hu zrgv7G#^6lQyH46In52`94j%4}p!4FxC|dO6?2L)+MOop>FwGX$r{Yh)IMw@2b9W7! 
z3iM2?TXCxP{gF{$qW+YPHiY>ZL4T6zrLfUE*iYQX&MVq8v@YuLh$}gC2Y9_kVRM&a zb+lOaC8V~HZ6J3r>dII#UF9tq=zM#ML!^XDK=h<=i;8WKoe11c^}ST4peR_d&2^Wy zFibgL*p9phA-CkmaZ2$w%eU|@j_VRdf$UlVUHeRVXS3R8(FJvt(p`q9N8E$5F?Q_S zt$TMqtLEL!d*49X1%6M(-^9alUQFt*?bVhlj2etvTS*1AyIB?zwGQ>!>Rm2?5AGHgu{+aPC2XQQ!sUnNpy`9N053Xxk2vf+vLYo-0l~MNTthm(5)$m3 zpW-z~(5gAaHsO92J+Ow(8~Ubbib{O+(~Nrr1%Qf(P-W}~?s0Bk+$S|xe|>AU*1Ls0 zWbeYO&6XfKv%0$O!?^ByOQ5J@?1EueumZB#K(om%sU6kH@!dS8TaR3nSLT|aS#+gl z9YY^=(vnz{=G%HEGSRUqJd6I2-G1;YxdZVpL^V2|LHXfXaK`5lo%u4(vFHUp3OW%KrP94a_;Jsa?Ny`RweE%*o#DHh zcN&8Ajs8maAQwIxZYtK|3Sbp3=>n&9y#pck@!HFKZ$QmJV7@Em9oDYxv$gf;iFmhl z=!y(lPb3Mo3Y}KG+u$Fuug>9g>NYmFgB=!}i?NXtIrOQsSa-G2-x8b84fPcrBZl+$ zT2SZkk{8&QK&Fz$T{-vKSK@#h*rnk1M-rc;;c3f3dee z=GiH*qaV9^3GRZsM`^w?C8@BDNHR@m9{futc<;W|sWlcg3Q^>VGb8YixO#nO+ZQ?G z44Q*L?JnyAw%oZwkDQZs;2?LqEDGF{OTEBNJ?g-TskJl%f69_CS_kN8$v?ep|Co;vHR8<-Mh>`uVZTnRu9XCX0(t zjQe;&cY4=-Y}9UKAGeF# zg^Dzz>VN2-`4V^I%6Cc-H+v|!?nT?Z#Q(V5ZCr&n7u~Unt1Hw#_c8!3+D(AaEf^g@)XBbR3F2~POcC~aSm3>3?`5?;&)kSZyBECx zY-=jb7ArHpA#)W=*C1bL!r+X~{%pv{>13-m}p;-d6L7o2zgl9k9rYt`oC zX}43+4)cj!?2k^-e1}A8Db5(YDqXr(J+R8s0e4$vNzx}Snt2%i6agYw5>SeSipy$KDwui{*i#|I`LeqX9$VI;ir#>pFZM#tlcHT?HT`6 z?SISeHx9YZJ@}t_f|+zXSMxFp_s_~pae**_ zXMpSTh1?L{6|XEET>Y%d(hZr>jf0n?_q*)EC6(;9rIVF%(gXFkD7ZVB1$W`W_alXN zb?mWun~c}8c7x1&CFRy~MV$C0PcfK?Ziq3G|6iYnqN2EGG3;?ySY3R-?xCNLLg+4+ z{pACNIP~4CQE=wP686#I=cWEBc*_Cful|z?eE50k;pe5T?Wwzm!+JpPW-;pe5z zN2G_Jm$JV64}MzdQZ(~R`y;*)1ld4YPzY<6uKOKZm7qbho2iB tes1VquTLJq9e!?zM`wqh8)D3(yVSx(s=I_c{M=Au91lM?#P~`q{~M&zVs`)l literal 0 HcmV?d00001 diff --git a/.venv/lib/python3.9/site-packages/_distutils_hack/__init__.py b/.venv/lib/python3.9/site-packages/_distutils_hack/__init__.py new file mode 100644 index 0000000..ab462f9 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/_distutils_hack/__init__.py @@ -0,0 +1,198 @@ +# don't import any costly modules +import sys +import os + + +is_pypy = '__pypy__' in sys.builtin_module_names + + +def warn_distutils_present(): + if 'distutils' not in 
sys.modules: + return + if is_pypy and sys.version_info < (3, 7): + # PyPy for 3.6 unconditionally imports distutils, so bypass the warning + # https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250 + return + import warnings + warnings.warn( + "Distutils was imported before Setuptools, but importing Setuptools " + "also replaces the `distutils` module in `sys.modules`. This may lead " + "to undesirable behaviors or errors. To avoid these issues, avoid " + "using distutils directly, ensure that setuptools is installed in the " + "traditional way (e.g. not an editable install), and/or make sure " + "that setuptools is always imported before distutils.") + + +def clear_distutils(): + if 'distutils' not in sys.modules: + return + import warnings + warnings.warn("Setuptools is replacing distutils.") + mods = [ + name for name in sys.modules + if name == "distutils" or name.startswith("distutils.") + ] + for name in mods: + del sys.modules[name] + + +def enabled(): + """ + Allow selection of distutils by environment variable. + """ + which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'local') + return which == 'local' + + +def ensure_local_distutils(): + import importlib + clear_distutils() + + # With the DistutilsMetaFinder in place, + # perform an import to cause distutils to be + # loaded from setuptools._distutils. Ref #2906. + with shim(): + importlib.import_module('distutils') + + # check that submodules load as expected + core = importlib.import_module('distutils.core') + assert '_distutils' in core.__file__, core.__file__ + + +def do_override(): + """ + Ensure that the local copy of distutils is preferred over stdlib. + + See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401 + for more motivation. 
+ """ + if enabled(): + warn_distutils_present() + ensure_local_distutils() + + +class _TrivialRe: + def __init__(self, *patterns): + self._patterns = patterns + + def match(self, string): + return all(pat in string for pat in self._patterns) + + +class DistutilsMetaFinder: + def find_spec(self, fullname, path, target=None): + if path is not None: + return + + method_name = 'spec_for_{fullname}'.format(**locals()) + method = getattr(self, method_name, lambda: None) + return method() + + def spec_for_distutils(self): + import importlib + import importlib.abc + import importlib.util + import warnings + + # warnings.filterwarnings() imports the re module + warnings._add_filter( + 'ignore', + _TrivialRe("distutils", "deprecated"), + DeprecationWarning, + None, + 0, + append=True + ) + + try: + mod = importlib.import_module('setuptools._distutils') + except Exception: + # There are a couple of cases where setuptools._distutils + # may not be present: + # - An older Setuptools without a local distutils is + # taking precedence. Ref #2957. + # - Path manipulation during sitecustomize removes + # setuptools from the path but only after the hook + # has been loaded. Ref #2980. + # In either case, fall back to stdlib behavior. + return + + class DistutilsLoader(importlib.abc.Loader): + + def create_module(self, spec): + return mod + + def exec_module(self, module): + pass + + return importlib.util.spec_from_loader( + 'distutils', DistutilsLoader(), origin=mod.__file__ + ) + + def spec_for_pip(self): + """ + Ensure stdlib distutils when running under pip. + See pypa/pip#8761 for rationale. + """ + if self.pip_imported_during_build(): + return + if self.is_get_pip(): + return + clear_distutils() + self.spec_for_distutils = lambda: None + + @classmethod + def pip_imported_during_build(cls): + """ + Detect if pip is being imported in a build script. Ref #2355. 
+ """ + import traceback + return any( + cls.frame_file_is_setup(frame) + for frame, line in traceback.walk_stack(None) + ) + + @classmethod + def is_get_pip(cls): + """ + Detect if get-pip is being invoked. Ref #2993. + """ + try: + import __main__ + return os.path.basename(__main__.__file__) == 'get-pip.py' + except AttributeError: + pass + + @staticmethod + def frame_file_is_setup(frame): + """ + Return True if the indicated frame suggests a setup.py file. + """ + # some frames may not have __file__ (#2940) + return frame.f_globals.get('__file__', '').endswith('setup.py') + + +DISTUTILS_FINDER = DistutilsMetaFinder() + + +def add_shim(): + DISTUTILS_FINDER in sys.meta_path or insert_shim() + + +class shim: + def __enter__(self): + insert_shim() + + def __exit__(self, exc, value, tb): + remove_shim() + + +def insert_shim(): + sys.meta_path.insert(0, DISTUTILS_FINDER) + + +def remove_shim(): + try: + sys.meta_path.remove(DISTUTILS_FINDER) + except ValueError: + pass diff --git a/.venv/lib/python3.9/site-packages/_distutils_hack/override.py b/.venv/lib/python3.9/site-packages/_distutils_hack/override.py new file mode 100644 index 0000000..2cc433a --- /dev/null +++ b/.venv/lib/python3.9/site-packages/_distutils_hack/override.py @@ -0,0 +1 @@ +__import__('_distutils_hack').do_override() diff --git a/.venv/lib/python3.9/site-packages/_pyrsistent_version.py b/.venv/lib/python3.9/site-packages/_pyrsistent_version.py new file mode 100644 index 0000000..5877c8d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/_pyrsistent_version.py @@ -0,0 +1 @@ +__version__ = '0.18.1' diff --git a/.venv/lib/python3.9/site-packages/anyio-3.5.0.dist-info/INSTALLER b/.venv/lib/python3.9/site-packages/anyio-3.5.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/anyio-3.5.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.9/site-packages/anyio-3.5.0.dist-info/LICENSE 
b/.venv/lib/python3.9/site-packages/anyio-3.5.0.dist-info/LICENSE new file mode 100644 index 0000000..104eebf --- /dev/null +++ b/.venv/lib/python3.9/site-packages/anyio-3.5.0.dist-info/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2018 Alex Grönholm + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/.venv/lib/python3.9/site-packages/anyio-3.5.0.dist-info/METADATA b/.venv/lib/python3.9/site-packages/anyio-3.5.0.dist-info/METADATA new file mode 100644 index 0000000..24d611d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/anyio-3.5.0.dist-info/METADATA @@ -0,0 +1,106 @@ +Metadata-Version: 2.1 +Name: anyio +Version: 3.5.0 +Summary: High level compatibility layer for multiple asynchronous event loop implementations +Home-page: UNKNOWN +Author: Alex Grönholm +Author-email: alex.gronholm@nextday.fi +License: MIT +Project-URL: Documentation, https://anyio.readthedocs.io/en/latest/ +Project-URL: Source code, https://github.com/agronholm/anyio +Project-URL: Issue tracker, https://github.com/agronholm/anyio/issues +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Framework :: AnyIO +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Requires-Python: >=3.6.2 +License-File: LICENSE +Requires-Dist: idna (>=2.8) +Requires-Dist: sniffio (>=1.1) +Requires-Dist: contextvars ; python_version < "3.7" +Requires-Dist: dataclasses ; python_version < "3.7" +Requires-Dist: typing-extensions ; python_version < "3.8" +Provides-Extra: doc +Requires-Dist: packaging ; extra == 'doc' +Requires-Dist: sphinx-rtd-theme ; extra == 'doc' +Requires-Dist: sphinx-autodoc-typehints (>=1.2.0) ; extra == 'doc' +Provides-Extra: test +Requires-Dist: coverage[toml] (>=4.5) ; extra == 'test' +Requires-Dist: hypothesis (>=4.0) ; extra == 'test' +Requires-Dist: pytest (>=6.0) ; extra == 'test' +Requires-Dist: pytest-mock (>=3.6.1) ; extra == 'test' 
+Requires-Dist: trustme ; extra == 'test' +Requires-Dist: contextlib2 ; (python_version < "3.7") and extra == 'test' +Requires-Dist: uvloop (<0.15) ; (python_version < "3.7" and (platform_python_implementation == "CPython" and platform_system != "Windows")) and extra == 'test' +Requires-Dist: mock (>=4) ; (python_version < "3.8") and extra == 'test' +Requires-Dist: uvloop (>=0.15) ; (python_version >= "3.7" and (platform_python_implementation == "CPython" and platform_system != "Windows")) and extra == 'test' +Provides-Extra: trio +Requires-Dist: trio (>=0.16) ; extra == 'trio' + +.. image:: https://github.com/agronholm/anyio/actions/workflows/test.yml/badge.svg + :target: https://github.com/agronholm/anyio/actions/workflows/test.yml + :alt: Build Status +.. image:: https://coveralls.io/repos/github/agronholm/anyio/badge.svg?branch=master + :target: https://coveralls.io/github/agronholm/anyio?branch=master + :alt: Code Coverage +.. image:: https://readthedocs.org/projects/anyio/badge/?version=latest + :target: https://anyio.readthedocs.io/en/latest/?badge=latest + :alt: Documentation +.. image:: https://badges.gitter.im/gitterHQ/gitter.svg + :target: https://gitter.im/python-trio/AnyIO + :alt: Gitter chat + +AnyIO is an asynchronous networking and concurrency library that works on top of either asyncio_ or +trio_. It implements trio-like `structured concurrency`_ (SC) on top of asyncio, and works in harmony +with the native SC of trio itself. + +Applications and libraries written against AnyIO's API will run unmodified on either asyncio_ or +trio_. AnyIO can also be adopted into a library or application incrementally – bit by bit, no full +refactoring necessary. It will blend in with native libraries of your chosen backend. 
+ +Documentation +------------- + +View full documentation at: https://anyio.readthedocs.io/ + +Features +-------- + +AnyIO offers the following functionality: + +* Task groups (nurseries_ in trio terminology) +* High level networking (TCP, UDP and UNIX sockets) + + * `Happy eyeballs`_ algorithm for TCP connections (more robust than that of asyncio on Python + 3.8) + * async/await style UDP sockets (unlike asyncio where you still have to use Transports and + Protocols) + +* A versatile API for byte streams and object streams +* Inter-task synchronization and communication (locks, conditions, events, semaphores, object + streams) +* Worker threads +* Subprocesses +* Asynchronous file I/O (using worker threads) +* Signal handling + +AnyIO also comes with its own pytest_ plugin which also supports asynchronous fixtures. +It even works with the popular Hypothesis_ library. + +.. _asyncio: https://docs.python.org/3/library/asyncio.html +.. _trio: https://github.com/python-trio/trio +.. _structured concurrency: https://en.wikipedia.org/wiki/Structured_concurrency +.. _nurseries: https://trio.readthedocs.io/en/stable/reference-core.html#nurseries-and-spawning +.. _Happy eyeballs: https://en.wikipedia.org/wiki/Happy_Eyeballs +.. _pytest: https://docs.pytest.org/en/latest/ +.. 
_Hypothesis: https://hypothesis.works/ + + diff --git a/.venv/lib/python3.9/site-packages/anyio-3.5.0.dist-info/RECORD b/.venv/lib/python3.9/site-packages/anyio-3.5.0.dist-info/RECORD new file mode 100644 index 0000000..79d8b4b --- /dev/null +++ b/.venv/lib/python3.9/site-packages/anyio-3.5.0.dist-info/RECORD @@ -0,0 +1,82 @@ +anyio-3.5.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +anyio-3.5.0.dist-info/LICENSE,sha256=U2GsncWPLvX9LpsJxoKXwX8ElQkJu8gCO9uC6s8iwrA,1081 +anyio-3.5.0.dist-info/METADATA,sha256=qiAHEKm52YTAQjLLGrl1dHcWDN9wgUtRo_hbjC6N3og,4693 +anyio-3.5.0.dist-info/RECORD,, +anyio-3.5.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 +anyio-3.5.0.dist-info/entry_points.txt,sha256=z1bvtbND76CfYuqdNZxiaibWP2IOqSVa8FQKIk4lVQk,40 +anyio-3.5.0.dist-info/top_level.txt,sha256=QglSMiWX8_5dpoVAEIHdEYzvqFMdSYWmCj6tYw2ITkQ,6 +anyio/__init__.py,sha256=fxdj2SjxB6e-Q4lg_j3uDRK6MiZo-VXW4lVIV_YPpFk,3843 +anyio/__pycache__/__init__.cpython-39.pyc,, +anyio/__pycache__/from_thread.cpython-39.pyc,, +anyio/__pycache__/lowlevel.cpython-39.pyc,, +anyio/__pycache__/pytest_plugin.cpython-39.pyc,, +anyio/__pycache__/to_process.cpython-39.pyc,, +anyio/__pycache__/to_thread.cpython-39.pyc,, +anyio/_backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/_backends/__pycache__/__init__.cpython-39.pyc,, +anyio/_backends/__pycache__/_asyncio.cpython-39.pyc,, +anyio/_backends/__pycache__/_trio.cpython-39.pyc,, +anyio/_backends/_asyncio.py,sha256=l8780cOAPM2wbAfuPJQTTHyuKiX5efFydzSEQEHHiks,66395 +anyio/_backends/_trio.py,sha256=wu-9Sx53rUqVpyX2O7bUA4ElmkoHRbqBscRoP1xXAXU,27664 +anyio/_core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/_core/__pycache__/__init__.cpython-39.pyc,, +anyio/_core/__pycache__/_compat.cpython-39.pyc,, +anyio/_core/__pycache__/_eventloop.cpython-39.pyc,, +anyio/_core/__pycache__/_exceptions.cpython-39.pyc,, +anyio/_core/__pycache__/_fileio.cpython-39.pyc,, 
+anyio/_core/__pycache__/_resources.cpython-39.pyc,, +anyio/_core/__pycache__/_signals.cpython-39.pyc,, +anyio/_core/__pycache__/_sockets.cpython-39.pyc,, +anyio/_core/__pycache__/_streams.cpython-39.pyc,, +anyio/_core/__pycache__/_subprocesses.cpython-39.pyc,, +anyio/_core/__pycache__/_synchronization.cpython-39.pyc,, +anyio/_core/__pycache__/_tasks.cpython-39.pyc,, +anyio/_core/__pycache__/_testing.cpython-39.pyc,, +anyio/_core/__pycache__/_typedattr.cpython-39.pyc,, +anyio/_core/_compat.py,sha256=RM2vCmSviAmW0qFKuMaCGzn3JKD63UMNOIM6X6rGjdU,5668 +anyio/_core/_eventloop.py,sha256=3CrLCclhm1R_K_6wK3LP_Q7eEEqEwuIv7M3At1hhZkc,4055 +anyio/_core/_exceptions.py,sha256=CfFeKh4K25Z2X0lff2ahbVoBx1M3hxVagoOQwfRnEC8,2829 +anyio/_core/_fileio.py,sha256=9BAV2LZ-90cvmEQ1AuKhCNxqQeWCXr8qDqjZtHZ3aC4,18062 +anyio/_core/_resources.py,sha256=M_uN-90N8eSsWuvo-0xluWU_OG2BTyccAgsQ7XtHxzs,399 +anyio/_core/_signals.py,sha256=ub6LfvBz-z3O1qj8-WkWi46t_dpcPTefSfC27NBs-lU,820 +anyio/_core/_sockets.py,sha256=7S8UKPkfgcya0qXTkUisrnrR_lEPR56HbFbvB1ehz2Q,19784 +anyio/_core/_streams.py,sha256=GiNATCZCl2BDRCOXwPMV9Bonz7NbFSa4xK_IKUb5hWI,1483 +anyio/_core/_subprocesses.py,sha256=n6cX_LNh3gyo-lTDUi0pVV6hmr5Au3QkUfAVuVeHXpE,4869 +anyio/_core/_synchronization.py,sha256=nsrsv9ee7_sEUV6uncsfg_8sfc4nO-CbM13tUXRsE_Y,16720 +anyio/_core/_tasks.py,sha256=nhM5aEbdUjOdL3aCUonp3dy1zurl7OGFSsPg8OujEmE,5199 +anyio/_core/_testing.py,sha256=bp6n3_KFC68AhUAcu0XGq0aZRYSeQWP4FY3uWtsfc_8,2166 +anyio/_core/_typedattr.py,sha256=0hYrxkAFHCEBkcIC1-goHLd5bXth5VbNkCLTojvNbaM,2496 +anyio/abc/__init__.py,sha256=ugKefsiv5Y4DGLzEYsOrS-izkTO6UNM7v9dYpbLAknQ,1980 +anyio/abc/__pycache__/__init__.cpython-39.pyc,, +anyio/abc/__pycache__/_resources.cpython-39.pyc,, +anyio/abc/__pycache__/_sockets.cpython-39.pyc,, +anyio/abc/__pycache__/_streams.cpython-39.pyc,, +anyio/abc/__pycache__/_subprocesses.cpython-39.pyc,, +anyio/abc/__pycache__/_tasks.cpython-39.pyc,, +anyio/abc/__pycache__/_testing.cpython-39.pyc,, 
+anyio/abc/_resources.py,sha256=VC7Gzy8xwOGrPtfjNuSjGaKVXmBy0IS4sVpEwq2vZa0,761 +anyio/abc/_sockets.py,sha256=uFgijTGLAHbrfK8JA3arScbiN0o88bf0uUSlq4MjnEg,5605 +anyio/abc/_streams.py,sha256=h_EXlQsbpwt63gd2jSjaGBLprBfzG7vcSQYIZuDI5LY,6516 +anyio/abc/_subprocesses.py,sha256=iREP_YQ91it88lDU4XIcI3HZ9HUvV5UmjQk_sSPonrw,2071 +anyio/abc/_tasks.py,sha256=bcNfMaayFOrrlpPRklklK2GfIXGWgRaP-HUs35-J_18,3051 +anyio/abc/_testing.py,sha256=LfRDpPw4FQrja9dkhzV_RovBmV4sxqvzxHX5YrV6lYc,1147 +anyio/from_thread.py,sha256=6qdCL0PS6pbh3fdDPgR2uLucrAKcVDwCaZlB_DcPeNA,16042 +anyio/lowlevel.py,sha256=98x-Z9jKxEeuvZs7KFP15bZ6D-n-SlEzmxjRRqj1YlU,4612 +anyio/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/pytest_plugin.py,sha256=bguloPM9UfdxIGlteWnctgT2PXbs1zFRdZ_JHtIGSJc,5544 +anyio/streams/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/streams/__pycache__/__init__.cpython-39.pyc,, +anyio/streams/__pycache__/buffered.cpython-39.pyc,, +anyio/streams/__pycache__/file.cpython-39.pyc,, +anyio/streams/__pycache__/memory.cpython-39.pyc,, +anyio/streams/__pycache__/stapled.cpython-39.pyc,, +anyio/streams/__pycache__/text.cpython-39.pyc,, +anyio/streams/__pycache__/tls.cpython-39.pyc,, +anyio/streams/buffered.py,sha256=32jQEEkqefrmPgAXKAQoGnNSdm5l0zzaa0V_nYkwpbM,4435 +anyio/streams/file.py,sha256=HT-u90tt-zNwlRlZhKSpFhKrWEKd4QkLPR4ySF9FfUs,4345 +anyio/streams/memory.py,sha256=4qzW3_N69w-AdixRZOkCemF6veRBcV6-2IRjL63BXA8,9161 +anyio/streams/stapled.py,sha256=euIt3fnuvs3rE7Xn5QsDYhebP5neXAoyCVcAPcM6vpE,4168 +anyio/streams/text.py,sha256=iTrT7auMl2SGvFxGf-UA0DJAdTx2ZOW663q1ucMihzs,4966 +anyio/streams/tls.py,sha256=_DSW8p4l8xh5DR4tCi_8QS83wptgMcHJ_JSzPXdNPLE,11778 +anyio/to_process.py,sha256=tXGfHyGokeVERftxEU5AvygQS8OoOdPIFXTs8a_5lRw,9020 +anyio/to_thread.py,sha256=f-SIvh1-VSg78_R5k6JfP7sXJ5epx3eBa3cDPh1s8lk,2139 diff --git a/.venv/lib/python3.9/site-packages/anyio-3.5.0.dist-info/WHEEL b/.venv/lib/python3.9/site-packages/anyio-3.5.0.dist-info/WHEEL new file 
mode 100644 index 0000000..becc9a6 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/anyio-3.5.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/.venv/lib/python3.9/site-packages/anyio-3.5.0.dist-info/entry_points.txt b/.venv/lib/python3.9/site-packages/anyio-3.5.0.dist-info/entry_points.txt new file mode 100644 index 0000000..1740df0 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/anyio-3.5.0.dist-info/entry_points.txt @@ -0,0 +1,3 @@ +[pytest11] +anyio = anyio.pytest_plugin + diff --git a/.venv/lib/python3.9/site-packages/anyio-3.5.0.dist-info/top_level.txt b/.venv/lib/python3.9/site-packages/anyio-3.5.0.dist-info/top_level.txt new file mode 100644 index 0000000..c77c069 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/anyio-3.5.0.dist-info/top_level.txt @@ -0,0 +1 @@ +anyio diff --git a/.venv/lib/python3.9/site-packages/anyio/__init__.py b/.venv/lib/python3.9/site-packages/anyio/__init__.py new file mode 100644 index 0000000..974e8c2 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/anyio/__init__.py @@ -0,0 +1,116 @@ +__all__ = ( + 'maybe_async', + 'maybe_async_cm', + 'run', + 'sleep', + 'sleep_forever', + 'sleep_until', + 'current_time', + 'get_all_backends', + 'get_cancelled_exc_class', + 'BrokenResourceError', + 'BrokenWorkerProcess', + 'BusyResourceError', + 'ClosedResourceError', + 'DelimiterNotFound', + 'EndOfStream', + 'ExceptionGroup', + 'IncompleteRead', + 'TypedAttributeLookupError', + 'WouldBlock', + 'AsyncFile', + 'Path', + 'open_file', + 'wrap_file', + 'aclose_forcefully', + 'open_signal_receiver', + 'connect_tcp', + 'connect_unix', + 'create_tcp_listener', + 'create_unix_listener', + 'create_udp_socket', + 'create_connected_udp_socket', + 'getaddrinfo', + 'getnameinfo', + 'wait_socket_readable', + 'wait_socket_writable', + 'create_memory_object_stream', + 'run_process', + 'open_process', + 'create_lock', + 'CapacityLimiter', + 
'CapacityLimiterStatistics', + 'Condition', + 'ConditionStatistics', + 'Event', + 'EventStatistics', + 'Lock', + 'LockStatistics', + 'Semaphore', + 'SemaphoreStatistics', + 'create_condition', + 'create_event', + 'create_semaphore', + 'create_capacity_limiter', + 'open_cancel_scope', + 'fail_after', + 'move_on_after', + 'current_effective_deadline', + 'TASK_STATUS_IGNORED', + 'CancelScope', + 'create_task_group', + 'TaskInfo', + 'get_current_task', + 'get_running_tasks', + 'wait_all_tasks_blocked', + 'run_sync_in_worker_thread', + 'run_async_from_thread', + 'run_sync_from_thread', + 'current_default_worker_thread_limiter', + 'create_blocking_portal', + 'start_blocking_portal', + 'typed_attribute', + 'TypedAttributeSet', + 'TypedAttributeProvider' +) + +from typing import Any + +from ._core._compat import maybe_async, maybe_async_cm +from ._core._eventloop import ( + current_time, get_all_backends, get_cancelled_exc_class, run, sleep, sleep_forever, + sleep_until) +from ._core._exceptions import ( + BrokenResourceError, BrokenWorkerProcess, BusyResourceError, ClosedResourceError, + DelimiterNotFound, EndOfStream, ExceptionGroup, IncompleteRead, TypedAttributeLookupError, + WouldBlock) +from ._core._fileio import AsyncFile, Path, open_file, wrap_file +from ._core._resources import aclose_forcefully +from ._core._signals import open_signal_receiver +from ._core._sockets import ( + connect_tcp, connect_unix, create_connected_udp_socket, create_tcp_listener, create_udp_socket, + create_unix_listener, getaddrinfo, getnameinfo, wait_socket_readable, wait_socket_writable) +from ._core._streams import create_memory_object_stream +from ._core._subprocesses import open_process, run_process +from ._core._synchronization import ( + CapacityLimiter, CapacityLimiterStatistics, Condition, ConditionStatistics, Event, + EventStatistics, Lock, LockStatistics, Semaphore, SemaphoreStatistics, create_capacity_limiter, + create_condition, create_event, create_lock, create_semaphore) 
+from ._core._tasks import ( + TASK_STATUS_IGNORED, CancelScope, create_task_group, current_effective_deadline, fail_after, + move_on_after, open_cancel_scope) +from ._core._testing import TaskInfo, get_current_task, get_running_tasks, wait_all_tasks_blocked +from ._core._typedattr import TypedAttributeProvider, TypedAttributeSet, typed_attribute + +# Re-exported here, for backwards compatibility +# isort: off +from .to_thread import current_default_worker_thread_limiter, run_sync_in_worker_thread +from .from_thread import ( + create_blocking_portal, run_async_from_thread, run_sync_from_thread, start_blocking_portal) + +# Re-export imports so they look like they live directly in this package +key: str +value: Any +for key, value in list(locals().items()): + if getattr(value, '__module__', '').startswith('anyio.'): + value.__module__ = __name__ diff --git a/.venv/lib/python3.9/site-packages/anyio/_backends/__init__.py b/.venv/lib/python3.9/site-packages/anyio/_backends/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.9/site-packages/anyio/_backends/_asyncio.py b/.venv/lib/python3.9/site-packages/anyio/_backends/_asyncio.py new file mode 100644 index 0000000..f2929b4 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/anyio/_backends/_asyncio.py @@ -0,0 +1,1924 @@ +import array +import asyncio +import concurrent.futures +import math +import socket +import sys +from asyncio.base_events import _run_until_complete_cb # type: ignore[attr-defined] +from collections import OrderedDict, deque +from concurrent.futures import Future +from contextvars import Context, copy_context +from dataclasses import dataclass +from functools import partial, wraps +from inspect import ( + CORO_RUNNING, CORO_SUSPENDED, GEN_RUNNING, GEN_SUSPENDED, getcoroutinestate, getgeneratorstate) +from io import IOBase +from os import PathLike +from queue import Queue +from socket import AddressFamily, SocketKind +from threading import Thread +from types import 
TracebackType +from typing import ( + Any, Awaitable, Callable, Collection, Coroutine, Deque, Dict, Generator, Iterable, List, + Mapping, Optional, Sequence, Set, Tuple, Type, TypeVar, Union, cast) +from weakref import WeakKeyDictionary + +import sniffio + +from .. import CapacityLimiterStatistics, EventStatistics, TaskInfo, abc +from .._core._compat import DeprecatedAsyncContextManager, DeprecatedAwaitable +from .._core._eventloop import claim_worker_thread, threadlocals +from .._core._exceptions import ( + BrokenResourceError, BusyResourceError, ClosedResourceError, EndOfStream) +from .._core._exceptions import ExceptionGroup as BaseExceptionGroup +from .._core._exceptions import WouldBlock +from .._core._sockets import GetAddrInfoReturnType, convert_ipv6_sockaddr +from .._core._synchronization import CapacityLimiter as BaseCapacityLimiter +from .._core._synchronization import Event as BaseEvent +from .._core._synchronization import ResourceGuard +from .._core._tasks import CancelScope as BaseCancelScope +from ..abc import IPSockAddrType, UDPPacketType +from ..lowlevel import RunVar + +if sys.version_info >= (3, 8): + get_coro = asyncio.Task.get_coro +else: + def get_coro(task: asyncio.Task) -> Union[Generator, Awaitable[Any]]: + return task._coro + +if sys.version_info >= (3, 7): + from asyncio import all_tasks, create_task, current_task, get_running_loop + from asyncio import run as native_run + + def _get_task_callbacks(task: asyncio.Task) -> Iterable[Callable]: + return [cb for cb, context in task._callbacks] # type: ignore[attr-defined] +else: + _T = TypeVar('_T') + + def _get_task_callbacks(task: asyncio.Task) -> Iterable[Callable]: + return task._callbacks + + def native_run(main, *, debug=False): + # Snatched from Python 3.7 + from asyncio import coroutines, events, tasks + + def _cancel_all_tasks(loop): + to_cancel = all_tasks(loop) + if not to_cancel: + return + + for task in to_cancel: + task.cancel() + + loop.run_until_complete( + 
tasks.gather(*to_cancel, loop=loop, return_exceptions=True)) + + for task in to_cancel: + if task.cancelled(): + continue + if task.exception() is not None: + loop.call_exception_handler({ + 'message': 'unhandled exception during asyncio.run() shutdown', + 'exception': task.exception(), + 'task': task, + }) + + if events._get_running_loop() is not None: + raise RuntimeError( + "asyncio.run() cannot be called from a running event loop") + + if not coroutines.iscoroutine(main): + raise ValueError(f"a coroutine was expected, got {main!r}") + + loop = events.new_event_loop() + try: + events.set_event_loop(loop) + loop.set_debug(debug) + return loop.run_until_complete(main) + finally: + try: + _cancel_all_tasks(loop) + loop.run_until_complete(loop.shutdown_asyncgens()) + finally: + events.set_event_loop(None) + loop.close() + + def create_task(coro: Union[Generator[Any, None, _T], Awaitable[_T]], *, + name: object = None) -> asyncio.Task: + return get_running_loop().create_task(coro) + + def get_running_loop() -> asyncio.AbstractEventLoop: + loop = asyncio._get_running_loop() + if loop is not None: + return loop + else: + raise RuntimeError('no running event loop') + + def all_tasks(loop: Optional[asyncio.AbstractEventLoop] = None) -> Set[asyncio.Task]: + """Return a set of all tasks for the loop.""" + from asyncio import Task + + if loop is None: + loop = get_running_loop() + + return {t for t in Task.all_tasks(loop) if not t.done()} + + def current_task(loop: Optional[asyncio.AbstractEventLoop] = None) -> Optional[asyncio.Task]: + if loop is None: + loop = get_running_loop() + + return asyncio.Task.current_task(loop) + +T_Retval = TypeVar('T_Retval') + +# Check whether there is native support for task names in asyncio (3.8+) +_native_task_names = hasattr(asyncio.Task, 'get_name') + + +_root_task: RunVar[Optional[asyncio.Task]] = RunVar('_root_task') + + +def find_root_task() -> asyncio.Task: + root_task = _root_task.get(None) + if root_task is not None and not 
root_task.done(): + return root_task + + # Look for a task that has been started via run_until_complete() + for task in all_tasks(): + if task._callbacks and not task.done(): + for cb in _get_task_callbacks(task): + if (cb is _run_until_complete_cb + or getattr(cb, '__module__', None) == 'uvloop.loop'): + _root_task.set(task) + return task + + # Look up the topmost task in the AnyIO task tree, if possible + task = cast(asyncio.Task, current_task()) + state = _task_states.get(task) + if state: + cancel_scope = state.cancel_scope + while cancel_scope and cancel_scope._parent_scope is not None: + cancel_scope = cancel_scope._parent_scope + + if cancel_scope is not None: + return cast(asyncio.Task, cancel_scope._host_task) + + return task + + +def get_callable_name(func: Callable) -> str: + module = getattr(func, '__module__', None) + qualname = getattr(func, '__qualname__', None) + return '.'.join([x for x in (module, qualname) if x]) + + +# +# Event loop +# + +_run_vars = WeakKeyDictionary() # type: WeakKeyDictionary[asyncio.AbstractEventLoop, Any] + +current_token = get_running_loop + + +def _task_started(task: asyncio.Task) -> bool: + """Return ``True`` if the task has been started and has not finished.""" + coro = get_coro(task) + try: + return getcoroutinestate(coro) in (CORO_RUNNING, CORO_SUSPENDED) + except AttributeError: + try: + return getgeneratorstate(cast(Generator, coro)) in (GEN_RUNNING, GEN_SUSPENDED) + except AttributeError: + # task coro is async_genenerator_asend https://bugs.python.org/issue37771 + raise Exception(f"Cannot determine if task {task} has started or not") + + +def _maybe_set_event_loop_policy(policy: Optional[asyncio.AbstractEventLoopPolicy], + use_uvloop: bool) -> None: + # On CPython, use uvloop when possible if no other policy has been given and if not + # explicitly disabled + if policy is None and use_uvloop and sys.implementation.name == 'cpython': + try: + import uvloop + except ImportError: + pass + else: + # Test for missing 
shutdown_default_executor() (uvloop 0.14.0 and earlier) + if (not hasattr(asyncio.AbstractEventLoop, 'shutdown_default_executor') + or hasattr(uvloop.loop.Loop, 'shutdown_default_executor')): + policy = uvloop.EventLoopPolicy() + + if policy is not None: + asyncio.set_event_loop_policy(policy) + + +def run(func: Callable[..., Awaitable[T_Retval]], *args: object, + debug: bool = False, use_uvloop: bool = False, + policy: Optional[asyncio.AbstractEventLoopPolicy] = None) -> T_Retval: + @wraps(func) + async def wrapper() -> T_Retval: + task = cast(asyncio.Task, current_task()) + task_state = TaskState(None, get_callable_name(func), None) + _task_states[task] = task_state + if _native_task_names: + task.set_name(task_state.name) + + try: + return await func(*args) + finally: + del _task_states[task] + + _maybe_set_event_loop_policy(policy, use_uvloop) + return native_run(wrapper(), debug=debug) + + +# +# Miscellaneous +# + +sleep = asyncio.sleep + + +# +# Timeouts and cancellation +# + +CancelledError = asyncio.CancelledError + + +class CancelScope(BaseCancelScope): + def __new__(cls, *, deadline: float = math.inf, shield: bool = False) -> "CancelScope": + return object.__new__(cls) + + def __init__(self, deadline: float = math.inf, shield: bool = False): + self._deadline = deadline + self._shield = shield + self._parent_scope: Optional[CancelScope] = None + self._cancel_called = False + self._active = False + self._timeout_handle: Optional[asyncio.TimerHandle] = None + self._cancel_handle: Optional[asyncio.Handle] = None + self._tasks: Set[asyncio.Task] = set() + self._host_task: Optional[asyncio.Task] = None + self._timeout_expired = False + + def __enter__(self) -> "CancelScope": + if self._active: + raise RuntimeError( + "Each CancelScope may only be used for a single 'with' block" + ) + + self._host_task = host_task = cast(asyncio.Task, current_task()) + self._tasks.add(host_task) + try: + task_state = _task_states[host_task] + except KeyError: + task_name = 
host_task.get_name() if _native_task_names else None + task_state = TaskState(None, task_name, self) + _task_states[host_task] = task_state + else: + self._parent_scope = task_state.cancel_scope + task_state.cancel_scope = self + + self._timeout() + self._active = True + return self + + def __exit__(self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Optional[bool]: + if not self._active: + raise RuntimeError('This cancel scope is not active') + if current_task() is not self._host_task: + raise RuntimeError('Attempted to exit cancel scope in a different task than it was ' + 'entered in') + + assert self._host_task is not None + host_task_state = _task_states.get(self._host_task) + if host_task_state is None or host_task_state.cancel_scope is not self: + raise RuntimeError("Attempted to exit a cancel scope that isn't the current tasks's " + "current cancel scope") + + self._active = False + if self._timeout_handle: + self._timeout_handle.cancel() + self._timeout_handle = None + + self._tasks.remove(self._host_task) + + host_task_state.cancel_scope = self._parent_scope + + # Restart the cancellation effort in the farthest directly cancelled parent scope if this + # one was shielded + if self._shield: + self._deliver_cancellation_to_parent() + + if exc_val is not None: + exceptions = exc_val.exceptions if isinstance(exc_val, ExceptionGroup) else [exc_val] + if all(isinstance(exc, CancelledError) for exc in exceptions): + if self._timeout_expired: + return True + elif not self._cancel_called: + # Task was cancelled natively + return None + elif not self._parent_cancelled(): + # This scope was directly cancelled + return True + + return None + + def _timeout(self) -> None: + if self._deadline != math.inf: + loop = get_running_loop() + if loop.time() >= self._deadline: + self._timeout_expired = True + self.cancel() + else: + self._timeout_handle = loop.call_at(self._deadline, self._timeout) + + def 
_deliver_cancellation(self) -> None: + """ + Deliver cancellation to directly contained tasks and nested cancel scopes. + + Schedule another run at the end if we still have tasks eligible for cancellation. + """ + should_retry = False + current = current_task() + for task in self._tasks: + if task._must_cancel: # type: ignore[attr-defined] + continue + + # The task is eligible for cancellation if it has started and is not in a cancel + # scope shielded from this one + cancel_scope = _task_states[task].cancel_scope + while cancel_scope is not self: + if cancel_scope is None or cancel_scope._shield: + break + else: + cancel_scope = cancel_scope._parent_scope + else: + should_retry = True + if task is not current and (task is self._host_task or _task_started(task)): + task.cancel() + + # Schedule another callback if there are still tasks left + if should_retry: + self._cancel_handle = get_running_loop().call_soon(self._deliver_cancellation) + else: + self._cancel_handle = None + + def _deliver_cancellation_to_parent(self) -> None: + """Start cancellation effort in the farthest directly cancelled parent scope""" + scope = self._parent_scope + scope_to_cancel: Optional[CancelScope] = None + while scope is not None: + if scope._cancel_called and scope._cancel_handle is None: + scope_to_cancel = scope + + # No point in looking beyond any shielded scope + if scope._shield: + break + + scope = scope._parent_scope + + if scope_to_cancel is not None: + scope_to_cancel._deliver_cancellation() + + def _parent_cancelled(self) -> bool: + # Check whether any parent has been cancelled + cancel_scope = self._parent_scope + while cancel_scope is not None and not cancel_scope._shield: + if cancel_scope._cancel_called: + return True + else: + cancel_scope = cancel_scope._parent_scope + + return False + + def cancel(self) -> DeprecatedAwaitable: + if not self._cancel_called: + if self._timeout_handle: + self._timeout_handle.cancel() + self._timeout_handle = None + + self._cancel_called 
= True + self._deliver_cancellation() + + return DeprecatedAwaitable(self.cancel) + + @property + def deadline(self) -> float: + return self._deadline + + @deadline.setter + def deadline(self, value: float) -> None: + self._deadline = float(value) + if self._timeout_handle is not None: + self._timeout_handle.cancel() + self._timeout_handle = None + + if self._active and not self._cancel_called: + self._timeout() + + @property + def cancel_called(self) -> bool: + return self._cancel_called + + @property + def shield(self) -> bool: + return self._shield + + @shield.setter + def shield(self, value: bool) -> None: + if self._shield != value: + self._shield = value + if not value: + self._deliver_cancellation_to_parent() + + +async def checkpoint() -> None: + await sleep(0) + + +async def checkpoint_if_cancelled() -> None: + task = current_task() + if task is None: + return + + try: + cancel_scope = _task_states[task].cancel_scope + except KeyError: + return + + while cancel_scope: + if cancel_scope.cancel_called: + await sleep(0) + elif cancel_scope.shield: + break + else: + cancel_scope = cancel_scope._parent_scope + + +async def cancel_shielded_checkpoint() -> None: + with CancelScope(shield=True): + await sleep(0) + + +def current_effective_deadline() -> float: + try: + cancel_scope = _task_states[current_task()].cancel_scope # type: ignore[index] + except KeyError: + return math.inf + + deadline = math.inf + while cancel_scope: + deadline = min(deadline, cancel_scope.deadline) + if cancel_scope.shield: + break + else: + cancel_scope = cancel_scope._parent_scope + + return deadline + + +def current_time() -> float: + return get_running_loop().time() + + +# +# Task states +# + +class TaskState: + """ + Encapsulates auxiliary task information that cannot be added to the Task instance itself + because there are no guarantees about its implementation. 
+ """ + + __slots__ = 'parent_id', 'name', 'cancel_scope' + + def __init__(self, parent_id: Optional[int], name: Optional[str], + cancel_scope: Optional[CancelScope]): + self.parent_id = parent_id + self.name = name + self.cancel_scope = cancel_scope + + +_task_states = WeakKeyDictionary() # type: WeakKeyDictionary[asyncio.Task, TaskState] + + +# +# Task groups +# + +class ExceptionGroup(BaseExceptionGroup): + def __init__(self, exceptions: List[BaseException]): + super().__init__() + self.exceptions = exceptions + + +class _AsyncioTaskStatus(abc.TaskStatus): + def __init__(self, future: asyncio.Future, parent_id: int): + self._future = future + self._parent_id = parent_id + + def started(self, value: object = None) -> None: + try: + self._future.set_result(value) + except asyncio.InvalidStateError: + raise RuntimeError("called 'started' twice on the same task status") from None + + task = cast(asyncio.Task, current_task()) + _task_states[task].parent_id = self._parent_id + + +class TaskGroup(abc.TaskGroup): + def __init__(self) -> None: + self.cancel_scope: CancelScope = CancelScope() + self._active = False + self._exceptions: List[BaseException] = [] + + async def __aenter__(self) -> "TaskGroup": + self.cancel_scope.__enter__() + self._active = True + return self + + async def __aexit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Optional[bool]: + ignore_exception = self.cancel_scope.__exit__(exc_type, exc_val, exc_tb) + if exc_val is not None: + self.cancel_scope.cancel() + self._exceptions.append(exc_val) + + while self.cancel_scope._tasks: + try: + await asyncio.wait(self.cancel_scope._tasks) + except asyncio.CancelledError: + self.cancel_scope.cancel() + + self._active = False + if not self.cancel_scope._parent_cancelled(): + exceptions = self._filter_cancellation_errors(self._exceptions) + else: + exceptions = self._exceptions + + try: + if len(exceptions) > 1: + if 
all(isinstance(e, CancelledError) and not e.args for e in exceptions): + # Tasks were cancelled natively, without a cancellation message + raise CancelledError + else: + raise ExceptionGroup(exceptions) + elif exceptions and exceptions[0] is not exc_val: + raise exceptions[0] + except BaseException as exc: + # Clear the context here, as it can only be done in-flight. + # If the context is not cleared, it can result in recursive tracebacks (see #145). + exc.__context__ = None + raise + + return ignore_exception + + @staticmethod + def _filter_cancellation_errors(exceptions: Sequence[BaseException]) -> List[BaseException]: + filtered_exceptions: List[BaseException] = [] + for exc in exceptions: + if isinstance(exc, ExceptionGroup): + new_exceptions = TaskGroup._filter_cancellation_errors(exc.exceptions) + if len(new_exceptions) > 1: + filtered_exceptions.append(exc) + elif len(new_exceptions) == 1: + filtered_exceptions.append(new_exceptions[0]) + elif new_exceptions: + new_exc = ExceptionGroup(new_exceptions) + new_exc.__cause__ = exc.__cause__ + new_exc.__context__ = exc.__context__ + new_exc.__traceback__ = exc.__traceback__ + filtered_exceptions.append(new_exc) + elif not isinstance(exc, CancelledError) or exc.args: + filtered_exceptions.append(exc) + + return filtered_exceptions + + async def _run_wrapped_task( + self, coro: Coroutine, task_status_future: Optional[asyncio.Future]) -> None: + # This is the code path for Python 3.6 and 3.7 on which asyncio freaks out if a task raises + # a BaseException. 
+ __traceback_hide__ = __tracebackhide__ = True # noqa: F841 + task = cast(asyncio.Task, current_task()) + try: + await coro + except BaseException as exc: + if task_status_future is None or task_status_future.done(): + self._exceptions.append(exc) + self.cancel_scope.cancel() + else: + task_status_future.set_exception(exc) + else: + if task_status_future is not None and not task_status_future.done(): + task_status_future.set_exception( + RuntimeError('Child exited without calling task_status.started()')) + finally: + if task in self.cancel_scope._tasks: + self.cancel_scope._tasks.remove(task) + del _task_states[task] + + def _spawn(self, func: Callable[..., Coroutine], args: tuple, name: object, + task_status_future: Optional[asyncio.Future] = None) -> asyncio.Task: + def task_done(_task: asyncio.Task) -> None: + # This is the code path for Python 3.8+ + assert _task in self.cancel_scope._tasks + self.cancel_scope._tasks.remove(_task) + del _task_states[_task] + + try: + exc = _task.exception() + except CancelledError as e: + while isinstance(e.__context__, CancelledError): + e = e.__context__ + + exc = e + + if exc is not None: + if task_status_future is None or task_status_future.done(): + self._exceptions.append(exc) + self.cancel_scope.cancel() + else: + task_status_future.set_exception(exc) + elif task_status_future is not None and not task_status_future.done(): + task_status_future.set_exception( + RuntimeError('Child exited without calling task_status.started()')) + + if not self._active: + raise RuntimeError('This task group is not active; no new tasks can be started.') + + options = {} + name = get_callable_name(func) if name is None else str(name) + if _native_task_names: + options['name'] = name + + kwargs = {} + if task_status_future: + parent_id = id(current_task()) + kwargs['task_status'] = _AsyncioTaskStatus(task_status_future, + id(self.cancel_scope._host_task)) + else: + parent_id = id(self.cancel_scope._host_task) + + coro = func(*args, **kwargs) 
+ if not asyncio.iscoroutine(coro): + raise TypeError(f'Expected an async function, but {func} appears to be synchronous') + + foreign_coro = not hasattr(coro, 'cr_frame') and not hasattr(coro, 'gi_frame') + if foreign_coro or sys.version_info < (3, 8): + coro = self._run_wrapped_task(coro, task_status_future) + + task = create_task(coro, **options) + if not foreign_coro and sys.version_info >= (3, 8): + task.add_done_callback(task_done) + + # Make the spawned task inherit the task group's cancel scope + _task_states[task] = TaskState(parent_id=parent_id, name=name, + cancel_scope=self.cancel_scope) + self.cancel_scope._tasks.add(task) + return task + + def start_soon(self, func: Callable[..., Coroutine], *args: object, + name: object = None) -> None: + self._spawn(func, args, name) + + async def start(self, func: Callable[..., Coroutine], *args: object, + name: object = None) -> None: + future: asyncio.Future = asyncio.Future() + task = self._spawn(func, args, name, future) + + # If the task raises an exception after sending a start value without a switch point + # between, the task group is cancelled and this method never proceeds to process the + # completed future. That's why we have to have a shielded cancel scope here. 
+ with CancelScope(shield=True): + try: + return await future + except CancelledError: + task.cancel() + raise + + +# +# Threads +# + +_Retval_Queue_Type = Tuple[Optional[T_Retval], Optional[BaseException]] + + +class WorkerThread(Thread): + MAX_IDLE_TIME = 10 # seconds + + def __init__(self, root_task: asyncio.Task, workers: Set['WorkerThread'], + idle_workers: Deque['WorkerThread']): + super().__init__(name='AnyIO worker thread') + self.root_task = root_task + self.workers = workers + self.idle_workers = idle_workers + self.loop = root_task._loop + self.queue: Queue[Union[Tuple[Context, Callable, tuple, asyncio.Future], None]] = Queue(2) + self.idle_since = current_time() + self.stopping = False + + def _report_result(self, future: asyncio.Future, result: Any, + exc: Optional[BaseException]) -> None: + self.idle_since = current_time() + if not self.stopping: + self.idle_workers.append(self) + + if not future.cancelled(): + if exc is not None: + future.set_exception(exc) + else: + future.set_result(result) + + def run(self) -> None: + with claim_worker_thread('asyncio'): + threadlocals.loop = self.loop + while True: + item = self.queue.get() + if item is None: + # Shutdown command received + return + + context, func, args, future = item + if not future.cancelled(): + result = None + exception: Optional[BaseException] = None + try: + result = context.run(func, *args) + except BaseException as exc: + exception = exc + + if not self.loop.is_closed(): + self.loop.call_soon_threadsafe( + self._report_result, future, result, exception) + + self.queue.task_done() + + def stop(self, f: Optional[asyncio.Task] = None) -> None: + self.stopping = True + self.queue.put_nowait(None) + self.workers.discard(self) + try: + self.idle_workers.remove(self) + except ValueError: + pass + + +_threadpool_idle_workers: RunVar[Deque[WorkerThread]] = RunVar('_threadpool_idle_workers') +_threadpool_workers: RunVar[Set[WorkerThread]] = RunVar('_threadpool_workers') + + +async def 
run_sync_in_worker_thread( + func: Callable[..., T_Retval], *args: object, cancellable: bool = False, + limiter: Optional['CapacityLimiter'] = None) -> T_Retval: + await checkpoint() + + # If this is the first run in this event loop thread, set up the necessary variables + try: + idle_workers = _threadpool_idle_workers.get() + workers = _threadpool_workers.get() + except LookupError: + idle_workers = deque() + workers = set() + _threadpool_idle_workers.set(idle_workers) + _threadpool_workers.set(workers) + + async with (limiter or current_default_thread_limiter()): + with CancelScope(shield=not cancellable): + future: asyncio.Future = asyncio.Future() + root_task = find_root_task() + if not idle_workers: + worker = WorkerThread(root_task, workers, idle_workers) + worker.start() + workers.add(worker) + root_task.add_done_callback(worker.stop) + else: + worker = idle_workers.pop() + + # Prune any other workers that have been idle for MAX_IDLE_TIME seconds or longer + now = current_time() + while idle_workers: + if now - idle_workers[0].idle_since < WorkerThread.MAX_IDLE_TIME: + break + + expired_worker = idle_workers.popleft() + expired_worker.root_task.remove_done_callback(expired_worker.stop) + expired_worker.stop() + + context = copy_context() + context.run(sniffio.current_async_library_cvar.set, None) + worker.queue.put_nowait((context, func, args, future)) + return await future + + +def run_sync_from_thread(func: Callable[..., T_Retval], *args: object, + loop: Optional[asyncio.AbstractEventLoop] = None) -> T_Retval: + @wraps(func) + def wrapper() -> None: + try: + f.set_result(func(*args)) + except BaseException as exc: + f.set_exception(exc) + if not isinstance(exc, Exception): + raise + + f: concurrent.futures.Future[T_Retval] = Future() + loop = loop or threadlocals.loop + if sys.version_info < (3, 7): + loop.call_soon_threadsafe(copy_context().run, wrapper) + else: + loop.call_soon_threadsafe(wrapper) + + return f.result() + + +def run_async_from_thread( + 
func: Callable[..., Coroutine[Any, Any, T_Retval]], *args: object +) -> T_Retval: + f: concurrent.futures.Future[T_Retval] = asyncio.run_coroutine_threadsafe( + func(*args), threadlocals.loop) + return f.result() + + +class BlockingPortal(abc.BlockingPortal): + def __new__(cls) -> "BlockingPortal": + return object.__new__(cls) + + def __init__(self) -> None: + super().__init__() + self._loop = get_running_loop() + + def _spawn_task_from_thread(self, func: Callable, args: tuple, kwargs: Dict[str, Any], + name: object, future: Future) -> None: + run_sync_from_thread( + partial(self._task_group.start_soon, name=name), self._call_func, func, args, kwargs, + future, loop=self._loop) + + +# +# Subprocesses +# + +@dataclass(eq=False) +class StreamReaderWrapper(abc.ByteReceiveStream): + _stream: asyncio.StreamReader + + async def receive(self, max_bytes: int = 65536) -> bytes: + data = await self._stream.read(max_bytes) + if data: + return data + else: + raise EndOfStream + + async def aclose(self) -> None: + self._stream.feed_eof() + + +@dataclass(eq=False) +class StreamWriterWrapper(abc.ByteSendStream): + _stream: asyncio.StreamWriter + + async def send(self, item: bytes) -> None: + self._stream.write(item) + await self._stream.drain() + + async def aclose(self) -> None: + self._stream.close() + + +@dataclass(eq=False) +class Process(abc.Process): + _process: asyncio.subprocess.Process + _stdin: Optional[StreamWriterWrapper] + _stdout: Optional[StreamReaderWrapper] + _stderr: Optional[StreamReaderWrapper] + + async def aclose(self) -> None: + if self._stdin: + await self._stdin.aclose() + if self._stdout: + await self._stdout.aclose() + if self._stderr: + await self._stderr.aclose() + + await self.wait() + + async def wait(self) -> int: + return await self._process.wait() + + def terminate(self) -> None: + self._process.terminate() + + def kill(self) -> None: + self._process.kill() + + def send_signal(self, signal: int) -> None: + self._process.send_signal(signal) + + 
@property + def pid(self) -> int: + return self._process.pid + + @property + def returncode(self) -> Optional[int]: + return self._process.returncode + + @property + def stdin(self) -> Optional[abc.ByteSendStream]: + return self._stdin + + @property + def stdout(self) -> Optional[abc.ByteReceiveStream]: + return self._stdout + + @property + def stderr(self) -> Optional[abc.ByteReceiveStream]: + return self._stderr + + +async def open_process(command: Union[str, Sequence[str]], *, shell: bool, + stdin: int, stdout: int, stderr: int, + cwd: Union[str, bytes, PathLike, None] = None, + env: Optional[Mapping[str, str]] = None, + start_new_session: bool = False) -> Process: + await checkpoint() + if shell: + process = await asyncio.create_subprocess_shell( + command, stdin=stdin, stdout=stdout, # type: ignore[arg-type] + stderr=stderr, cwd=cwd, env=env, start_new_session=start_new_session, + ) + else: + process = await asyncio.create_subprocess_exec(*command, stdin=stdin, stdout=stdout, + stderr=stderr, cwd=cwd, env=env, + start_new_session=start_new_session) + + stdin_stream = StreamWriterWrapper(process.stdin) if process.stdin else None + stdout_stream = StreamReaderWrapper(process.stdout) if process.stdout else None + stderr_stream = StreamReaderWrapper(process.stderr) if process.stderr else None + return Process(process, stdin_stream, stdout_stream, stderr_stream) + + +def _forcibly_shutdown_process_pool_on_exit(workers: Set[Process], _task: object) -> None: + """ + Forcibly shuts down worker processes belonging to this event loop.""" + child_watcher: Optional[asyncio.AbstractChildWatcher] + try: + child_watcher = asyncio.get_event_loop_policy().get_child_watcher() + except NotImplementedError: + child_watcher = None + + # Close as much as possible (w/o async/await) to avoid warnings + for process in workers: + if process.returncode is None: + continue + + process._stdin._stream._transport.close() # type: ignore[union-attr] + 
process._stdout._stream._transport.close() # type: ignore[union-attr] + process._stderr._stream._transport.close() # type: ignore[union-attr] + process.kill() + if child_watcher: + child_watcher.remove_child_handler(process.pid) + + +async def _shutdown_process_pool_on_exit(workers: Set[Process]) -> None: + """ + Shuts down worker processes belonging to this event loop. + + NOTE: this only works when the event loop was started using asyncio.run() or anyio.run(). + + """ + process: Process + try: + await sleep(math.inf) + except asyncio.CancelledError: + for process in workers: + if process.returncode is None: + process.kill() + + for process in workers: + await process.aclose() + + +def setup_process_pool_exit_at_shutdown(workers: Set[Process]) -> None: + kwargs = {'name': 'AnyIO process pool shutdown task'} if _native_task_names else {} + create_task(_shutdown_process_pool_on_exit(workers), **kwargs) + find_root_task().add_done_callback(partial(_forcibly_shutdown_process_pool_on_exit, workers)) + + +# +# Sockets and networking +# + + +class StreamProtocol(asyncio.Protocol): + read_queue: Deque[bytes] + read_event: asyncio.Event + write_event: asyncio.Event + exception: Optional[Exception] = None + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + self.read_queue = deque() + self.read_event = asyncio.Event() + self.write_event = asyncio.Event() + self.write_event.set() + cast(asyncio.Transport, transport).set_write_buffer_limits(0) + + def connection_lost(self, exc: Optional[Exception]) -> None: + if exc: + self.exception = BrokenResourceError() + self.exception.__cause__ = exc + + self.read_event.set() + self.write_event.set() + + def data_received(self, data: bytes) -> None: + self.read_queue.append(data) + self.read_event.set() + + def eof_received(self) -> Optional[bool]: + self.read_event.set() + return True + + def pause_writing(self) -> None: + self.write_event = asyncio.Event() + + def resume_writing(self) -> None: + 
self.write_event.set() + + +class DatagramProtocol(asyncio.DatagramProtocol): + read_queue: Deque[Tuple[bytes, IPSockAddrType]] + read_event: asyncio.Event + write_event: asyncio.Event + exception: Optional[Exception] = None + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + self.read_queue = deque(maxlen=100) # arbitrary value + self.read_event = asyncio.Event() + self.write_event = asyncio.Event() + self.write_event.set() + + def connection_lost(self, exc: Optional[Exception]) -> None: + self.read_event.set() + self.write_event.set() + + def datagram_received(self, data: bytes, addr: IPSockAddrType) -> None: + addr = convert_ipv6_sockaddr(addr) + self.read_queue.append((data, addr)) + self.read_event.set() + + def error_received(self, exc: Exception) -> None: + self.exception = exc + + def pause_writing(self) -> None: + self.write_event.clear() + + def resume_writing(self) -> None: + self.write_event.set() + + +class SocketStream(abc.SocketStream): + def __init__(self, transport: asyncio.Transport, protocol: StreamProtocol): + self._transport = transport + self._protocol = protocol + self._receive_guard = ResourceGuard('reading from') + self._send_guard = ResourceGuard('writing to') + self._closed = False + + @property + def _raw_socket(self) -> socket.socket: + return self._transport.get_extra_info('socket') + + async def receive(self, max_bytes: int = 65536) -> bytes: + with self._receive_guard: + await checkpoint() + + if not self._protocol.read_event.is_set() and not self._transport.is_closing(): + self._transport.resume_reading() + await self._protocol.read_event.wait() + self._transport.pause_reading() + + try: + chunk = self._protocol.read_queue.popleft() + except IndexError: + if self._closed: + raise ClosedResourceError from None + elif self._protocol.exception: + raise self._protocol.exception + else: + raise EndOfStream from None + + if len(chunk) > max_bytes: + # Split the oversized chunk + chunk, leftover = chunk[:max_bytes], 
chunk[max_bytes:] + self._protocol.read_queue.appendleft(leftover) + + # If the read queue is empty, clear the flag so that the next call will block until + # data is available + if not self._protocol.read_queue: + self._protocol.read_event.clear() + + return chunk + + async def send(self, item: bytes) -> None: + with self._send_guard: + await checkpoint() + + if self._closed: + raise ClosedResourceError + elif self._protocol.exception is not None: + raise self._protocol.exception + + try: + self._transport.write(item) + except RuntimeError as exc: + if self._transport.is_closing(): + raise BrokenResourceError from exc + else: + raise + + await self._protocol.write_event.wait() + + async def send_eof(self) -> None: + try: + self._transport.write_eof() + except OSError: + pass + + async def aclose(self) -> None: + if not self._transport.is_closing(): + self._closed = True + try: + self._transport.write_eof() + except OSError: + pass + + self._transport.close() + await sleep(0) + self._transport.abort() + + +class UNIXSocketStream(abc.SocketStream): + _receive_future: Optional[asyncio.Future] = None + _send_future: Optional[asyncio.Future] = None + _closing = False + + def __init__(self, raw_socket: socket.socket): + self.__raw_socket = raw_socket + self._loop = get_running_loop() + self._receive_guard = ResourceGuard('reading from') + self._send_guard = ResourceGuard('writing to') + + @property + def _raw_socket(self) -> socket.socket: + return self.__raw_socket + + def _wait_until_readable(self, loop: asyncio.AbstractEventLoop) -> asyncio.Future: + def callback(f: object) -> None: + del self._receive_future + loop.remove_reader(self.__raw_socket) + + f = self._receive_future = asyncio.Future() + self._loop.add_reader(self.__raw_socket, f.set_result, None) + f.add_done_callback(callback) + return f + + def _wait_until_writable(self, loop: asyncio.AbstractEventLoop) -> asyncio.Future: + def callback(f: object) -> None: + del self._send_future + 
loop.remove_writer(self.__raw_socket) + + f = self._send_future = asyncio.Future() + self._loop.add_writer(self.__raw_socket, f.set_result, None) + f.add_done_callback(callback) + return f + + async def send_eof(self) -> None: + with self._send_guard: + self._raw_socket.shutdown(socket.SHUT_WR) + + async def receive(self, max_bytes: int = 65536) -> bytes: + loop = get_running_loop() + await checkpoint() + with self._receive_guard: + while True: + try: + data = self.__raw_socket.recv(max_bytes) + except BlockingIOError: + await self._wait_until_readable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + if not data: + raise EndOfStream + + return data + + async def send(self, item: bytes) -> None: + loop = get_running_loop() + await checkpoint() + with self._send_guard: + view = memoryview(item) + while view: + try: + bytes_sent = self.__raw_socket.send(item) + except BlockingIOError: + await self._wait_until_writable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + view = view[bytes_sent:] + + async def receive_fds(self, msglen: int, maxfds: int) -> Tuple[bytes, List[int]]: + if not isinstance(msglen, int) or msglen < 0: + raise ValueError('msglen must be a non-negative integer') + if not isinstance(maxfds, int) or maxfds < 1: + raise ValueError('maxfds must be a positive integer') + + loop = get_running_loop() + fds = array.array("i") + await checkpoint() + with self._receive_guard: + while True: + try: + message, ancdata, flags, addr = self.__raw_socket.recvmsg( + msglen, socket.CMSG_LEN(maxfds * fds.itemsize)) + except BlockingIOError: + await self._wait_until_readable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + if not message and not ancdata: + raise EndOfStream + + break + + 
for cmsg_level, cmsg_type, cmsg_data in ancdata: + if cmsg_level != socket.SOL_SOCKET or cmsg_type != socket.SCM_RIGHTS: + raise RuntimeError(f'Received unexpected ancillary data; message = {message!r}, ' + f'cmsg_level = {cmsg_level}, cmsg_type = {cmsg_type}') + + fds.frombytes(cmsg_data[:len(cmsg_data) - (len(cmsg_data) % fds.itemsize)]) + + return message, list(fds) + + async def send_fds(self, message: bytes, fds: Collection[Union[int, IOBase]]) -> None: + if not message: + raise ValueError('message must not be empty') + if not fds: + raise ValueError('fds must not be empty') + + loop = get_running_loop() + filenos: List[int] = [] + for fd in fds: + if isinstance(fd, int): + filenos.append(fd) + elif isinstance(fd, IOBase): + filenos.append(fd.fileno()) + + fdarray = array.array("i", filenos) + await checkpoint() + with self._send_guard: + while True: + try: + # The ignore can be removed after mypy picks up + # https://github.com/python/typeshed/pull/5545 + self.__raw_socket.sendmsg( + [message], + [(socket.SOL_SOCKET, socket.SCM_RIGHTS, fdarray)] + ) + break + except BlockingIOError: + await self._wait_until_writable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + + async def aclose(self) -> None: + if not self._closing: + self._closing = True + if self.__raw_socket.fileno() != -1: + self.__raw_socket.close() + + if self._receive_future: + self._receive_future.set_result(None) + if self._send_future: + self._send_future.set_result(None) + + +class TCPSocketListener(abc.SocketListener): + _accept_scope: Optional[CancelScope] = None + _closed = False + + def __init__(self, raw_socket: socket.socket): + self.__raw_socket = raw_socket + self._loop = cast(asyncio.BaseEventLoop, get_running_loop()) + self._accept_guard = ResourceGuard('accepting connections from') + + @property + def _raw_socket(self) -> socket.socket: + return self.__raw_socket + + async def accept(self) -> 
abc.SocketStream: + if self._closed: + raise ClosedResourceError + + with self._accept_guard: + await checkpoint() + with CancelScope() as self._accept_scope: + try: + client_sock, _addr = await self._loop.sock_accept(self._raw_socket) + except asyncio.CancelledError: + # Workaround for https://bugs.python.org/issue41317 + try: + self._loop.remove_reader(self._raw_socket) + except (ValueError, NotImplementedError): + pass + + if self._closed: + raise ClosedResourceError from None + + raise + finally: + self._accept_scope = None + + client_sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + transport, protocol = await self._loop.connect_accepted_socket(StreamProtocol, client_sock) + return SocketStream(cast(asyncio.Transport, transport), cast(StreamProtocol, protocol)) + + async def aclose(self) -> None: + if self._closed: + return + + self._closed = True + if self._accept_scope: + # Workaround for https://bugs.python.org/issue41317 + try: + self._loop.remove_reader(self._raw_socket) + except (ValueError, NotImplementedError): + pass + + self._accept_scope.cancel() + await sleep(0) + + self._raw_socket.close() + + +class UNIXSocketListener(abc.SocketListener): + def __init__(self, raw_socket: socket.socket): + self.__raw_socket = raw_socket + self._loop = get_running_loop() + self._accept_guard = ResourceGuard('accepting connections from') + self._closed = False + + async def accept(self) -> abc.SocketStream: + await checkpoint() + with self._accept_guard: + while True: + try: + client_sock, _ = self.__raw_socket.accept() + client_sock.setblocking(False) + return UNIXSocketStream(client_sock) + except BlockingIOError: + f: asyncio.Future = asyncio.Future() + self._loop.add_reader(self.__raw_socket, f.set_result, None) + f.add_done_callback(lambda _: self._loop.remove_reader(self.__raw_socket)) + await f + except OSError as exc: + if self._closed: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + + async def aclose(self) -> 
None: + self._closed = True + self.__raw_socket.close() + + @property + def _raw_socket(self) -> socket.socket: + return self.__raw_socket + + +class UDPSocket(abc.UDPSocket): + def __init__(self, transport: asyncio.DatagramTransport, protocol: DatagramProtocol): + self._transport = transport + self._protocol = protocol + self._receive_guard = ResourceGuard('reading from') + self._send_guard = ResourceGuard('writing to') + self._closed = False + + @property + def _raw_socket(self) -> socket.socket: + return self._transport.get_extra_info('socket') + + async def aclose(self) -> None: + if not self._transport.is_closing(): + self._closed = True + self._transport.close() + + async def receive(self) -> Tuple[bytes, IPSockAddrType]: + with self._receive_guard: + await checkpoint() + + # If the buffer is empty, ask for more data + if not self._protocol.read_queue and not self._transport.is_closing(): + self._protocol.read_event.clear() + await self._protocol.read_event.wait() + + try: + return self._protocol.read_queue.popleft() + except IndexError: + if self._closed: + raise ClosedResourceError from None + else: + raise BrokenResourceError from None + + async def send(self, item: UDPPacketType) -> None: + with self._send_guard: + await checkpoint() + await self._protocol.write_event.wait() + if self._closed: + raise ClosedResourceError + elif self._transport.is_closing(): + raise BrokenResourceError + else: + self._transport.sendto(*item) + + +class ConnectedUDPSocket(abc.ConnectedUDPSocket): + def __init__(self, transport: asyncio.DatagramTransport, protocol: DatagramProtocol): + self._transport = transport + self._protocol = protocol + self._receive_guard = ResourceGuard('reading from') + self._send_guard = ResourceGuard('writing to') + self._closed = False + + @property + def _raw_socket(self) -> socket.socket: + return self._transport.get_extra_info('socket') + + async def aclose(self) -> None: + if not self._transport.is_closing(): + self._closed = True + 
self._transport.close() + + async def receive(self) -> bytes: + with self._receive_guard: + await checkpoint() + + # If the buffer is empty, ask for more data + if not self._protocol.read_queue and not self._transport.is_closing(): + self._protocol.read_event.clear() + await self._protocol.read_event.wait() + + try: + packet = self._protocol.read_queue.popleft() + except IndexError: + if self._closed: + raise ClosedResourceError from None + else: + raise BrokenResourceError from None + + return packet[0] + + async def send(self, item: bytes) -> None: + with self._send_guard: + await checkpoint() + await self._protocol.write_event.wait() + if self._closed: + raise ClosedResourceError + elif self._transport.is_closing(): + raise BrokenResourceError + else: + self._transport.sendto(item) + + +async def connect_tcp(host: str, port: int, + local_addr: Optional[Tuple[str, int]] = None) -> SocketStream: + transport, protocol = cast( + Tuple[asyncio.Transport, StreamProtocol], + await get_running_loop().create_connection(StreamProtocol, host, port, + local_addr=local_addr) + ) + transport.pause_reading() + return SocketStream(transport, protocol) + + +async def connect_unix(path: str) -> UNIXSocketStream: + await checkpoint() + loop = get_running_loop() + raw_socket = socket.socket(socket.AF_UNIX) + raw_socket.setblocking(False) + while True: + try: + raw_socket.connect(path) + except BlockingIOError: + f: asyncio.Future = asyncio.Future() + loop.add_writer(raw_socket, f.set_result, None) + f.add_done_callback(lambda _: loop.remove_writer(raw_socket)) + await f + except BaseException: + raw_socket.close() + raise + else: + return UNIXSocketStream(raw_socket) + + +async def create_udp_socket( + family: socket.AddressFamily, + local_address: Optional[IPSockAddrType], + remote_address: Optional[IPSockAddrType], + reuse_port: bool +) -> Union[UDPSocket, ConnectedUDPSocket]: + result = await get_running_loop().create_datagram_endpoint( + DatagramProtocol, 
local_addr=local_address, remote_addr=remote_address, family=family, + reuse_port=reuse_port) + transport = cast(asyncio.DatagramTransport, result[0]) + protocol = cast(DatagramProtocol, result[1]) + if protocol.exception: + transport.close() + raise protocol.exception + + if not remote_address: + return UDPSocket(transport, protocol) + else: + return ConnectedUDPSocket(transport, protocol) + + +async def getaddrinfo(host: Union[bytearray, bytes, str], port: Union[str, int, None], *, + family: Union[int, AddressFamily] = 0, type: Union[int, SocketKind] = 0, + proto: int = 0, flags: int = 0) -> GetAddrInfoReturnType: + # https://github.com/python/typeshed/pull/4304 + result = await get_running_loop().getaddrinfo( + host, port, family=family, type=type, proto=proto, flags=flags) # type: ignore[arg-type] + return cast(GetAddrInfoReturnType, result) + + +async def getnameinfo(sockaddr: IPSockAddrType, flags: int = 0) -> Tuple[str, str]: + return await get_running_loop().getnameinfo(sockaddr, flags) + + +_read_events: RunVar[Dict[Any, asyncio.Event]] = RunVar('read_events') +_write_events: RunVar[Dict[Any, asyncio.Event]] = RunVar('write_events') + + +async def wait_socket_readable(sock: socket.socket) -> None: + await checkpoint() + try: + read_events = _read_events.get() + except LookupError: + read_events = {} + _read_events.set(read_events) + + if read_events.get(sock): + raise BusyResourceError('reading from') from None + + loop = get_running_loop() + event = read_events[sock] = asyncio.Event() + loop.add_reader(sock, event.set) + try: + await event.wait() + finally: + if read_events.pop(sock, None) is not None: + loop.remove_reader(sock) + readable = True + else: + readable = False + + if not readable: + raise ClosedResourceError + + +async def wait_socket_writable(sock: socket.socket) -> None: + await checkpoint() + try: + write_events = _write_events.get() + except LookupError: + write_events = {} + _write_events.set(write_events) + + if write_events.get(sock): 
+ raise BusyResourceError('writing to') from None + + loop = get_running_loop() + event = write_events[sock] = asyncio.Event() + loop.add_writer(sock.fileno(), event.set) + try: + await event.wait() + finally: + if write_events.pop(sock, None) is not None: + loop.remove_writer(sock) + writable = True + else: + writable = False + + if not writable: + raise ClosedResourceError + + +# +# Synchronization +# + +class Event(BaseEvent): + def __new__(cls) -> "Event": + return object.__new__(cls) + + def __init__(self) -> None: + self._event = asyncio.Event() + + def set(self) -> DeprecatedAwaitable: + self._event.set() + return DeprecatedAwaitable(self.set) + + def is_set(self) -> bool: + return self._event.is_set() + + async def wait(self) -> None: + if await self._event.wait(): + await checkpoint() + + def statistics(self) -> EventStatistics: + return EventStatistics(len(self._event._waiters)) # type: ignore[attr-defined] + + +class CapacityLimiter(BaseCapacityLimiter): + _total_tokens: float = 0 + + def __new__(cls, total_tokens: float) -> "CapacityLimiter": + return object.__new__(cls) + + def __init__(self, total_tokens: float): + self._borrowers: Set[Any] = set() + self._wait_queue: Dict[Any, asyncio.Event] = OrderedDict() + self.total_tokens = total_tokens + + async def __aenter__(self) -> None: + await self.acquire() + + async def __aexit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> None: + self.release() + + @property + def total_tokens(self) -> float: + return self._total_tokens + + @total_tokens.setter + def total_tokens(self, value: float) -> None: + if not isinstance(value, int) and not math.isinf(value): + raise TypeError('total_tokens must be an int or math.inf') + if value < 1: + raise ValueError('total_tokens must be >= 1') + + old_value = self._total_tokens + self._total_tokens = value + events = [] + for event in self._wait_queue.values(): + if value <= old_value: + break + + 
if not event.is_set(): + events.append(event) + old_value += 1 + + for event in events: + event.set() + + @property + def borrowed_tokens(self) -> int: + return len(self._borrowers) + + @property + def available_tokens(self) -> float: + return self._total_tokens - len(self._borrowers) + + def acquire_nowait(self) -> DeprecatedAwaitable: + self.acquire_on_behalf_of_nowait(current_task()) + return DeprecatedAwaitable(self.acquire_nowait) + + def acquire_on_behalf_of_nowait(self, borrower: object) -> DeprecatedAwaitable: + if borrower in self._borrowers: + raise RuntimeError("this borrower is already holding one of this CapacityLimiter's " + "tokens") + + if self._wait_queue or len(self._borrowers) >= self._total_tokens: + raise WouldBlock + + self._borrowers.add(borrower) + return DeprecatedAwaitable(self.acquire_on_behalf_of_nowait) + + async def acquire(self) -> None: + return await self.acquire_on_behalf_of(current_task()) + + async def acquire_on_behalf_of(self, borrower: object) -> None: + await checkpoint_if_cancelled() + try: + self.acquire_on_behalf_of_nowait(borrower) + except WouldBlock: + event = asyncio.Event() + self._wait_queue[borrower] = event + try: + await event.wait() + except BaseException: + self._wait_queue.pop(borrower, None) + raise + + self._borrowers.add(borrower) + else: + try: + await cancel_shielded_checkpoint() + except BaseException: + self.release() + raise + + def release(self) -> None: + self.release_on_behalf_of(current_task()) + + def release_on_behalf_of(self, borrower: object) -> None: + try: + self._borrowers.remove(borrower) + except KeyError: + raise RuntimeError("this borrower isn't holding any of this CapacityLimiter's " + "tokens") from None + + # Notify the next task in line if this limiter has free capacity now + if self._wait_queue and len(self._borrowers) < self._total_tokens: + event = self._wait_queue.popitem()[1] + event.set() + + def statistics(self) -> CapacityLimiterStatistics: + return 
CapacityLimiterStatistics(self.borrowed_tokens, self.total_tokens, + tuple(self._borrowers), len(self._wait_queue)) + + +_default_thread_limiter: RunVar[CapacityLimiter] = RunVar('_default_thread_limiter') + + +def current_default_thread_limiter() -> CapacityLimiter: + try: + return _default_thread_limiter.get() + except LookupError: + limiter = CapacityLimiter(40) + _default_thread_limiter.set(limiter) + return limiter + + +# +# Operating system signals +# + +class _SignalReceiver(DeprecatedAsyncContextManager["_SignalReceiver"]): + def __init__(self, signals: Tuple[int, ...]): + self._signals = signals + self._loop = get_running_loop() + self._signal_queue: Deque[int] = deque() + self._future: asyncio.Future = asyncio.Future() + self._handled_signals: Set[int] = set() + + def _deliver(self, signum: int) -> None: + self._signal_queue.append(signum) + if not self._future.done(): + self._future.set_result(None) + + def __enter__(self) -> "_SignalReceiver": + for sig in set(self._signals): + self._loop.add_signal_handler(sig, self._deliver, sig) + self._handled_signals.add(sig) + + return self + + def __exit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Optional[bool]: + for sig in self._handled_signals: + self._loop.remove_signal_handler(sig) + return None + + def __aiter__(self) -> "_SignalReceiver": + return self + + async def __anext__(self) -> int: + await checkpoint() + if not self._signal_queue: + self._future = asyncio.Future() + await self._future + + return self._signal_queue.popleft() + + +def open_signal_receiver(*signals: int) -> _SignalReceiver: + return _SignalReceiver(signals) + + +# +# Testing and debugging +# + +def _create_task_info(task: asyncio.Task) -> TaskInfo: + task_state = _task_states.get(task) + if task_state is None: + name = task.get_name() if _native_task_names else None + parent_id = None + else: + name = task_state.name + parent_id = task_state.parent_id + + 
return TaskInfo(id(task), parent_id, name, get_coro(task)) + + +def get_current_task() -> TaskInfo: + return _create_task_info(current_task()) # type: ignore[arg-type] + + +def get_running_tasks() -> List[TaskInfo]: + return [_create_task_info(task) for task in all_tasks() if not task.done()] + + +async def wait_all_tasks_blocked() -> None: + await checkpoint() + this_task = current_task() + while True: + for task in all_tasks(): + if task is this_task: + continue + + if task._fut_waiter is None or task._fut_waiter.done(): # type: ignore[attr-defined] + await sleep(0.1) + break + else: + return + + +class TestRunner(abc.TestRunner): + def __init__(self, debug: bool = False, use_uvloop: bool = False, + policy: Optional[asyncio.AbstractEventLoopPolicy] = None): + _maybe_set_event_loop_policy(policy, use_uvloop) + self._loop = asyncio.new_event_loop() + self._loop.set_debug(debug) + asyncio.set_event_loop(self._loop) + + def _cancel_all_tasks(self) -> None: + to_cancel = all_tasks(self._loop) + if not to_cancel: + return + + for task in to_cancel: + task.cancel() + + self._loop.run_until_complete(asyncio.gather(*to_cancel, return_exceptions=True)) + + for task in to_cancel: + if task.cancelled(): + continue + if task.exception() is not None: + raise cast(BaseException, task.exception()) + + def close(self) -> None: + try: + self._cancel_all_tasks() + self._loop.run_until_complete(self._loop.shutdown_asyncgens()) + finally: + asyncio.set_event_loop(None) + self._loop.close() + + def call(self, func: Callable[..., Awaitable[T_Retval]], + *args: object, **kwargs: object) -> T_Retval: + def exception_handler(loop: asyncio.AbstractEventLoop, context: Dict[str, Any]) -> None: + exceptions.append(context['exception']) + + exceptions: List[BaseException] = [] + self._loop.set_exception_handler(exception_handler) + try: + retval: T_Retval = self._loop.run_until_complete(func(*args, **kwargs)) + except Exception as exc: + retval = None # type: ignore[assignment] + 
exceptions.append(exc) + finally: + self._loop.set_exception_handler(None) + + if len(exceptions) == 1: + raise exceptions[0] + elif exceptions: + raise ExceptionGroup(exceptions) + + return retval diff --git a/.venv/lib/python3.9/site-packages/anyio/_backends/_trio.py b/.venv/lib/python3.9/site-packages/anyio/_backends/_trio.py new file mode 100644 index 0000000..440e54f --- /dev/null +++ b/.venv/lib/python3.9/site-packages/anyio/_backends/_trio.py @@ -0,0 +1,833 @@ +import array +import math +import socket +from concurrent.futures import Future +from contextvars import copy_context +from dataclasses import dataclass +from functools import partial +from io import IOBase +from os import PathLike +from signal import Signals +from types import TracebackType +from typing import ( + Any, Awaitable, Callable, Collection, ContextManager, Coroutine, Deque, Dict, Generic, List, + Mapping, NoReturn, Optional, Sequence, Set, Tuple, Type, TypeVar, Union, cast) + +import sniffio +import trio.from_thread +from outcome import Error, Outcome, Value +from trio.socket import SocketType as TrioSocketType +from trio.to_thread import run_sync + +from .. 
import CapacityLimiterStatistics, EventStatistics, TaskInfo, abc +from .._core._compat import DeprecatedAsyncContextManager, DeprecatedAwaitable, T +from .._core._eventloop import claim_worker_thread +from .._core._exceptions import ( + BrokenResourceError, BusyResourceError, ClosedResourceError, EndOfStream) +from .._core._exceptions import ExceptionGroup as BaseExceptionGroup +from .._core._sockets import convert_ipv6_sockaddr +from .._core._synchronization import CapacityLimiter as BaseCapacityLimiter +from .._core._synchronization import Event as BaseEvent +from .._core._synchronization import ResourceGuard +from .._core._tasks import CancelScope as BaseCancelScope +from ..abc import IPSockAddrType, UDPPacketType + +try: + from trio import lowlevel as trio_lowlevel +except ImportError: + from trio import hazmat as trio_lowlevel # type: ignore[no-redef] + from trio.hazmat import wait_readable, wait_writable +else: + from trio.lowlevel import wait_readable, wait_writable + +try: + from trio.lowlevel import open_process as trio_open_process # type: ignore[attr-defined] +except ImportError: + from trio import open_process as trio_open_process + +T_Retval = TypeVar('T_Retval') +T_SockAddr = TypeVar('T_SockAddr', str, IPSockAddrType) + + +# +# Event loop +# + +run = trio.run +current_token = trio.lowlevel.current_trio_token +RunVar = trio.lowlevel.RunVar + + +# +# Miscellaneous +# + +sleep = trio.sleep + + +# +# Timeouts and cancellation +# + +class CancelScope(BaseCancelScope): + def __new__(cls, original: Optional[trio.CancelScope] = None, + **kwargs: object) -> 'CancelScope': + return object.__new__(cls) + + def __init__(self, original: Optional[trio.CancelScope] = None, **kwargs: Any) -> None: + self.__original = original or trio.CancelScope(**kwargs) + + def __enter__(self) -> 'CancelScope': + self.__original.__enter__() + return self + + def __exit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: 
Optional[TracebackType]) -> Optional[bool]: + return self.__original.__exit__(exc_type, exc_val, exc_tb) + + def cancel(self) -> DeprecatedAwaitable: + self.__original.cancel() + return DeprecatedAwaitable(self.cancel) + + @property + def deadline(self) -> float: + return self.__original.deadline + + @deadline.setter + def deadline(self, value: float) -> None: + self.__original.deadline = value + + @property + def cancel_called(self) -> bool: + return self.__original.cancel_called + + @property + def shield(self) -> bool: + return self.__original.shield + + @shield.setter + def shield(self, value: bool) -> None: + self.__original.shield = value + + +CancelledError = trio.Cancelled +checkpoint = trio.lowlevel.checkpoint +checkpoint_if_cancelled = trio.lowlevel.checkpoint_if_cancelled +cancel_shielded_checkpoint = trio.lowlevel.cancel_shielded_checkpoint +current_effective_deadline = trio.current_effective_deadline +current_time = trio.current_time + + +# +# Task groups +# + +class ExceptionGroup(BaseExceptionGroup, trio.MultiError): + pass + + +class TaskGroup(abc.TaskGroup): + def __init__(self) -> None: + self._active = False + self._nursery_manager = trio.open_nursery() + self.cancel_scope = None # type: ignore[assignment] + + async def __aenter__(self) -> 'TaskGroup': + self._active = True + self._nursery = await self._nursery_manager.__aenter__() + self.cancel_scope = CancelScope(self._nursery.cancel_scope) + return self + + async def __aexit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Optional[bool]: + try: + return await self._nursery_manager.__aexit__(exc_type, exc_val, exc_tb) + except trio.MultiError as exc: + raise ExceptionGroup(exc.exceptions) from None + finally: + self._active = False + + def start_soon(self, func: Callable, *args: object, name: object = None) -> None: + if not self._active: + raise RuntimeError('This task group is not active; no new tasks can be started.') 
+ + self._nursery.start_soon(func, *args, name=name) + + async def start(self, func: Callable[..., Coroutine], + *args: object, name: object = None) -> object: + if not self._active: + raise RuntimeError('This task group is not active; no new tasks can be started.') + + return await self._nursery.start(func, *args, name=name) + +# +# Threads +# + + +async def run_sync_in_worker_thread( + func: Callable[..., T_Retval], *args: object, cancellable: bool = False, + limiter: Optional[trio.CapacityLimiter] = None) -> T_Retval: + def wrapper() -> T_Retval: + with claim_worker_thread('trio'): + return func(*args) + + # TODO: remove explicit context copying when trio 0.20 is the minimum requirement + context = copy_context() + context.run(sniffio.current_async_library_cvar.set, None) + return await run_sync(context.run, wrapper, cancellable=cancellable, limiter=limiter) + + +# TODO: remove this workaround when trio 0.20 is the minimum requirement +def run_async_from_thread(fn: Callable[..., Awaitable[T_Retval]], *args: Any) -> T_Retval: + async def wrapper() -> T_Retval: + retval: T_Retval + + async def inner() -> None: + nonlocal retval + __tracebackhide__ = True + retval = await fn(*args) + + async with trio.open_nursery() as n: + context.run(n.start_soon, inner) + + __tracebackhide__ = True + return retval + + context = copy_context() + context.run(sniffio.current_async_library_cvar.set, 'trio') + return trio.from_thread.run(wrapper) + + +def run_sync_from_thread(fn: Callable[..., T_Retval], *args: Any) -> T_Retval: + # TODO: remove explicit context copying when trio 0.20 is the minimum requirement + retval = trio.from_thread.run_sync(copy_context().run, fn, *args) + return cast(T_Retval, retval) + + +class BlockingPortal(abc.BlockingPortal): + def __new__(cls) -> 'BlockingPortal': + return object.__new__(cls) + + def __init__(self) -> None: + super().__init__() + self._token = trio.lowlevel.current_trio_token() + + def _spawn_task_from_thread(self, func: Callable, args: 
tuple, kwargs: Dict[str, Any], + name: object, future: Future) -> None: + context = copy_context() + context.run(sniffio.current_async_library_cvar.set, 'trio') + trio.from_thread.run_sync( + context.run, partial(self._task_group.start_soon, name=name), self._call_func, func, + args, kwargs, future, trio_token=self._token) + + +# +# Subprocesses +# + +@dataclass(eq=False) +class ReceiveStreamWrapper(abc.ByteReceiveStream): + _stream: trio.abc.ReceiveStream + + async def receive(self, max_bytes: Optional[int] = None) -> bytes: + try: + data = await self._stream.receive_some(max_bytes) + except trio.ClosedResourceError as exc: + raise ClosedResourceError from exc.__cause__ + except trio.BrokenResourceError as exc: + raise BrokenResourceError from exc.__cause__ + + if data: + return data + else: + raise EndOfStream + + async def aclose(self) -> None: + await self._stream.aclose() + + +@dataclass(eq=False) +class SendStreamWrapper(abc.ByteSendStream): + _stream: trio.abc.SendStream + + async def send(self, item: bytes) -> None: + try: + await self._stream.send_all(item) + except trio.ClosedResourceError as exc: + raise ClosedResourceError from exc.__cause__ + except trio.BrokenResourceError as exc: + raise BrokenResourceError from exc.__cause__ + + async def aclose(self) -> None: + await self._stream.aclose() + + +@dataclass(eq=False) +class Process(abc.Process): + _process: trio.Process + _stdin: Optional[abc.ByteSendStream] + _stdout: Optional[abc.ByteReceiveStream] + _stderr: Optional[abc.ByteReceiveStream] + + async def aclose(self) -> None: + if self._stdin: + await self._stdin.aclose() + if self._stdout: + await self._stdout.aclose() + if self._stderr: + await self._stderr.aclose() + + await self.wait() + + async def wait(self) -> int: + return await self._process.wait() + + def terminate(self) -> None: + self._process.terminate() + + def kill(self) -> None: + self._process.kill() + + def send_signal(self, signal: Signals) -> None: + 
self._process.send_signal(signal) + + @property + def pid(self) -> int: + return self._process.pid + + @property + def returncode(self) -> Optional[int]: + return self._process.returncode + + @property + def stdin(self) -> Optional[abc.ByteSendStream]: + return self._stdin + + @property + def stdout(self) -> Optional[abc.ByteReceiveStream]: + return self._stdout + + @property + def stderr(self) -> Optional[abc.ByteReceiveStream]: + return self._stderr + + +async def open_process(command: Union[str, Sequence[str]], *, shell: bool, + stdin: int, stdout: int, stderr: int, + cwd: Union[str, bytes, PathLike, None] = None, + env: Optional[Mapping[str, str]] = None, + start_new_session: bool = False) -> Process: + process = await trio_open_process(command, stdin=stdin, stdout=stdout, stderr=stderr, + shell=shell, cwd=cwd, env=env, + start_new_session=start_new_session) + stdin_stream = SendStreamWrapper(process.stdin) if process.stdin else None + stdout_stream = ReceiveStreamWrapper(process.stdout) if process.stdout else None + stderr_stream = ReceiveStreamWrapper(process.stderr) if process.stderr else None + return Process(process, stdin_stream, stdout_stream, stderr_stream) + + +class _ProcessPoolShutdownInstrument(trio.abc.Instrument): + def after_run(self) -> None: + super().after_run() + + +current_default_worker_process_limiter: RunVar = RunVar( + 'current_default_worker_process_limiter') + + +async def _shutdown_process_pool(workers: Set[Process]) -> None: + process: Process + try: + await sleep(math.inf) + except trio.Cancelled: + for process in workers: + if process.returncode is None: + process.kill() + + with CancelScope(shield=True): + for process in workers: + await process.aclose() + + +def setup_process_pool_exit_at_shutdown(workers: Set[Process]) -> None: + trio.lowlevel.spawn_system_task(_shutdown_process_pool, workers) + + +# +# Sockets and networking +# + +class _TrioSocketMixin(Generic[T_SockAddr]): + def __init__(self, trio_socket: TrioSocketType) -> 
None: + self._trio_socket = trio_socket + self._closed = False + + def _check_closed(self) -> None: + if self._closed: + raise ClosedResourceError + if self._trio_socket.fileno() < 0: + raise BrokenResourceError + + @property + def _raw_socket(self) -> socket.socket: + return self._trio_socket._sock # type: ignore[attr-defined] + + async def aclose(self) -> None: + if self._trio_socket.fileno() >= 0: + self._closed = True + self._trio_socket.close() + + def _convert_socket_error(self, exc: BaseException) -> 'NoReturn': + if isinstance(exc, trio.ClosedResourceError): + raise ClosedResourceError from exc + elif self._trio_socket.fileno() < 0 and self._closed: + raise ClosedResourceError from None + elif isinstance(exc, OSError): + raise BrokenResourceError from exc + else: + raise exc + + +class SocketStream(_TrioSocketMixin, abc.SocketStream): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard('reading from') + self._send_guard = ResourceGuard('writing to') + + async def receive(self, max_bytes: int = 65536) -> bytes: + with self._receive_guard: + try: + data = await self._trio_socket.recv(max_bytes) + except BaseException as exc: + self._convert_socket_error(exc) + + if data: + return data + else: + raise EndOfStream + + async def send(self, item: bytes) -> None: + with self._send_guard: + view = memoryview(item) + while view: + try: + bytes_sent = await self._trio_socket.send(view) + except BaseException as exc: + self._convert_socket_error(exc) + + view = view[bytes_sent:] + + async def send_eof(self) -> None: + self._trio_socket.shutdown(socket.SHUT_WR) + + +class UNIXSocketStream(SocketStream, abc.UNIXSocketStream): + async def receive_fds(self, msglen: int, maxfds: int) -> Tuple[bytes, List[int]]: + if not isinstance(msglen, int) or msglen < 0: + raise ValueError('msglen must be a non-negative integer') + if not isinstance(maxfds, int) or maxfds < 1: + raise ValueError('maxfds must 
be a positive integer') + + fds = array.array("i") + await checkpoint() + with self._receive_guard: + while True: + try: + message, ancdata, flags, addr = await self._trio_socket.recvmsg( + msglen, socket.CMSG_LEN(maxfds * fds.itemsize)) + except BaseException as exc: + self._convert_socket_error(exc) + else: + if not message and not ancdata: + raise EndOfStream + + break + + for cmsg_level, cmsg_type, cmsg_data in ancdata: + if cmsg_level != socket.SOL_SOCKET or cmsg_type != socket.SCM_RIGHTS: + raise RuntimeError(f'Received unexpected ancillary data; message = {message!r}, ' + f'cmsg_level = {cmsg_level}, cmsg_type = {cmsg_type}') + + fds.frombytes(cmsg_data[:len(cmsg_data) - (len(cmsg_data) % fds.itemsize)]) + + return message, list(fds) + + async def send_fds(self, message: bytes, fds: Collection[Union[int, IOBase]]) -> None: + if not message: + raise ValueError('message must not be empty') + if not fds: + raise ValueError('fds must not be empty') + + filenos: List[int] = [] + for fd in fds: + if isinstance(fd, int): + filenos.append(fd) + elif isinstance(fd, IOBase): + filenos.append(fd.fileno()) + + fdarray = array.array("i", filenos) + await checkpoint() + with self._send_guard: + while True: + try: + await self._trio_socket.sendmsg( + [message], + [(socket.SOL_SOCKET, socket.SCM_RIGHTS, # type: ignore[list-item] + fdarray)] + ) + break + except BaseException as exc: + self._convert_socket_error(exc) + + +class TCPSocketListener(_TrioSocketMixin, abc.SocketListener): + def __init__(self, raw_socket: socket.socket): + super().__init__(trio.socket.from_stdlib_socket(raw_socket)) + self._accept_guard = ResourceGuard('accepting connections from') + + async def accept(self) -> SocketStream: + with self._accept_guard: + try: + trio_socket, _addr = await self._trio_socket.accept() + except BaseException as exc: + self._convert_socket_error(exc) + + trio_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + return SocketStream(trio_socket) + + +class 
UNIXSocketListener(_TrioSocketMixin, abc.SocketListener): + def __init__(self, raw_socket: socket.socket): + super().__init__(trio.socket.from_stdlib_socket(raw_socket)) + self._accept_guard = ResourceGuard('accepting connections from') + + async def accept(self) -> UNIXSocketStream: + with self._accept_guard: + try: + trio_socket, _addr = await self._trio_socket.accept() + except BaseException as exc: + self._convert_socket_error(exc) + + return UNIXSocketStream(trio_socket) + + +class UDPSocket(_TrioSocketMixin[IPSockAddrType], abc.UDPSocket): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard('reading from') + self._send_guard = ResourceGuard('writing to') + + async def receive(self) -> Tuple[bytes, IPSockAddrType]: + with self._receive_guard: + try: + data, addr = await self._trio_socket.recvfrom(65536) + return data, convert_ipv6_sockaddr(addr) + except BaseException as exc: + self._convert_socket_error(exc) + + async def send(self, item: UDPPacketType) -> None: + with self._send_guard: + try: + await self._trio_socket.sendto(*item) + except BaseException as exc: + self._convert_socket_error(exc) + + +class ConnectedUDPSocket(_TrioSocketMixin[IPSockAddrType], abc.ConnectedUDPSocket): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard('reading from') + self._send_guard = ResourceGuard('writing to') + + async def receive(self) -> bytes: + with self._receive_guard: + try: + return await self._trio_socket.recv(65536) + except BaseException as exc: + self._convert_socket_error(exc) + + async def send(self, item: bytes) -> None: + with self._send_guard: + try: + await self._trio_socket.send(item) + except BaseException as exc: + self._convert_socket_error(exc) + + +async def connect_tcp(host: str, port: int, + local_address: Optional[IPSockAddrType] = None) -> SocketStream: + family = socket.AF_INET6 if ':' in 
host else socket.AF_INET + trio_socket = trio.socket.socket(family) + trio_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + if local_address: + await trio_socket.bind(local_address) + + try: + await trio_socket.connect((host, port)) + except BaseException: + trio_socket.close() + raise + + return SocketStream(trio_socket) + + +async def connect_unix(path: str) -> UNIXSocketStream: + trio_socket = trio.socket.socket(socket.AF_UNIX) + try: + await trio_socket.connect(path) + except BaseException: + trio_socket.close() + raise + + return UNIXSocketStream(trio_socket) + + +async def create_udp_socket( + family: socket.AddressFamily, + local_address: Optional[IPSockAddrType], + remote_address: Optional[IPSockAddrType], + reuse_port: bool +) -> Union[UDPSocket, ConnectedUDPSocket]: + trio_socket = trio.socket.socket(family=family, type=socket.SOCK_DGRAM) + + if reuse_port: + trio_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) + + if local_address: + await trio_socket.bind(local_address) + + if remote_address: + await trio_socket.connect(remote_address) + return ConnectedUDPSocket(trio_socket) + else: + return UDPSocket(trio_socket) + + +getaddrinfo = trio.socket.getaddrinfo +getnameinfo = trio.socket.getnameinfo + + +async def wait_socket_readable(sock: socket.socket) -> None: + try: + await wait_readable(sock) + except trio.ClosedResourceError as exc: + raise ClosedResourceError().with_traceback(exc.__traceback__) from None + except trio.BusyResourceError: + raise BusyResourceError('reading from') from None + + +async def wait_socket_writable(sock: socket.socket) -> None: + try: + await wait_writable(sock) + except trio.ClosedResourceError as exc: + raise ClosedResourceError().with_traceback(exc.__traceback__) from None + except trio.BusyResourceError: + raise BusyResourceError('writing to') from None + + +# +# Synchronization +# + +class Event(BaseEvent): + def __new__(cls) -> 'Event': + return object.__new__(cls) + + def __init__(self) -> 
None: + self.__original = trio.Event() + + def is_set(self) -> bool: + return self.__original.is_set() + + async def wait(self) -> None: + return await self.__original.wait() + + def statistics(self) -> EventStatistics: + orig_statistics = self.__original.statistics() + return EventStatistics(tasks_waiting=orig_statistics.tasks_waiting) + + def set(self) -> DeprecatedAwaitable: + self.__original.set() + return DeprecatedAwaitable(self.set) + + +class CapacityLimiter(BaseCapacityLimiter): + def __new__(cls, *args: object, **kwargs: object) -> "CapacityLimiter": + return object.__new__(cls) + + def __init__(self, *args: Any, original: Optional[trio.CapacityLimiter] = None) -> None: + self.__original = original or trio.CapacityLimiter(*args) + + async def __aenter__(self) -> None: + return await self.__original.__aenter__() + + async def __aexit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Optional[bool]: + return await self.__original.__aexit__(exc_type, exc_val, exc_tb) + + @property + def total_tokens(self) -> float: + return self.__original.total_tokens + + @total_tokens.setter + def total_tokens(self, value: float) -> None: + self.__original.total_tokens = value + + @property + def borrowed_tokens(self) -> int: + return self.__original.borrowed_tokens + + @property + def available_tokens(self) -> float: + return self.__original.available_tokens + + def acquire_nowait(self) -> DeprecatedAwaitable: + self.__original.acquire_nowait() + return DeprecatedAwaitable(self.acquire_nowait) + + def acquire_on_behalf_of_nowait(self, borrower: object) -> DeprecatedAwaitable: + self.__original.acquire_on_behalf_of_nowait(borrower) + return DeprecatedAwaitable(self.acquire_on_behalf_of_nowait) + + async def acquire(self) -> None: + await self.__original.acquire() + + async def acquire_on_behalf_of(self, borrower: object) -> None: + await self.__original.acquire_on_behalf_of(borrower) + + def 
release(self) -> None: + return self.__original.release() + + def release_on_behalf_of(self, borrower: object) -> None: + return self.__original.release_on_behalf_of(borrower) + + def statistics(self) -> CapacityLimiterStatistics: + orig = self.__original.statistics() + return CapacityLimiterStatistics( + borrowed_tokens=orig.borrowed_tokens, total_tokens=orig.total_tokens, + borrowers=orig.borrowers, tasks_waiting=orig.tasks_waiting) + + +_capacity_limiter_wrapper: RunVar = RunVar('_capacity_limiter_wrapper') + + +def current_default_thread_limiter() -> CapacityLimiter: + try: + return _capacity_limiter_wrapper.get() + except LookupError: + limiter = CapacityLimiter(original=trio.to_thread.current_default_thread_limiter()) + _capacity_limiter_wrapper.set(limiter) + return limiter + + +# +# Signal handling +# + +class _SignalReceiver(DeprecatedAsyncContextManager[T]): + def __init__(self, cm: ContextManager[T]): + self._cm = cm + + def __enter__(self) -> T: + return self._cm.__enter__() + + def __exit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Optional[bool]: + return self._cm.__exit__(exc_type, exc_val, exc_tb) + + +def open_signal_receiver(*signals: Signals) -> _SignalReceiver: + cm = trio.open_signal_receiver(*signals) + return _SignalReceiver(cm) + +# +# Testing and debugging +# + + +def get_current_task() -> TaskInfo: + task = trio_lowlevel.current_task() + + parent_id = None + if task.parent_nursery and task.parent_nursery.parent_task: + parent_id = id(task.parent_nursery.parent_task) + + return TaskInfo(id(task), parent_id, task.name, task.coro) + + +def get_running_tasks() -> List[TaskInfo]: + root_task = trio_lowlevel.current_root_task() + task_infos = [TaskInfo(id(root_task), None, root_task.name, root_task.coro)] + nurseries = root_task.child_nurseries + while nurseries: + new_nurseries: List[trio.Nursery] = [] + for nursery in nurseries: + for task in nursery.child_tasks: + 
task_infos.append( + TaskInfo(id(task), id(nursery.parent_task), task.name, task.coro)) + new_nurseries.extend(task.child_nurseries) + + nurseries = new_nurseries + + return task_infos + + +def wait_all_tasks_blocked() -> Awaitable[None]: + import trio.testing + return trio.testing.wait_all_tasks_blocked() + + +class TestRunner(abc.TestRunner): + def __init__(self, **options: Any) -> None: + from collections import deque + from queue import Queue + + self._call_queue: "Queue[Callable[..., object]]" = Queue() + self._result_queue: Deque[Outcome] = deque() + self._stop_event: Optional[trio.Event] = None + self._nursery: Optional[trio.Nursery] = None + self._options = options + + async def _trio_main(self) -> None: + self._stop_event = trio.Event() + async with trio.open_nursery() as self._nursery: + await self._stop_event.wait() + + async def _call_func(self, func: Callable[..., Awaitable[object]], + args: tuple, kwargs: dict) -> None: + try: + retval = await func(*args, **kwargs) + except BaseException as exc: + self._result_queue.append(Error(exc)) + else: + self._result_queue.append(Value(retval)) + + def _main_task_finished(self, outcome: object) -> None: + self._nursery = None + + def close(self) -> None: + if self._stop_event: + self._stop_event.set() + while self._nursery is not None: + self._call_queue.get()() + + def call(self, func: Callable[..., Awaitable[T_Retval]], + *args: object, **kwargs: object) -> T_Retval: + if self._nursery is None: + trio.lowlevel.start_guest_run( + self._trio_main, run_sync_soon_threadsafe=self._call_queue.put, + done_callback=self._main_task_finished, **self._options) + while self._nursery is None: + self._call_queue.get()() + + self._nursery.start_soon(self._call_func, func, args, kwargs) + while not self._result_queue: + self._call_queue.get()() + + outcome = self._result_queue.pop() + return outcome.unwrap() diff --git a/.venv/lib/python3.9/site-packages/anyio/_core/__init__.py 
b/.venv/lib/python3.9/site-packages/anyio/_core/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.9/site-packages/anyio/_core/_compat.py b/.venv/lib/python3.9/site-packages/anyio/_core/_compat.py new file mode 100644 index 0000000..8a0cfd0 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/anyio/_core/_compat.py @@ -0,0 +1,175 @@ +from abc import ABCMeta, abstractmethod +from contextlib import AbstractContextManager +from types import TracebackType +from typing import ( + TYPE_CHECKING, Any, AsyncContextManager, Callable, ContextManager, Generator, Generic, + Iterable, List, Optional, Tuple, Type, TypeVar, Union, overload) +from warnings import warn + +if TYPE_CHECKING: + from ._testing import TaskInfo +else: + TaskInfo = object + +T = TypeVar('T') +AnyDeprecatedAwaitable = Union['DeprecatedAwaitable', 'DeprecatedAwaitableFloat', + 'DeprecatedAwaitableList[T]', TaskInfo] + + +@overload +async def maybe_async(__obj: TaskInfo) -> TaskInfo: + ... + + +@overload +async def maybe_async(__obj: 'DeprecatedAwaitableFloat') -> float: + ... + + +@overload +async def maybe_async(__obj: 'DeprecatedAwaitableList[T]') -> List[T]: + ... + + +@overload +async def maybe_async(__obj: 'DeprecatedAwaitable') -> None: + ... + + +async def maybe_async(__obj: 'AnyDeprecatedAwaitable[T]') -> Union[TaskInfo, float, List[T], None]: + """ + Await on the given object if necessary. + + This function is intended to bridge the gap between AnyIO 2.x and 3.x where some functions and + methods were converted from coroutine functions into regular functions. + + Do **not** try to use this for any other purpose! + + :return: the result of awaiting on the object if coroutine, or the object itself otherwise + + .. 
versionadded:: 2.2 + + """ + return __obj._unwrap() + + +class _ContextManagerWrapper: + def __init__(self, cm: ContextManager[T]): + self._cm = cm + + async def __aenter__(self) -> T: + return self._cm.__enter__() + + async def __aexit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Optional[bool]: + return self._cm.__exit__(exc_type, exc_val, exc_tb) + + +def maybe_async_cm(cm: Union[ContextManager[T], AsyncContextManager[T]]) -> AsyncContextManager[T]: + """ + Wrap a regular context manager as an async one if necessary. + + This function is intended to bridge the gap between AnyIO 2.x and 3.x where some functions and + methods were changed to return regular context managers instead of async ones. + + :param cm: a regular or async context manager + :return: an async context manager + + .. versionadded:: 2.2 + + """ + if not isinstance(cm, AbstractContextManager): + raise TypeError('Given object is not an context manager') + + return _ContextManagerWrapper(cm) + + +def _warn_deprecation(awaitable: 'AnyDeprecatedAwaitable[Any]', stacklevel: int = 1) -> None: + warn(f'Awaiting on {awaitable._name}() is deprecated. 
Use "await ' + f'anyio.maybe_async({awaitable._name}(...)) if you have to support both AnyIO 2.x ' + f'and 3.x, or just remove the "await" if you are completely migrating to AnyIO 3+.', + DeprecationWarning, stacklevel=stacklevel + 1) + + +class DeprecatedAwaitable: + def __init__(self, func: Callable[..., 'DeprecatedAwaitable']): + self._name = f'{func.__module__}.{func.__qualname__}' + + def __await__(self) -> Generator[None, None, None]: + _warn_deprecation(self) + if False: + yield + + def __reduce__(self) -> Tuple[Type[None], Tuple[()]]: + return type(None), () + + def _unwrap(self) -> None: + return None + + +class DeprecatedAwaitableFloat(float): + def __new__( + cls, x: float, func: Callable[..., 'DeprecatedAwaitableFloat'] + ) -> 'DeprecatedAwaitableFloat': + return super().__new__(cls, x) + + def __init__(self, x: float, func: Callable[..., 'DeprecatedAwaitableFloat']): + self._name = f'{func.__module__}.{func.__qualname__}' + + def __await__(self) -> Generator[None, None, float]: + _warn_deprecation(self) + if False: + yield + + return float(self) + + def __reduce__(self) -> Tuple[Type[float], Tuple[float]]: + return float, (float(self),) + + def _unwrap(self) -> float: + return float(self) + + +class DeprecatedAwaitableList(List[T]): + def __init__(self, iterable: Iterable[T] = (), *, + func: Callable[..., 'DeprecatedAwaitableList[T]']): + super().__init__(iterable) + self._name = f'{func.__module__}.{func.__qualname__}' + + def __await__(self) -> Generator[None, None, List[T]]: + _warn_deprecation(self) + if False: + yield + + return list(self) + + def __reduce__(self) -> Tuple[Type[List[T]], Tuple[List[T]]]: + return list, (list(self),) + + def _unwrap(self) -> List[T]: + return list(self) + + +class DeprecatedAsyncContextManager(Generic[T], metaclass=ABCMeta): + @abstractmethod + def __enter__(self) -> T: + pass + + @abstractmethod + def __exit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: 
Optional[TracebackType]) -> Optional[bool]: + pass + + async def __aenter__(self) -> T: + warn(f'Using {self.__class__.__name__} as an async context manager has been deprecated. ' + f'Use "async with anyio.maybe_async_cm(yourcontextmanager) as foo:" if you have to ' + f'support both AnyIO 2.x and 3.x, or just remove the "async" from "async with" if ' + f'you are completely migrating to AnyIO 3+.', DeprecationWarning) + return self.__enter__() + + async def __aexit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Optional[bool]: + return self.__exit__(exc_type, exc_val, exc_tb) diff --git a/.venv/lib/python3.9/site-packages/anyio/_core/_eventloop.py b/.venv/lib/python3.9/site-packages/anyio/_core/_eventloop.py new file mode 100644 index 0000000..9de0a84 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/anyio/_core/_eventloop.py @@ -0,0 +1,140 @@ +import math +import sys +import threading +from contextlib import contextmanager +from importlib import import_module +from typing import Any, Callable, Coroutine, Dict, Generator, Optional, Tuple, Type, TypeVar + +import sniffio + +# This must be updated when new backends are introduced +from ._compat import DeprecatedAwaitableFloat + +BACKENDS = 'asyncio', 'trio' + +T_Retval = TypeVar('T_Retval') +threadlocals = threading.local() + + +def run(func: Callable[..., Coroutine[Any, Any, T_Retval]], *args: object, + backend: str = 'asyncio', backend_options: Optional[Dict[str, Any]] = None) -> T_Retval: + """ + Run the given coroutine function in an asynchronous event loop. + + The current thread must not be already running an event loop. 
+ + :param func: a coroutine function + :param args: positional arguments to ``func`` + :param backend: name of the asynchronous event loop implementation – currently either + ``asyncio`` or ``trio`` + :param backend_options: keyword arguments to call the backend ``run()`` implementation with + (documented :ref:`here `) + :return: the return value of the coroutine function + :raises RuntimeError: if an asynchronous event loop is already running in this thread + :raises LookupError: if the named backend is not found + + """ + try: + asynclib_name = sniffio.current_async_library() + except sniffio.AsyncLibraryNotFoundError: + pass + else: + raise RuntimeError(f'Already running {asynclib_name} in this thread') + + try: + asynclib = import_module(f'..._backends._{backend}', package=__name__) + except ImportError as exc: + raise LookupError(f'No such backend: {backend}') from exc + + token = None + if sniffio.current_async_library_cvar.get(None) is None: + # Since we're in control of the event loop, we can cache the name of the async library + token = sniffio.current_async_library_cvar.set(backend) + + try: + backend_options = backend_options or {} + return asynclib.run(func, *args, **backend_options) + finally: + if token: + sniffio.current_async_library_cvar.reset(token) + + +async def sleep(delay: float) -> None: + """ + Pause the current task for the specified duration. + + :param delay: the duration, in seconds + + """ + return await get_asynclib().sleep(delay) + + +async def sleep_forever() -> None: + """ + Pause the current task until it's cancelled. + + This is a shortcut for ``sleep(math.inf)``. + + .. versionadded:: 3.1 + + """ + await sleep(math.inf) + + +async def sleep_until(deadline: float) -> None: + """ + Pause the current task until the given time. + + :param deadline: the absolute time to wake up at (according to the internal monotonic clock of + the event loop) + + .. 
versionadded:: 3.1 + + """ + now = current_time() + await sleep(max(deadline - now, 0)) + + +def current_time() -> DeprecatedAwaitableFloat: + """ + Return the current value of the event loop's internal clock. + + :return: the clock value (seconds) + + """ + return DeprecatedAwaitableFloat(get_asynclib().current_time(), current_time) + + +def get_all_backends() -> Tuple[str, ...]: + """Return a tuple of the names of all built-in backends.""" + return BACKENDS + + +def get_cancelled_exc_class() -> Type[BaseException]: + """Return the current async library's cancellation exception class.""" + return get_asynclib().CancelledError + + +# +# Private API +# + +@contextmanager +def claim_worker_thread(backend: str) -> Generator[Any, None, None]: + module = sys.modules['anyio._backends._' + backend] + threadlocals.current_async_module = module + try: + yield + finally: + del threadlocals.current_async_module + + +def get_asynclib(asynclib_name: Optional[str] = None) -> Any: + if asynclib_name is None: + asynclib_name = sniffio.current_async_library() + + modulename = 'anyio._backends._' + asynclib_name + try: + return sys.modules[modulename] + except KeyError: + return import_module(modulename) diff --git a/.venv/lib/python3.9/site-packages/anyio/_core/_exceptions.py b/.venv/lib/python3.9/site-packages/anyio/_core/_exceptions.py new file mode 100644 index 0000000..06db05d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/anyio/_core/_exceptions.py @@ -0,0 +1,85 @@ +from traceback import format_exception +from typing import List + + +class BrokenResourceError(Exception): + """ + Raised when trying to use a resource that has been rendered unusable due to external causes + (e.g. a send stream whose peer has disconnected). + """ + + +class BrokenWorkerProcess(Exception): + """ + Raised by :func:`run_sync_in_process` if the worker process terminates abruptly or otherwise + misbehaves. 
+ """ + + +class BusyResourceError(Exception): + """Raised when two tasks are trying to read from or write to the same resource concurrently.""" + + def __init__(self, action: str): + super().__init__(f'Another task is already {action} this resource') + + +class ClosedResourceError(Exception): + """Raised when trying to use a resource that has been closed.""" + + +class DelimiterNotFound(Exception): + """ + Raised during :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the + maximum number of bytes has been read without the delimiter being found. + """ + + def __init__(self, max_bytes: int) -> None: + super().__init__(f'The delimiter was not found among the first {max_bytes} bytes') + + +class EndOfStream(Exception): + """Raised when trying to read from a stream that has been closed from the other end.""" + + +class ExceptionGroup(BaseException): + """ + Raised when multiple exceptions have been raised in a task group. + + :var ~typing.Sequence[BaseException] exceptions: the sequence of exceptions raised together + """ + + SEPARATOR = '----------------------------\n' + + exceptions: List[BaseException] + + def __str__(self) -> str: + tracebacks = [''.join(format_exception(type(exc), exc, exc.__traceback__)) + for exc in self.exceptions] + return f'{len(self.exceptions)} exceptions were raised in the task group:\n' \ + f'{self.SEPARATOR}{self.SEPARATOR.join(tracebacks)}' + + def __repr__(self) -> str: + exception_reprs = ', '.join(repr(exc) for exc in self.exceptions) + return f'<{self.__class__.__name__}: {exception_reprs}>' + + +class IncompleteRead(Exception): + """ + Raised during :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_exactly` or + :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the + connection is closed before the requested amount of bytes has been read. 
+ """ + + def __init__(self) -> None: + super().__init__('The stream was closed before the read operation could be completed') + + +class TypedAttributeLookupError(LookupError): + """ + Raised by :meth:`~anyio.TypedAttributeProvider.extra` when the given typed attribute is not + found and no default value has been given. + """ + + +class WouldBlock(Exception): + """Raised by ``X_nowait`` functions if ``X()`` would block.""" diff --git a/.venv/lib/python3.9/site-packages/anyio/_core/_fileio.py b/.venv/lib/python3.9/site-packages/anyio/_core/_fileio.py new file mode 100644 index 0000000..2aee21a --- /dev/null +++ b/.venv/lib/python3.9/site-packages/anyio/_core/_fileio.py @@ -0,0 +1,529 @@ +import os +import pathlib +import sys +from dataclasses import dataclass +from functools import partial +from os import PathLike +from typing import ( + IO, TYPE_CHECKING, Any, AnyStr, AsyncIterator, Callable, Generic, Iterable, Iterator, List, + Optional, Sequence, Tuple, Union, cast, overload) + +from .. import to_thread +from ..abc import AsyncResource + +if sys.version_info >= (3, 8): + from typing import Final +else: + from typing_extensions import Final + +if TYPE_CHECKING: + from _typeshed import OpenBinaryMode, OpenTextMode, ReadableBuffer, WriteableBuffer +else: + ReadableBuffer = OpenBinaryMode = OpenTextMode = WriteableBuffer = object + + +class AsyncFile(AsyncResource, Generic[AnyStr]): + """ + An asynchronous file object. + + This class wraps a standard file object and provides async friendly versions of the following + blocking methods (where available on the original file object): + + * read + * read1 + * readline + * readlines + * readinto + * readinto1 + * write + * writelines + * truncate + * seek + * tell + * flush + + All other methods are directly passed through. + + This class supports the asynchronous context manager protocol which closes the underlying file + at the end of the context block. 
+ + This class also supports asynchronous iteration:: + + async with await open_file(...) as f: + async for line in f: + print(line) + """ + + def __init__(self, fp: IO[AnyStr]) -> None: + self._fp: Any = fp + + def __getattr__(self, name: str) -> object: + return getattr(self._fp, name) + + @property + def wrapped(self) -> IO[AnyStr]: + """The wrapped file object.""" + return self._fp + + async def __aiter__(self) -> AsyncIterator[AnyStr]: + while True: + line = await self.readline() + if line: + yield line + else: + break + + async def aclose(self) -> None: + return await to_thread.run_sync(self._fp.close) + + async def read(self, size: int = -1) -> AnyStr: + return await to_thread.run_sync(self._fp.read, size) + + async def read1(self: 'AsyncFile[bytes]', size: int = -1) -> bytes: + return await to_thread.run_sync(self._fp.read1, size) + + async def readline(self) -> AnyStr: + return await to_thread.run_sync(self._fp.readline) + + async def readlines(self) -> List[AnyStr]: + return await to_thread.run_sync(self._fp.readlines) + + async def readinto(self: 'AsyncFile[bytes]', b: WriteableBuffer) -> bytes: + return await to_thread.run_sync(self._fp.readinto, b) + + async def readinto1(self: 'AsyncFile[bytes]', b: WriteableBuffer) -> bytes: + return await to_thread.run_sync(self._fp.readinto1, b) + + @overload + async def write(self: 'AsyncFile[bytes]', b: ReadableBuffer) -> int: ... + + @overload + async def write(self: 'AsyncFile[str]', b: str) -> int: ... + + async def write(self, b: Union[ReadableBuffer, str]) -> int: + return await to_thread.run_sync(self._fp.write, b) + + @overload + async def writelines(self: 'AsyncFile[bytes]', lines: Iterable[ReadableBuffer]) -> None: ... + + @overload + async def writelines(self: 'AsyncFile[str]', lines: Iterable[str]) -> None: ... 
+ + async def writelines(self, lines: Union[Iterable[ReadableBuffer], Iterable[str]]) -> None: + return await to_thread.run_sync(self._fp.writelines, lines) + + async def truncate(self, size: Optional[int] = None) -> int: + return await to_thread.run_sync(self._fp.truncate, size) + + async def seek(self, offset: int, whence: Optional[int] = os.SEEK_SET) -> int: + return await to_thread.run_sync(self._fp.seek, offset, whence) + + async def tell(self) -> int: + return await to_thread.run_sync(self._fp.tell) + + async def flush(self) -> None: + return await to_thread.run_sync(self._fp.flush) + + +@overload +async def open_file(file: Union[str, 'PathLike[str]', int], mode: OpenBinaryMode, + buffering: int = ..., encoding: Optional[str] = ..., + errors: Optional[str] = ..., newline: Optional[str] = ..., closefd: bool = ..., + opener: Optional[Callable[[str, int], int]] = ...) -> AsyncFile[bytes]: + ... + + +@overload +async def open_file(file: Union[str, 'PathLike[str]', int], mode: OpenTextMode = ..., + buffering: int = ..., encoding: Optional[str] = ..., + errors: Optional[str] = ..., newline: Optional[str] = ..., closefd: bool = ..., + opener: Optional[Callable[[str, int], int]] = ...) -> AsyncFile[str]: + ... + + +async def open_file(file: Union[str, 'PathLike[str]', int], mode: str = 'r', buffering: int = -1, + encoding: Optional[str] = None, errors: Optional[str] = None, + newline: Optional[str] = None, closefd: bool = True, + opener: Optional[Callable[[str, int], int]] = None) -> AsyncFile[Any]: + """ + Open a file asynchronously. + + The arguments are exactly the same as for the builtin :func:`open`. + + :return: an asynchronous file object + + """ + fp = await to_thread.run_sync(open, file, mode, buffering, encoding, errors, newline, + closefd, opener) + return AsyncFile(fp) + + +def wrap_file(file: IO[AnyStr]) -> AsyncFile[AnyStr]: + """ + Wrap an existing file as an asynchronous file. 
+ + :param file: an existing file-like object + :return: an asynchronous file object + + """ + return AsyncFile(file) + + +@dataclass(eq=False) +class _PathIterator(AsyncIterator['Path']): + iterator: Iterator['PathLike[str]'] + + async def __anext__(self) -> 'Path': + nextval = await to_thread.run_sync(next, self.iterator, None, cancellable=True) + if nextval is None: + raise StopAsyncIteration from None + + return Path(cast('PathLike[str]', nextval)) + + +class Path: + """ + An asynchronous version of :class:`pathlib.Path`. + + This class cannot be substituted for :class:`pathlib.Path` or :class:`pathlib.PurePath`, but + it is compatible with the :class:`os.PathLike` interface. + + It implements the Python 3.10 version of :class:`pathlib.Path` interface, except for the + deprecated :meth:`~pathlib.Path.link_to` method. + + Any methods that do disk I/O need to be awaited on. These methods are: + + * :meth:`~pathlib.Path.absolute` + * :meth:`~pathlib.Path.chmod` + * :meth:`~pathlib.Path.cwd` + * :meth:`~pathlib.Path.exists` + * :meth:`~pathlib.Path.expanduser` + * :meth:`~pathlib.Path.group` + * :meth:`~pathlib.Path.hardlink_to` + * :meth:`~pathlib.Path.home` + * :meth:`~pathlib.Path.is_block_device` + * :meth:`~pathlib.Path.is_char_device` + * :meth:`~pathlib.Path.is_dir` + * :meth:`~pathlib.Path.is_fifo` + * :meth:`~pathlib.Path.is_file` + * :meth:`~pathlib.Path.is_mount` + * :meth:`~pathlib.Path.lchmod` + * :meth:`~pathlib.Path.lstat` + * :meth:`~pathlib.Path.mkdir` + * :meth:`~pathlib.Path.open` + * :meth:`~pathlib.Path.owner` + * :meth:`~pathlib.Path.read_bytes` + * :meth:`~pathlib.Path.read_text` + * :meth:`~pathlib.Path.readlink` + * :meth:`~pathlib.Path.rename` + * :meth:`~pathlib.Path.replace` + * :meth:`~pathlib.Path.rmdir` + * :meth:`~pathlib.Path.samefile` + * :meth:`~pathlib.Path.stat` + * :meth:`~pathlib.Path.touch` + * :meth:`~pathlib.Path.unlink` + * :meth:`~pathlib.Path.write_bytes` + * :meth:`~pathlib.Path.write_text` + + Additionally, the 
following methods return an async iterator yielding :class:`~.Path` objects: + + * :meth:`~pathlib.Path.glob` + * :meth:`~pathlib.Path.iterdir` + * :meth:`~pathlib.Path.rglob` + """ + + __slots__ = '_path', '__weakref__' + + __weakref__: Any + + def __init__(self, *args: Union[str, 'PathLike[str]']) -> None: + self._path: Final[pathlib.Path] = pathlib.Path(*args) + + def __fspath__(self) -> str: + return self._path.__fspath__() + + def __str__(self) -> str: + return self._path.__str__() + + def __repr__(self) -> str: + return f'{self.__class__.__name__}({self.as_posix()!r})' + + def __bytes__(self) -> bytes: + return self._path.__bytes__() + + def __hash__(self) -> int: + return self._path.__hash__() + + def __eq__(self, other: object) -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__eq__(target) + + def __lt__(self, other: 'Path') -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__lt__(target) + + def __le__(self, other: 'Path') -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__le__(target) + + def __gt__(self, other: 'Path') -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__gt__(target) + + def __ge__(self, other: 'Path') -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__ge__(target) + + def __truediv__(self, other: Any) -> 'Path': + return Path(self._path / other) + + def __rtruediv__(self, other: Any) -> 'Path': + return Path(other) / self + + @property + def parts(self) -> Tuple[str, ...]: + return self._path.parts + + @property + def drive(self) -> str: + return self._path.drive + + @property + def root(self) -> str: + return self._path.root + + @property + def anchor(self) -> str: + return self._path.anchor + + @property + def parents(self) -> Sequence['Path']: + return tuple(Path(p) for p in self._path.parents) + + @property + def parent(self) -> 
'Path': + return Path(self._path.parent) + + @property + def name(self) -> str: + return self._path.name + + @property + def suffix(self) -> str: + return self._path.suffix + + @property + def suffixes(self) -> List[str]: + return self._path.suffixes + + @property + def stem(self) -> str: + return self._path.stem + + async def absolute(self) -> 'Path': + path = await to_thread.run_sync(self._path.absolute) + return Path(path) + + def as_posix(self) -> str: + return self._path.as_posix() + + def as_uri(self) -> str: + return self._path.as_uri() + + def match(self, path_pattern: str) -> bool: + return self._path.match(path_pattern) + + def is_relative_to(self, *other: Union[str, 'PathLike[str]']) -> bool: + try: + self.relative_to(*other) + return True + except ValueError: + return False + + async def chmod(self, mode: int, *, follow_symlinks: bool = True) -> None: + func = partial(os.chmod, follow_symlinks=follow_symlinks) + return await to_thread.run_sync(func, self._path, mode) + + @classmethod + async def cwd(cls) -> 'Path': + path = await to_thread.run_sync(pathlib.Path.cwd) + return cls(path) + + async def exists(self) -> bool: + return await to_thread.run_sync(self._path.exists, cancellable=True) + + async def expanduser(self) -> 'Path': + return Path(await to_thread.run_sync(self._path.expanduser, cancellable=True)) + + def glob(self, pattern: str) -> AsyncIterator['Path']: + gen = self._path.glob(pattern) + return _PathIterator(gen) + + async def group(self) -> str: + return await to_thread.run_sync(self._path.group, cancellable=True) + + async def hardlink_to(self, target: Union[str, pathlib.Path, 'Path']) -> None: + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(os.link, target, self) + + @classmethod + async def home(cls) -> 'Path': + home_path = await to_thread.run_sync(pathlib.Path.home) + return cls(home_path) + + def is_absolute(self) -> bool: + return self._path.is_absolute() + + async def is_block_device(self) -> 
bool: + return await to_thread.run_sync(self._path.is_block_device, cancellable=True) + + async def is_char_device(self) -> bool: + return await to_thread.run_sync(self._path.is_char_device, cancellable=True) + + async def is_dir(self) -> bool: + return await to_thread.run_sync(self._path.is_dir, cancellable=True) + + async def is_fifo(self) -> bool: + return await to_thread.run_sync(self._path.is_fifo, cancellable=True) + + async def is_file(self) -> bool: + return await to_thread.run_sync(self._path.is_file, cancellable=True) + + async def is_mount(self) -> bool: + return await to_thread.run_sync(os.path.ismount, self._path, cancellable=True) + + def is_reserved(self) -> bool: + return self._path.is_reserved() + + async def is_socket(self) -> bool: + return await to_thread.run_sync(self._path.is_socket, cancellable=True) + + async def is_symlink(self) -> bool: + return await to_thread.run_sync(self._path.is_symlink, cancellable=True) + + def iterdir(self) -> AsyncIterator['Path']: + gen = self._path.iterdir() + return _PathIterator(gen) + + def joinpath(self, *args: Union[str, 'PathLike[str]']) -> 'Path': + return Path(self._path.joinpath(*args)) + + async def lchmod(self, mode: int) -> None: + await to_thread.run_sync(self._path.lchmod, mode) + + async def lstat(self) -> os.stat_result: + return await to_thread.run_sync(self._path.lstat, cancellable=True) + + async def mkdir(self, mode: int = 0o777, parents: bool = False, + exist_ok: bool = False) -> None: + await to_thread.run_sync(self._path.mkdir, mode, parents, exist_ok) + + @overload + async def open(self, mode: OpenBinaryMode, buffering: int = ..., encoding: Optional[str] = ..., + errors: Optional[str] = ..., newline: Optional[str] = ...) -> AsyncFile[bytes]: + ... + + @overload + async def open(self, mode: OpenTextMode = ..., buffering: int = ..., + encoding: Optional[str] = ..., errors: Optional[str] = ..., + newline: Optional[str] = ...) -> AsyncFile[str]: + ... 
+ + async def open(self, mode: str = 'r', buffering: int = -1, encoding: Optional[str] = None, + errors: Optional[str] = None, newline: Optional[str] = None) -> AsyncFile[Any]: + fp = await to_thread.run_sync(self._path.open, mode, buffering, encoding, errors, newline) + return AsyncFile(fp) + + async def owner(self) -> str: + return await to_thread.run_sync(self._path.owner, cancellable=True) + + async def read_bytes(self) -> bytes: + return await to_thread.run_sync(self._path.read_bytes) + + async def read_text(self, encoding: Optional[str] = None, errors: Optional[str] = None) -> str: + return await to_thread.run_sync(self._path.read_text, encoding, errors) + + def relative_to(self, *other: Union[str, 'PathLike[str]']) -> 'Path': + return Path(self._path.relative_to(*other)) + + async def readlink(self) -> 'Path': + target = await to_thread.run_sync(os.readlink, self._path) + return Path(cast(str, target)) + + async def rename(self, target: Union[str, pathlib.PurePath, 'Path']) -> 'Path': + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(self._path.rename, target) + return Path(target) + + async def replace(self, target: Union[str, pathlib.PurePath, 'Path']) -> 'Path': + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(self._path.replace, target) + return Path(target) + + async def resolve(self, strict: bool = False) -> 'Path': + func = partial(self._path.resolve, strict=strict) + return Path(await to_thread.run_sync(func, cancellable=True)) + + def rglob(self, pattern: str) -> AsyncIterator['Path']: + gen = self._path.rglob(pattern) + return _PathIterator(gen) + + async def rmdir(self) -> None: + await to_thread.run_sync(self._path.rmdir) + + async def samefile(self, other_path: Union[str, bytes, int, pathlib.Path, 'Path']) -> bool: + if isinstance(other_path, Path): + other_path = other_path._path + + return await to_thread.run_sync(self._path.samefile, other_path, cancellable=True) + + async 
def stat(self, *, follow_symlinks: bool = True) -> os.stat_result: + func = partial(os.stat, follow_symlinks=follow_symlinks) + return await to_thread.run_sync(func, self._path, cancellable=True) + + async def symlink_to(self, target: Union[str, pathlib.Path, 'Path'], + target_is_directory: bool = False) -> None: + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(self._path.symlink_to, target, target_is_directory) + + async def touch(self, mode: int = 0o666, exist_ok: bool = True) -> None: + await to_thread.run_sync(self._path.touch, mode, exist_ok) + + async def unlink(self, missing_ok: bool = False) -> None: + try: + await to_thread.run_sync(self._path.unlink) + except FileNotFoundError: + if not missing_ok: + raise + + def with_name(self, name: str) -> 'Path': + return Path(self._path.with_name(name)) + + def with_stem(self, stem: str) -> 'Path': + return Path(self._path.with_name(stem + self._path.suffix)) + + def with_suffix(self, suffix: str) -> 'Path': + return Path(self._path.with_suffix(suffix)) + + async def write_bytes(self, data: bytes) -> int: + return await to_thread.run_sync(self._path.write_bytes, data) + + async def write_text(self, data: str, encoding: Optional[str] = None, + errors: Optional[str] = None, newline: Optional[str] = None) -> int: + # Path.write_text() does not support the "newline" parameter before Python 3.10 + def sync_write_text() -> int: + with self._path.open('w', encoding=encoding, errors=errors, newline=newline) as fp: + return fp.write(data) + + return await to_thread.run_sync(sync_write_text) + + +PathLike.register(Path) diff --git a/.venv/lib/python3.9/site-packages/anyio/_core/_resources.py b/.venv/lib/python3.9/site-packages/anyio/_core/_resources.py new file mode 100644 index 0000000..b9414f7 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/anyio/_core/_resources.py @@ -0,0 +1,16 @@ +from ..abc import AsyncResource +from ._tasks import CancelScope + + +async def 
aclose_forcefully(resource: AsyncResource) -> None: + """ + Close an asynchronous resource in a cancelled scope. + + Doing this closes the resource without waiting on anything. + + :param resource: the resource to close + + """ + with CancelScope() as scope: + scope.cancel() + await resource.aclose() diff --git a/.venv/lib/python3.9/site-packages/anyio/_core/_signals.py b/.venv/lib/python3.9/site-packages/anyio/_core/_signals.py new file mode 100644 index 0000000..f761982 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/anyio/_core/_signals.py @@ -0,0 +1,22 @@ +from typing import AsyncIterator + +from ._compat import DeprecatedAsyncContextManager +from ._eventloop import get_asynclib + + +def open_signal_receiver(*signals: int) -> DeprecatedAsyncContextManager[AsyncIterator[int]]: + """ + Start receiving operating system signals. + + :param signals: signals to receive (e.g. ``signal.SIGINT``) + :return: an asynchronous context manager for an asynchronous iterator which yields signal + numbers + + .. warning:: Windows does not support signals natively so it is best to avoid relying on this + in cross-platform applications. + + .. warning:: On asyncio, this permanently replaces any previous signal handler for the given + signals, as set via :meth:`~asyncio.loop.add_signal_handler`. + + """ + return get_asynclib().open_signal_receiver(*signals) diff --git a/.venv/lib/python3.9/site-packages/anyio/_core/_sockets.py b/.venv/lib/python3.9/site-packages/anyio/_core/_sockets.py new file mode 100644 index 0000000..c086edc --- /dev/null +++ b/.venv/lib/python3.9/site-packages/anyio/_core/_sockets.py @@ -0,0 +1,506 @@ +import socket +import ssl +import sys +from ipaddress import IPv6Address, ip_address +from os import PathLike, chmod +from pathlib import Path +from socket import AddressFamily, SocketKind +from typing import Awaitable, List, Optional, Tuple, Union, cast, overload + +from .. 
import to_thread +from ..abc import ( + ConnectedUDPSocket, IPAddressType, IPSockAddrType, SocketListener, SocketStream, UDPSocket, + UNIXSocketStream) +from ..streams.stapled import MultiListener +from ..streams.tls import TLSStream +from ._eventloop import get_asynclib +from ._resources import aclose_forcefully +from ._synchronization import Event +from ._tasks import create_task_group, move_on_after + +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal + +IPPROTO_IPV6 = getattr(socket, 'IPPROTO_IPV6', 41) # https://bugs.python.org/issue29515 + +GetAddrInfoReturnType = List[Tuple[AddressFamily, SocketKind, int, str, Tuple[str, int]]] +AnyIPAddressFamily = Literal[AddressFamily.AF_UNSPEC, AddressFamily.AF_INET, + AddressFamily.AF_INET6] +IPAddressFamily = Literal[AddressFamily.AF_INET, AddressFamily.AF_INET6] + + +# tls_hostname given +@overload +async def connect_tcp( + remote_host: IPAddressType, remote_port: int, *, local_host: Optional[IPAddressType] = ..., + ssl_context: Optional[ssl.SSLContext] = ..., tls_standard_compatible: bool = ..., + tls_hostname: str, happy_eyeballs_delay: float = ... +) -> TLSStream: + ... + + +# ssl_context given +@overload +async def connect_tcp( + remote_host: IPAddressType, remote_port: int, *, local_host: Optional[IPAddressType] = ..., + ssl_context: ssl.SSLContext, tls_standard_compatible: bool = ..., + tls_hostname: Optional[str] = ..., happy_eyeballs_delay: float = ... +) -> TLSStream: + ... + + +# tls=True +@overload +async def connect_tcp( + remote_host: IPAddressType, remote_port: int, *, local_host: Optional[IPAddressType] = ..., + tls: Literal[True], ssl_context: Optional[ssl.SSLContext] = ..., + tls_standard_compatible: bool = ..., tls_hostname: Optional[str] = ..., + happy_eyeballs_delay: float = ... +) -> TLSStream: + ... 
+ + +# tls=False +@overload +async def connect_tcp( + remote_host: IPAddressType, remote_port: int, *, local_host: Optional[IPAddressType] = ..., + tls: Literal[False], ssl_context: Optional[ssl.SSLContext] = ..., + tls_standard_compatible: bool = ..., tls_hostname: Optional[str] = ..., + happy_eyeballs_delay: float = ... +) -> SocketStream: + ... + + +# No TLS arguments +@overload +async def connect_tcp( + remote_host: IPAddressType, remote_port: int, *, local_host: Optional[IPAddressType] = ..., + happy_eyeballs_delay: float = ... +) -> SocketStream: + ... + + +async def connect_tcp( + remote_host: IPAddressType, remote_port: int, *, local_host: Optional[IPAddressType] = None, + tls: bool = False, ssl_context: Optional[ssl.SSLContext] = None, + tls_standard_compatible: bool = True, tls_hostname: Optional[str] = None, + happy_eyeballs_delay: float = 0.25 +) -> Union[SocketStream, TLSStream]: + """ + Connect to a host using the TCP protocol. + + This function implements the stateless version of the Happy Eyeballs algorithm (RFC 6555). + If ``address`` is a host name that resolves to multiple IP addresses, each one is tried until + one connection attempt succeeds. If the first attempt does not connected within 250 + milliseconds, a second attempt is started using the next address in the list, and so on. + On IPv6 enabled systems, an IPv6 address (if available) is tried first. + + When the connection has been established, a TLS handshake will be done if either + ``ssl_context`` or ``tls_hostname`` is not ``None``, or if ``tls`` is ``True``. 
+ + :param remote_host: the IP address or host name to connect to + :param remote_port: port on the target host to connect to + :param local_host: the interface address or name to bind the socket to before connecting + :param tls: ``True`` to do a TLS handshake with the connected stream and return a + :class:`~anyio.streams.tls.TLSStream` instead + :param ssl_context: the SSL context object to use (if omitted, a default context is created) + :param tls_standard_compatible: If ``True``, performs the TLS shutdown handshake before closing + the stream and requires that the server does this as well. Otherwise, + :exc:`~ssl.SSLEOFError` may be raised during reads from the stream. + Some protocols, such as HTTP, require this option to be ``False``. + See :meth:`~ssl.SSLContext.wrap_socket` for details. + :param tls_hostname: host name to check the server certificate against (defaults to the value + of ``remote_host``) + :param happy_eyeballs_delay: delay (in seconds) before starting the next connection attempt + :return: a socket stream object if no TLS handshake was done, otherwise a TLS stream + :raises OSError: if the connection attempt fails + + """ + # Placed here due to https://github.com/python/mypy/issues/7057 + connected_stream: Optional[SocketStream] = None + + async def try_connect(remote_host: str, event: Event) -> None: + nonlocal connected_stream + try: + stream = await asynclib.connect_tcp(remote_host, remote_port, local_address) + except OSError as exc: + oserrors.append(exc) + return + else: + if connected_stream is None: + connected_stream = stream + tg.cancel_scope.cancel() + else: + await stream.aclose() + finally: + event.set() + + asynclib = get_asynclib() + local_address: Optional[IPSockAddrType] = None + family = socket.AF_UNSPEC + if local_host: + gai_res = await getaddrinfo(str(local_host), None) + family, *_, local_address = gai_res[0] + + target_host = str(remote_host) + try: + addr_obj = ip_address(remote_host) + except ValueError: + # 
getaddrinfo() will raise an exception if name resolution fails + gai_res = await getaddrinfo(target_host, remote_port, family=family, + type=socket.SOCK_STREAM) + + # Organize the list so that the first address is an IPv6 address (if available) and the + # second one is an IPv4 addresses. The rest can be in whatever order. + v6_found = v4_found = False + target_addrs: List[Tuple[socket.AddressFamily, str]] = [] + for af, *rest, sa in gai_res: + if af == socket.AF_INET6 and not v6_found: + v6_found = True + target_addrs.insert(0, (af, sa[0])) + elif af == socket.AF_INET and not v4_found and v6_found: + v4_found = True + target_addrs.insert(1, (af, sa[0])) + else: + target_addrs.append((af, sa[0])) + else: + if isinstance(addr_obj, IPv6Address): + target_addrs = [(socket.AF_INET6, addr_obj.compressed)] + else: + target_addrs = [(socket.AF_INET, addr_obj.compressed)] + + oserrors: List[OSError] = [] + async with create_task_group() as tg: + for i, (af, addr) in enumerate(target_addrs): + event = Event() + tg.start_soon(try_connect, addr, event) + with move_on_after(happy_eyeballs_delay): + await event.wait() + + if connected_stream is None: + cause = oserrors[0] if len(oserrors) == 1 else asynclib.ExceptionGroup(oserrors) + raise OSError('All connection attempts failed') from cause + + if tls or tls_hostname or ssl_context: + try: + return await TLSStream.wrap(connected_stream, server_side=False, + hostname=tls_hostname or str(remote_host), + ssl_context=ssl_context, + standard_compatible=tls_standard_compatible) + except BaseException: + await aclose_forcefully(connected_stream) + raise + + return connected_stream + + +async def connect_unix(path: Union[str, 'PathLike[str]']) -> UNIXSocketStream: + """ + Connect to the given UNIX socket. + + Not available on Windows. 
+ + :param path: path to the socket + :return: a socket stream object + + """ + path = str(Path(path)) + return await get_asynclib().connect_unix(path) + + +async def create_tcp_listener( + *, local_host: Optional[IPAddressType] = None, local_port: int = 0, + family: AnyIPAddressFamily = socket.AddressFamily.AF_UNSPEC, backlog: int = 65536, + reuse_port: bool = False +) -> MultiListener[SocketStream]: + """ + Create a TCP socket listener. + + :param local_port: port number to listen on + :param local_host: IP address of the interface to listen on. If omitted, listen on all IPv4 + and IPv6 interfaces. To listen on all interfaces on a specific address family, use + ``0.0.0.0`` for IPv4 or ``::`` for IPv6. + :param family: address family (used if ``interface`` was omitted) + :param backlog: maximum number of queued incoming connections (up to a maximum of 2**16, or + 65536) + :param reuse_port: ``True`` to allow multiple sockets to bind to the same address/port + (not supported on Windows) + :return: a list of listener objects + + """ + asynclib = get_asynclib() + backlog = min(backlog, 65536) + local_host = str(local_host) if local_host is not None else None + gai_res = await getaddrinfo(local_host, local_port, family=family, # type: ignore[arg-type] + type=socket.SOCK_STREAM, + flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG) + listeners: List[SocketListener] = [] + try: + # The set() is here to work around a glibc bug: + # https://sourceware.org/bugzilla/show_bug.cgi?id=14969 + for fam, *_, sockaddr in sorted(set(gai_res)): + raw_socket = socket.socket(fam) + raw_socket.setblocking(False) + + # For Windows, enable exclusive address use. For others, enable address reuse. 
async def create_unix_listener(
        path: Union[str, 'PathLike[str]'], *, mode: Optional[int] = None,
        backlog: int = 65536) -> SocketListener:
    """
    Create a UNIX socket listener.

    Not available on Windows.

    :param path: path of the socket
    :param mode: permissions to set on the socket
    :param backlog: maximum number of queued incoming connections (up to a maximum of 2**16, or
        65536)
    :return: a listener object

    .. versionchanged:: 3.0
        If a socket already exists on the file system in the given path, it will be removed first.

    """
    sock_path = str(path)
    fs_path = Path(path)
    # Remove a stale socket file left behind by a previous listener at the same path
    if fs_path.is_socket():
        fs_path.unlink()

    raw_socket = socket.socket(socket.AF_UNIX)
    raw_socket.setblocking(False)
    try:
        # bind() and chmod() touch the file system and may block, so run them in a
        # worker thread; cancellable=True lets a cancelled caller abandon the wait
        await to_thread.run_sync(raw_socket.bind, sock_path, cancellable=True)
        if mode is not None:
            await to_thread.run_sync(chmod, sock_path, mode, cancellable=True)

        # Cap the backlog at the maximum the kernel interface supports
        raw_socket.listen(min(backlog, 65536))
        return get_asynclib().UNIXSocketListener(raw_socket)
    except BaseException:
        # Never leak the file descriptor on failure
        raw_socket.close()
        raise
async def create_udp_socket(
    family: AnyIPAddressFamily = AddressFamily.AF_UNSPEC, *,
    local_host: Optional[IPAddressType] = None, local_port: int = 0, reuse_port: bool = False
) -> UDPSocket:
    """
    Create a UDP socket.

    If ``port`` has been given, the socket will be bound to this port on the local machine,
    making this socket suitable for providing UDP based services.

    :param family: address family (``AF_INET`` or ``AF_INET6``) – automatically determined from
        ``local_host`` if omitted
    :param local_host: IP address or host name of the local interface to bind to
    :param local_port: local port to bind to
    :param reuse_port: ``True`` to allow multiple sockets to bind to the same address/port
        (not supported on Windows)
    :return: a UDP socket

    """
    # Without either a family or a local host there is no way to pick an address family
    if family is AddressFamily.AF_UNSPEC and not local_host:
        raise ValueError('Either "family" or "local_host" must be given')

    if local_host:
        # Resolve the requested interface; the first result also pins down the family
        resolved = await getaddrinfo(str(local_host), local_port, family=family,
                                     type=socket.SOCK_DGRAM,
                                     flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG)
        family = cast(AnyIPAddressFamily, resolved[0][0])
        local_address = resolved[0][-1]
    else:
        # No interface given: bind the wildcard address for the requested family
        wildcard = '::' if family is AddressFamily.AF_INET6 else '0.0.0.0'
        local_address = (wildcard, 0)

    return await get_asynclib().create_udp_socket(family, local_address, None, reuse_port)
async def create_connected_udp_socket(
    remote_host: IPAddressType, remote_port: int, *,
    family: AnyIPAddressFamily = AddressFamily.AF_UNSPEC,
    local_host: Optional[IPAddressType] = None, local_port: int = 0, reuse_port: bool = False
) -> ConnectedUDPSocket:
    """
    Create a connected UDP socket.

    Connected UDP sockets can only communicate with the specified remote host/port, and any
    packets sent from other sources are dropped.

    :param remote_host: remote host to set as the default target
    :param remote_port: port on the remote host to set as the default target
    :param family: address family (``AF_INET`` or ``AF_INET6``) – automatically determined from
        ``local_host`` or ``remote_host`` if omitted
    :param local_host: IP address or host name of the local interface to bind to
    :param local_port: local port to bind to
    :param reuse_port: ``True`` to allow multiple sockets to bind to the same address/port
        (not supported on Windows)
    :return: a connected UDP socket

    """
    local_address = None
    if local_host:
        # Resolving the local interface first lets it constrain the address family
        local_res = await getaddrinfo(str(local_host), local_port, family=family,
                                      type=socket.SOCK_DGRAM,
                                      flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG)
        family = cast(AnyIPAddressFamily, local_res[0][0])
        local_address = local_res[0][-1]

    remote_res = await getaddrinfo(str(remote_host), remote_port, family=family,
                                   type=socket.SOCK_DGRAM)
    family = cast(AnyIPAddressFamily, remote_res[0][0])
    remote_address = remote_res[0][-1]

    return await get_asynclib().create_udp_socket(family, local_address, remote_address,
                                                  reuse_port)
async def getaddrinfo(host: Union[bytearray, bytes, str], port: Union[str, int, None], *,
                      family: Union[int, AddressFamily] = 0, type: Union[int, SocketKind] = 0,
                      proto: int = 0, flags: int = 0) -> GetAddrInfoReturnType:
    """
    Look up a numeric IP address given a host name.

    Internationalized domain names are translated according to the (non-transitional) IDNA 2008
    standard.

    .. note:: 4-tuple IPv6 socket addresses are automatically converted to 2-tuples of
        (host, port), unlike what :func:`socket.getaddrinfo` does.

    :param host: host name
    :param port: port number
    :param family: socket family (``AF_INET``, ...)
    :param type: socket type (``SOCK_STREAM``, ...)
    :param proto: protocol number
    :param flags: flags to pass to upstream ``getaddrinfo()``
    :return: list of tuples containing (family, type, proto, canonname, sockaddr)

    .. seealso:: :func:`socket.getaddrinfo`

    """
    # Handle unicode hostnames
    if isinstance(host, str):
        try:
            # Fast path: pure-ASCII host names need no IDNA processing
            encoded_host = host.encode('ascii')
        except UnicodeEncodeError:
            # Non-ASCII name: encode per IDNA 2008 / UTS 46; the third-party "idna"
            # package is imported lazily so it is only required for such names
            import idna
            encoded_host = idna.encode(host, uts46=True)
    else:
        # bytes/bytearray hosts are assumed to be pre-encoded and passed through as-is
        encoded_host = host

    gai_res = await get_asynclib().getaddrinfo(encoded_host, port, family=family, type=type,
                                               proto=proto, flags=flags)
    # Normalize any 4-tuple IPv6 sockaddrs into (host, port) 2-tuples (see note above)
    return [(family, type, proto, canonname, convert_ipv6_sockaddr(sockaddr))
            for family, type, proto, canonname, sockaddr in gai_res]
def convert_ipv6_sockaddr(
    sockaddr: Union[Tuple[str, int, int, int], Tuple[str, int]]
) -> Tuple[str, int]:
    """
    Convert a 4-tuple IPv6 socket address to a 2-tuple (address, port) format.

    If the scope ID is nonzero, it is added to the address, separated with ``%``.
    Otherwise the flow id and scope id are simply cut off from the tuple.
    Any other kinds of socket addresses are returned as-is.

    :param sockaddr: the result of :meth:`~socket.socket.getsockname`
    :return: the converted socket address

    """
    # Anything that is not a 4-tuple (e.g. an IPv4 2-tuple) passes through untouched.
    # The explicit isinstance/len check (rather than structural unpacking) keeps MyPy happy.
    if not (isinstance(sockaddr, tuple) and len(sockaddr) == 4):
        return cast(Tuple[str, int], sockaddr)

    host, port, _flowinfo, scope_id = cast(Tuple[str, int, int, int], sockaddr)
    # A nonzero scope id is folded into the host portion ("host%scope" notation)
    return (f"{host}%{scope_id}", port) if scope_id else (host, port)
@overload
def create_memory_object_stream(
    max_buffer_size: float, item_type: Type[T_Item]
) -> Tuple[MemoryObjectSendStream[T_Item], MemoryObjectReceiveStream[T_Item]]:
    ...


@overload
def create_memory_object_stream(
    max_buffer_size: float = 0
) -> Tuple[MemoryObjectSendStream[Any], MemoryObjectReceiveStream[Any]]:
    ...


def create_memory_object_stream(
    max_buffer_size: float = 0, item_type: Optional[Type[T_Item]] = None
) -> Tuple[MemoryObjectSendStream[Any], MemoryObjectReceiveStream[Any]]:
    """
    Create a memory object stream.

    :param max_buffer_size: number of items held in the buffer until ``send()`` starts blocking
    :param item_type: type of item, for marking the streams with the right generic type for
        static typing (not used at run time)
    :return: a tuple of (send stream, receive stream)

    """
    # The buffer size must be a non-negative integer, or math.inf for an unbounded buffer
    size_is_valid = max_buffer_size == math.inf or isinstance(max_buffer_size, int)
    if not size_is_valid:
        raise ValueError('max_buffer_size must be either an integer or math.inf')
    if max_buffer_size < 0:
        raise ValueError('max_buffer_size cannot be negative')

    # Both endpoints share a single state object holding the buffer and waiting tasks
    shared_state: MemoryObjectStreamState = MemoryObjectStreamState(max_buffer_size)
    return MemoryObjectSendStream(shared_state), MemoryObjectReceiveStream(shared_state)
async def run_process(command: Union[str, Sequence[str]], *, input: Optional[bytes] = None,
                      stdout: int = PIPE, stderr: int = PIPE, check: bool = True,
                      cwd: Union[str, bytes, 'PathLike[str]', None] = None,
                      env: Optional[Mapping[str, str]] = None, start_new_session: bool = False,
                      ) -> 'CompletedProcess[bytes]':
    """
    Run an external command in a subprocess and wait until it completes.

    .. seealso:: :func:`subprocess.run`

    :param command: either a string to pass to the shell, or an iterable of strings containing the
        executable name or path and its arguments
    :param input: bytes passed to the standard input of the subprocess
    :param stdout: either :data:`subprocess.PIPE` or :data:`subprocess.DEVNULL`
    :param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL` or
        :data:`subprocess.STDOUT`
    :param check: if ``True``, raise :exc:`~subprocess.CalledProcessError` if the process
        terminates with a return code other than 0
    :param cwd: If not ``None``, change the working directory to this before running the command
    :param env: if not ``None``, this mapping replaces the inherited environment variables from the
        parent process
    :param start_new_session: if ``true`` the setsid() system call will be made in the child
        process prior to the execution of the subprocess. (POSIX only)
    :return: an object representing the completed process
    :raises ~subprocess.CalledProcessError: if ``check`` is ``True`` and the process exits with a
        nonzero return code

    """
    async def drain_stream(stream: AsyncIterable[bytes], index: int) -> None:
        # Accumulate everything the child writes on one stream; the result is stored
        # into the enclosing stream_contents list (index 0 = stdout, 1 = stderr)
        buffer = BytesIO()
        async for chunk in stream:
            buffer.write(chunk)

        stream_contents[index] = buffer.getvalue()

    # stdin is only a pipe when there is input to deliver; otherwise it is /dev/null
    async with await open_process(command, stdin=PIPE if input else DEVNULL, stdout=stdout,
                                  stderr=stderr, cwd=cwd, env=env,
                                  start_new_session=start_new_session) as process:
        stream_contents: List[Optional[bytes]] = [None, None]
        try:
            # Drain stdout/stderr concurrently while feeding stdin, so a full pipe
            # buffer on either side cannot deadlock the child
            async with create_task_group() as tg:
                if process.stdout:
                    tg.start_soon(drain_stream, process.stdout, 0)
                if process.stderr:
                    tg.start_soon(drain_stream, process.stderr, 1)
                if process.stdin and input:
                    await process.stdin.send(input)
                    await process.stdin.aclose()

                await process.wait()
        except BaseException:
            # On cancellation or any other failure, do not leave the child running
            process.kill()
            raise

    output, errors = stream_contents
    if check and process.returncode != 0:
        raise CalledProcessError(cast(int, process.returncode), command, output, errors)

    return CompletedProcess(command, cast(int, process.returncode), output, errors)
async def open_process(command: Union[str, Sequence[str]], *, stdin: int = PIPE,
                       stdout: int = PIPE, stderr: int = PIPE,
                       cwd: Union[str, bytes, 'PathLike[str]', None] = None,
                       env: Optional[Mapping[str, str]] = None,
                       start_new_session: bool = False) -> Process:
    """
    Start an external command in a subprocess.

    .. seealso:: :class:`subprocess.Popen`

    :param command: either a string to pass to the shell, or an iterable of strings containing the
        executable name or path and its arguments
    :param stdin: either :data:`subprocess.PIPE` or :data:`subprocess.DEVNULL`
    :param stdout: either :data:`subprocess.PIPE` or :data:`subprocess.DEVNULL`
    :param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL` or
        :data:`subprocess.STDOUT`
    :param cwd: If not ``None``, the working directory is changed before executing
    :param env: If env is not ``None``, it must be a mapping that defines the environment
        variables for the new process
    :param start_new_session: if ``true`` the setsid() system call will be made in the child
        process prior to the execution of the subprocess. (POSIX only)
    :return: an asynchronous process object

    """
    # A plain string is interpreted by the shell; a sequence is exec'd directly
    run_via_shell = isinstance(command, str)
    backend = get_asynclib()
    return await backend.open_process(command, shell=run_via_shell, stdin=stdin, stdout=stdout,
                                      stderr=stderr, cwd=cwd, env=env,
                                      start_new_session=start_new_session)
class Event:
    """
    An asynchronous flag that tasks can wait on until it is set.

    This class is a facade: instantiating it returns a backend-specific implementation
    (see ``__new__``), so the method bodies below only define the interface.
    """

    def __new__(cls) -> 'Event':
        # Dispatch to the concrete Event of the currently active async backend
        return get_asynclib().Event()

    def set(self) -> DeprecatedAwaitable:
        """Set the flag, notifying all listeners."""
        raise NotImplementedError

    def is_set(self) -> bool:
        """Return ``True`` if the flag is set, ``False`` if not."""
        raise NotImplementedError

    async def wait(self) -> None:
        """
        Wait until the flag has been set.

        If the flag has already been set when this method is called, it returns immediately.

        """
        raise NotImplementedError

    def statistics(self) -> EventStatistics:
        """Return statistics about the current state of this event."""
        raise NotImplementedError
class Lock:
    """A mutual-exclusion lock with FIFO handoff to waiting tasks."""

    # Class-level default; release() deletes the instance attribute to fall back to this
    _owner_task: Optional[TaskInfo] = None

    def __init__(self) -> None:
        # FIFO queue of (task, event) pairs waiting for the lock
        self._waiters: Deque[Tuple[TaskInfo, Event]] = deque()

    async def __aenter__(self) -> None:
        await self.acquire()

    async def __aexit__(self, exc_type: Optional[Type[BaseException]],
                        exc_val: Optional[BaseException],
                        exc_tb: Optional[TracebackType]) -> None:
        self.release()

    async def acquire(self) -> None:
        """Acquire the lock."""
        await checkpoint_if_cancelled()
        try:
            self.acquire_nowait()
        except WouldBlock:
            task = get_current_task()
            event = Event()
            token = task, event
            self._waiters.append(token)
            try:
                await event.wait()
            except BaseException:
                # Cancelled (or otherwise interrupted) while waiting:
                # - if our event was never set, we are still queued; dequeue ourselves
                # - if it WAS set, the lock was already handed to us, so give it back
                if not event.is_set():
                    self._waiters.remove(token)
                elif self._owner_task == task:
                    self.release()

                raise

            # release() hands ownership over before setting the event
            assert self._owner_task == task
        else:
            # Uncontended acquisition still yields to the event loop, but shielded so
            # a cancellation here cannot leave the lock held with nobody inside
            try:
                await cancel_shielded_checkpoint()
            except BaseException:
                self.release()
                raise

    def acquire_nowait(self) -> None:
        """
        Acquire the lock, without blocking.

        :raises ~WouldBlock: if the operation would block

        """
        task = get_current_task()
        if self._owner_task == task:
            # This lock is not reentrant
            raise RuntimeError('Attempted to acquire an already held Lock')

        if self._owner_task is not None:
            raise WouldBlock

        self._owner_task = task

    def release(self) -> DeprecatedAwaitable:
        """Release the lock."""
        if self._owner_task != get_current_task():
            raise RuntimeError('The current task is not holding this lock')

        if self._waiters:
            # Hand the lock directly to the oldest waiter, then wake it
            self._owner_task, event = self._waiters.popleft()
            event.set()
        else:
            # Removing the instance attribute re-exposes the class-level None default
            del self._owner_task

        return DeprecatedAwaitable(self.release)

    def locked(self) -> bool:
        """Return True if the lock is currently held."""
        return self._owner_task is not None

    def statistics(self) -> LockStatistics:
        """
        Return statistics about the current state of this lock.

        .. versionadded:: 3.0
        """
        return LockStatistics(self.locked(), self._owner_task, len(self._waiters))
class Condition:
    """A condition variable layered on top of a :class:`Lock`."""

    _owner_task: Optional[TaskInfo] = None

    def __init__(self, lock: Optional[Lock] = None):
        self._lock = lock or Lock()
        # Events of tasks currently blocked in wait()
        self._waiters: Deque[Event] = deque()

    async def __aenter__(self) -> None:
        await self.acquire()

    async def __aexit__(self, exc_type: Optional[Type[BaseException]],
                        exc_val: Optional[BaseException],
                        exc_tb: Optional[TracebackType]) -> None:
        self.release()

    def _check_acquired(self) -> None:
        # NOTE(review): release() never resets _owner_task, so this check may still pass
        # for the same task after it has released the lock — confirm against upstream
        if self._owner_task != get_current_task():
            raise RuntimeError('The current task is not holding the underlying lock')

    async def acquire(self) -> None:
        """Acquire the underlying lock."""
        await self._lock.acquire()
        self._owner_task = get_current_task()

    def acquire_nowait(self) -> None:
        """
        Acquire the underlying lock, without blocking.

        :raises ~WouldBlock: if the operation would block

        """
        self._lock.acquire_nowait()
        self._owner_task = get_current_task()

    def release(self) -> DeprecatedAwaitable:
        """Release the underlying lock."""
        self._lock.release()
        return DeprecatedAwaitable(self.release)

    def locked(self) -> bool:
        """Return True if the lock is set."""
        return self._lock.locked()

    def notify(self, n: int = 1) -> None:
        """Notify exactly n listeners."""
        self._check_acquired()
        for _ in range(n):
            try:
                event = self._waiters.popleft()
            except IndexError:
                # Fewer than n tasks are waiting; that's fine
                break

            event.set()

    def notify_all(self) -> None:
        """Notify all the listeners."""
        self._check_acquired()
        for event in self._waiters:
            event.set()

        self._waiters.clear()

    async def wait(self) -> None:
        """Wait for a notification."""
        await checkpoint()
        event = Event()
        self._waiters.append(event)
        # The lock must be released while waiting, per the usual condition-variable contract
        self.release()
        try:
            await event.wait()
        except BaseException:
            # Cancelled before being notified: remove ourselves from the wait queue
            if not event.is_set():
                self._waiters.remove(event)

            raise
        finally:
            # Always reacquire the lock before returning, even when cancelled;
            # the shield prevents the reacquisition itself from being cancelled
            with CancelScope(shield=True):
                await self.acquire()

    def statistics(self) -> ConditionStatistics:
        """
        Return statistics about the current state of this condition.

        .. versionadded:: 3.0
        """
        return ConditionStatistics(len(self._waiters), self._lock.statistics())
class Semaphore:
    """A counting semaphore, optionally bounded by ``max_value``."""

    def __init__(self, initial_value: int, *, max_value: Optional[int] = None):
        # Validate in the same order as before so mixed-bad inputs raise identically
        if not isinstance(initial_value, int):
            raise TypeError('initial_value must be an integer')
        if initial_value < 0:
            raise ValueError('initial_value must be >= 0')
        if max_value is not None:
            if not isinstance(max_value, int):
                raise TypeError('max_value must be an integer or None')
            if max_value < initial_value:
                raise ValueError('max_value must be equal to or higher than initial_value')

        self._value = initial_value
        self._max_value = max_value
        # FIFO queue of events belonging to tasks blocked in acquire()
        self._waiters: Deque[Event] = deque()

    async def __aenter__(self) -> 'Semaphore':
        await self.acquire()
        return self

    async def __aexit__(self, exc_type: Optional[Type[BaseException]],
                        exc_val: Optional[BaseException],
                        exc_tb: Optional[TracebackType]) -> None:
        self.release()

    async def acquire(self) -> None:
        """Decrement the semaphore value, blocking if necessary."""
        await checkpoint_if_cancelled()
        try:
            self.acquire_nowait()
        except WouldBlock:
            waiter = Event()
            self._waiters.append(waiter)
            try:
                await waiter.wait()
            except BaseException:
                if waiter.is_set():
                    # A token was already handed to us; pass it on to the next waiter
                    self.release()
                else:
                    # Still queued: withdraw from the wait queue
                    self._waiters.remove(waiter)

                raise
        else:
            # Uncontended path still yields to the event loop, shielded from cancellation
            try:
                await cancel_shielded_checkpoint()
            except BaseException:
                self.release()
                raise

    def acquire_nowait(self) -> None:
        """
        Acquire the underlying lock, without blocking.

        :raises ~WouldBlock: if the operation would block

        """
        if not self._value:
            raise WouldBlock

        self._value -= 1

    def release(self) -> 'DeprecatedAwaitable':
        """Increment the semaphore value."""
        if self._max_value is not None and self._value == self._max_value:
            raise ValueError('semaphore released too many times')

        if self._waiters:
            # Hand the token directly to the oldest waiter instead of bumping the counter
            self._waiters.popleft().set()
        else:
            self._value += 1

        return DeprecatedAwaitable(self.release)

    @property
    def value(self) -> int:
        """The current value of the semaphore."""
        return self._value

    @property
    def max_value(self) -> Optional[int]:
        """The maximum value of the semaphore."""
        return self._max_value

    def statistics(self) -> 'SemaphoreStatistics':
        """
        Return statistics about the current state of this semaphore.

        .. versionadded:: 3.0
        """
        return SemaphoreStatistics(len(self._waiters))
class CapacityLimiter:
    """
    Limits the number of concurrent borrowers of a fixed pool of tokens.

    This class is a facade: instantiating it returns a backend-specific implementation
    (see ``__new__``), so the method bodies below only define the interface.
    """

    def __new__(cls, total_tokens: float) -> 'CapacityLimiter':
        # Dispatch to the concrete limiter of the currently active async backend
        return get_asynclib().CapacityLimiter(total_tokens)

    async def __aenter__(self) -> None:
        raise NotImplementedError

    async def __aexit__(self, exc_type: Optional[Type[BaseException]],
                        exc_val: Optional[BaseException],
                        exc_tb: Optional[TracebackType]) -> Optional[bool]:
        raise NotImplementedError

    @property
    def total_tokens(self) -> float:
        """
        The total number of tokens available for borrowing.

        This is a read-write property. If the total number of tokens is increased, the
        proportionate number of tasks waiting on this limiter will be granted their tokens.

        .. versionchanged:: 3.0
            The property is now writable.

        """
        raise NotImplementedError

    @total_tokens.setter
    def total_tokens(self, value: float) -> None:
        raise NotImplementedError

    async def set_total_tokens(self, value: float) -> None:
        """
        Deprecated setter; assign to ``total_tokens`` instead.

        :param value: the new total number of tokens

        .. deprecated:: 3.0
            Set the value of the ``total_tokens`` attribute directly.

        """
        # Bug fix: the two adjacent string literals were previously concatenated without
        # a separating space, yielding '...value of the"total_tokens" attribute...'
        warn('CapacityLimiter.set_total_tokens has been deprecated. Set the value of the '
             '"total_tokens" attribute directly.', DeprecationWarning)
        self.total_tokens = value

    @property
    def borrowed_tokens(self) -> int:
        """The number of tokens that have currently been borrowed."""
        raise NotImplementedError

    @property
    def available_tokens(self) -> float:
        """The number of tokens currently available to be borrowed"""
        raise NotImplementedError

    def acquire_nowait(self) -> 'DeprecatedAwaitable':
        """
        Acquire a token for the current task without waiting for one to become available.

        :raises ~anyio.WouldBlock: if there are no tokens available for borrowing

        """
        raise NotImplementedError

    def acquire_on_behalf_of_nowait(self, borrower: object) -> 'DeprecatedAwaitable':
        """
        Acquire a token without waiting for one to become available.

        :param borrower: the entity borrowing a token
        :raises ~anyio.WouldBlock: if there are no tokens available for borrowing

        """
        raise NotImplementedError

    async def acquire(self) -> None:
        """
        Acquire a token for the current task, waiting if necessary for one to become available.

        """
        raise NotImplementedError

    async def acquire_on_behalf_of(self, borrower: object) -> None:
        """
        Acquire a token, waiting if necessary for one to become available.

        :param borrower: the entity borrowing a token

        """
        raise NotImplementedError

    def release(self) -> None:
        """
        Release the token held by the current task.

        :raises RuntimeError: if the current task has not borrowed a token from this limiter.

        """
        raise NotImplementedError

    def release_on_behalf_of(self, borrower: object) -> None:
        """
        Release the token held by the given borrower.

        :raises RuntimeError: if the borrower has not borrowed a token from this limiter.

        """
        raise NotImplementedError

    def statistics(self) -> 'CapacityLimiterStatistics':
        """
        Return statistics about the current state of this limiter.

        .. versionadded:: 3.0

        """
        raise NotImplementedError
def create_lock() -> Lock:
    """
    Create an asynchronous lock.

    :return: a lock object

    .. deprecated:: 3.0
        Use :class:`~Lock` directly.

    """
    # Deprecated factory kept for anyio 2.x compatibility
    warn('create_lock() is deprecated -- use Lock() directly', DeprecationWarning)
    lock = Lock()
    return lock
class ResourceGuard:
    """
    Non-blocking re-entrancy guard: entering while another task is already inside
    raises ``BusyResourceError`` with the stored action verb.
    """

    __slots__ = ('action', '_guarded')

    def __init__(self, action: str):
        # Verb describing the guarded operation; used in the error message
        self.action = action
        # True while some task is inside the guard
        self._guarded = False

    def __enter__(self) -> None:
        if self._guarded:
            raise BusyResourceError(self.action)
        self._guarded = True

    def __exit__(self, exc_type: Optional[Type[BaseException]],
                 exc_val: Optional[BaseException],
                 exc_tb: Optional[TracebackType]) -> Optional[bool]:
        self._guarded = False
        return None
class CancelScope(DeprecatedAsyncContextManager['CancelScope']):
    """
    Wraps a unit of work that can be made separately cancellable.

    :param deadline: The time (clock value) when this scope is cancelled automatically
    :param shield: ``True`` to shield the cancel scope from external cancellation
    """

    # This class is a facade: __new__ returns the backend-specific implementation,
    # so every method body below only defines the interface.
    def __new__(cls, *, deadline: float = math.inf, shield: bool = False) -> 'CancelScope':
        return get_asynclib().CancelScope(shield=shield, deadline=deadline)

    def cancel(self) -> DeprecatedAwaitable:
        """Cancel this scope immediately."""
        raise NotImplementedError

    @property
    def deadline(self) -> float:
        """
        The time (clock value) when this scope is cancelled automatically.

        Will be ``float('inf')`` if no timeout has been set.

        """
        raise NotImplementedError

    @deadline.setter
    def deadline(self, value: float) -> None:
        raise NotImplementedError

    @property
    def cancel_called(self) -> bool:
        """``True`` if :meth:`cancel` has been called."""
        raise NotImplementedError

    @property
    def shield(self) -> bool:
        """
        ``True`` if this scope is shielded from external cancellation.

        While a scope is shielded, it will not receive cancellations from outside.

        """
        raise NotImplementedError

    @shield.setter
    def shield(self, value: bool) -> None:
        raise NotImplementedError

    def __enter__(self) -> 'CancelScope':
        raise NotImplementedError

    def __exit__(self, exc_type: Optional[Type[BaseException]],
                 exc_val: Optional[BaseException],
                 exc_tb: Optional[TracebackType]) -> Optional[bool]:
        raise NotImplementedError
+ + """ + warn('open_cancel_scope() is deprecated -- use CancelScope() directly', DeprecationWarning) + return get_asynclib().CancelScope(shield=shield) + + +class FailAfterContextManager(DeprecatedAsyncContextManager[CancelScope]): + def __init__(self, cancel_scope: CancelScope): + self._cancel_scope = cancel_scope + + def __enter__(self) -> CancelScope: + return self._cancel_scope.__enter__() + + def __exit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Optional[bool]: + retval = self._cancel_scope.__exit__(exc_type, exc_val, exc_tb) + if self._cancel_scope.cancel_called: + raise TimeoutError + + return retval + + +def fail_after(delay: Optional[float], shield: bool = False) -> FailAfterContextManager: + """ + Create a context manager which raises a :class:`TimeoutError` if does not finish in time. + + :param delay: maximum allowed time (in seconds) before raising the exception, or ``None`` to + disable the timeout + :param shield: ``True`` to shield the cancel scope from external cancellation + :return: a context manager that yields a cancel scope + :rtype: :class:`~typing.ContextManager`\\[:class:`~anyio.abc.CancelScope`\\] + + """ + deadline = (get_asynclib().current_time() + delay) if delay is not None else math.inf + cancel_scope = get_asynclib().CancelScope(deadline=deadline, shield=shield) + return FailAfterContextManager(cancel_scope) + + +def move_on_after(delay: Optional[float], shield: bool = False) -> CancelScope: + """ + Create a cancel scope with a deadline that expires after the given delay. 
+ + :param delay: maximum allowed time (in seconds) before exiting the context block, or ``None`` + to disable the timeout + :param shield: ``True`` to shield the cancel scope from external cancellation + :return: a cancel scope + + """ + deadline = (get_asynclib().current_time() + delay) if delay is not None else math.inf + return get_asynclib().CancelScope(deadline=deadline, shield=shield) + + +def current_effective_deadline() -> DeprecatedAwaitableFloat: + """ + Return the nearest deadline among all the cancel scopes effective for the current task. + + :return: a clock value from the event loop's internal clock (``float('inf')`` if there is no + deadline in effect) + :rtype: float + + """ + return DeprecatedAwaitableFloat(get_asynclib().current_effective_deadline(), + current_effective_deadline) + + +def create_task_group() -> 'TaskGroup': + """ + Create a task group. + + :return: a task group + + """ + return get_asynclib().TaskGroup() diff --git a/.venv/lib/python3.9/site-packages/anyio/_core/_testing.py b/.venv/lib/python3.9/site-packages/anyio/_core/_testing.py new file mode 100644 index 0000000..d977eff --- /dev/null +++ b/.venv/lib/python3.9/site-packages/anyio/_core/_testing.py @@ -0,0 +1,75 @@ +from typing import Any, Awaitable, Generator, Optional, Union + +from ._compat import DeprecatedAwaitableList, _warn_deprecation +from ._eventloop import get_asynclib + + +class TaskInfo: + """ + Represents an asynchronous task. 
+ + :ivar int id: the unique identifier of the task + :ivar parent_id: the identifier of the parent task, if any + :vartype parent_id: Optional[int] + :ivar str name: the description of the task (if any) + :ivar ~collections.abc.Coroutine coro: the coroutine object of the task + """ + + __slots__ = '_name', 'id', 'parent_id', 'name', 'coro' + + def __init__(self, id: int, parent_id: Optional[int], name: Optional[str], + coro: Union[Generator, Awaitable[Any]]): + func = get_current_task + self._name = f'{func.__module__}.{func.__qualname__}' + self.id: int = id + self.parent_id: Optional[int] = parent_id + self.name: Optional[str] = name + self.coro: Union[Generator, Awaitable[Any]] = coro + + def __eq__(self, other: object) -> bool: + if isinstance(other, TaskInfo): + return self.id == other.id + + return NotImplemented + + def __hash__(self) -> int: + return hash(self.id) + + def __repr__(self) -> str: + return f'{self.__class__.__name__}(id={self.id!r}, name={self.name!r})' + + def __await__(self) -> Generator[None, None, "TaskInfo"]: + _warn_deprecation(self) + if False: + yield + + return self + + def _unwrap(self) -> 'TaskInfo': + return self + + +def get_current_task() -> TaskInfo: + """ + Return the current task. + + :return: a representation of the current task + + """ + return get_asynclib().get_current_task() + + +def get_running_tasks() -> DeprecatedAwaitableList[TaskInfo]: + """ + Return a list of running tasks in the current event loop. 
+ + :return: a list of task info objects + + """ + tasks = get_asynclib().get_running_tasks() + return DeprecatedAwaitableList(tasks, func=get_running_tasks) + + +async def wait_all_tasks_blocked() -> None: + """Wait until all other tasks are waiting for something.""" + await get_asynclib().wait_all_tasks_blocked() diff --git a/.venv/lib/python3.9/site-packages/anyio/_core/_typedattr.py b/.venv/lib/python3.9/site-packages/anyio/_core/_typedattr.py new file mode 100644 index 0000000..797287d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/anyio/_core/_typedattr.py @@ -0,0 +1,79 @@ +import sys +from typing import Any, Callable, Dict, Mapping, TypeVar, Union, overload + +from ._exceptions import TypedAttributeLookupError + +if sys.version_info >= (3, 8): + from typing import final +else: + from typing_extensions import final + +T_Attr = TypeVar('T_Attr') +T_Default = TypeVar('T_Default') +undefined = object() + + +def typed_attribute() -> Any: + """Return a unique object, used to mark typed attributes.""" + return object() + + +class TypedAttributeSet: + """ + Superclass for typed attribute collections. + + Checks that every public attribute of every subclass has a type annotation. + """ + + def __init_subclass__(cls) -> None: + annotations: Dict[str, Any] = getattr(cls, '__annotations__', {}) + for attrname in dir(cls): + if not attrname.startswith('_') and attrname not in annotations: + raise TypeError(f'Attribute {attrname!r} is missing its type annotation') + + super().__init_subclass__() + + +class TypedAttributeProvider: + """Base class for classes that wish to provide typed extra attributes.""" + + @property + def extra_attributes(self) -> Mapping[T_Attr, Callable[[], T_Attr]]: + """ + A mapping of the extra attributes to callables that return the corresponding values. 
+ + If the provider wraps another provider, the attributes from that wrapper should also be + included in the returned mapping (but the wrapper may override the callables from the + wrapped instance). + + """ + return {} + + @overload + def extra(self, attribute: T_Attr) -> T_Attr: + ... + + @overload + def extra(self, attribute: T_Attr, default: T_Default) -> Union[T_Attr, T_Default]: + ... + + @final + def extra(self, attribute: Any, default: object = undefined) -> object: + """ + extra(attribute, default=undefined) + + Return the value of the given typed extra attribute. + + :param attribute: the attribute (member of a :class:`~TypedAttributeSet`) to look for + :param default: the value that should be returned if no value is found for the attribute + :raises ~anyio.TypedAttributeLookupError: if the search failed and no default value was + given + + """ + try: + return self.extra_attributes[attribute]() + except KeyError: + if default is undefined: + raise TypedAttributeLookupError('Attribute not found') from None + else: + return default diff --git a/.venv/lib/python3.9/site-packages/anyio/abc/__init__.py b/.venv/lib/python3.9/site-packages/anyio/abc/__init__.py new file mode 100644 index 0000000..592ef0e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/anyio/abc/__init__.py @@ -0,0 +1,37 @@ +__all__ = ('AsyncResource', 'IPAddressType', 'IPSockAddrType', 'SocketAttribute', 'SocketStream', + 'SocketListener', 'UDPSocket', 'UNIXSocketStream', 'UDPPacketType', + 'ConnectedUDPSocket', 'UnreliableObjectReceiveStream', 'UnreliableObjectSendStream', + 'UnreliableObjectStream', 'ObjectReceiveStream', 'ObjectSendStream', 'ObjectStream', + 'ByteReceiveStream', 'ByteSendStream', 'ByteStream', 'AnyUnreliableByteReceiveStream', + 'AnyUnreliableByteSendStream', 'AnyUnreliableByteStream', 'AnyByteReceiveStream', + 'AnyByteSendStream', 'AnyByteStream', 'Listener', 'Process', 'Event', + 'Condition', 'Lock', 'Semaphore', 'CapacityLimiter', 'CancelScope', 'TaskGroup', + 
'TaskStatus', 'TestRunner', 'BlockingPortal') + +from typing import Any + +from ._resources import AsyncResource +from ._sockets import ( + ConnectedUDPSocket, IPAddressType, IPSockAddrType, SocketAttribute, SocketListener, + SocketStream, UDPPacketType, UDPSocket, UNIXSocketStream) +from ._streams import ( + AnyByteReceiveStream, AnyByteSendStream, AnyByteStream, AnyUnreliableByteReceiveStream, + AnyUnreliableByteSendStream, AnyUnreliableByteStream, ByteReceiveStream, ByteSendStream, + ByteStream, Listener, ObjectReceiveStream, ObjectSendStream, ObjectStream, + UnreliableObjectReceiveStream, UnreliableObjectSendStream, UnreliableObjectStream) +from ._subprocesses import Process +from ._tasks import TaskGroup, TaskStatus +from ._testing import TestRunner + +# Re-exported here, for backwards compatibility +# isort: off +from .._core._synchronization import CapacityLimiter, Condition, Event, Lock, Semaphore +from .._core._tasks import CancelScope +from ..from_thread import BlockingPortal + +# Re-export imports so they look like they live directly in this package +key: str +value: Any +for key, value in list(locals().items()): + if getattr(value, '__module__', '').startswith('anyio.abc.'): + value.__module__ = __name__ diff --git a/.venv/lib/python3.9/site-packages/anyio/abc/_resources.py b/.venv/lib/python3.9/site-packages/anyio/abc/_resources.py new file mode 100644 index 0000000..4594e6e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/anyio/abc/_resources.py @@ -0,0 +1,26 @@ +from abc import ABCMeta, abstractmethod +from types import TracebackType +from typing import Optional, Type, TypeVar + +T = TypeVar("T") + + +class AsyncResource(metaclass=ABCMeta): + """ + Abstract base class for all closeable asynchronous resources. + + Works as an asynchronous context manager which returns the instance itself on enter, and calls + :meth:`aclose` on exit. 
+ """ + + async def __aenter__(self: T) -> T: + return self + + async def __aexit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> None: + await self.aclose() + + @abstractmethod + async def aclose(self) -> None: + """Close the resource.""" diff --git a/.venv/lib/python3.9/site-packages/anyio/abc/_sockets.py b/.venv/lib/python3.9/site-packages/anyio/abc/_sockets.py new file mode 100644 index 0000000..a05151e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/anyio/abc/_sockets.py @@ -0,0 +1,156 @@ +import socket +from abc import abstractmethod +from io import IOBase +from ipaddress import IPv4Address, IPv6Address +from socket import AddressFamily +from types import TracebackType +from typing import ( + Any, AsyncContextManager, Callable, Collection, Dict, List, Mapping, Optional, Tuple, Type, + TypeVar, Union) + +from .._core._typedattr import TypedAttributeProvider, TypedAttributeSet, typed_attribute +from ._streams import ByteStream, Listener, T_Stream, UnreliableObjectStream +from ._tasks import TaskGroup + +IPAddressType = Union[str, IPv4Address, IPv6Address] +IPSockAddrType = Tuple[str, int] +SockAddrType = Union[IPSockAddrType, str] +UDPPacketType = Tuple[bytes, IPSockAddrType] +T_Retval = TypeVar('T_Retval') + + +class _NullAsyncContextManager: + async def __aenter__(self) -> None: + pass + + async def __aexit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Optional[bool]: + return None + + +class SocketAttribute(TypedAttributeSet): + #: the address family of the underlying socket + family: AddressFamily = typed_attribute() + #: the local socket address of the underlying socket + local_address: SockAddrType = typed_attribute() + #: for IP addresses, the local port the underlying socket is bound to + local_port: int = typed_attribute() + #: the underlying stdlib socket object + raw_socket: socket.socket = 
typed_attribute() + #: the remote address the underlying socket is connected to + remote_address: SockAddrType = typed_attribute() + #: for IP addresses, the remote port the underlying socket is connected to + remote_port: int = typed_attribute() + + +class _SocketProvider(TypedAttributeProvider): + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + from .._core._sockets import convert_ipv6_sockaddr as convert + + attributes: Dict[Any, Callable[[], Any]] = { + SocketAttribute.family: lambda: self._raw_socket.family, + SocketAttribute.local_address: lambda: convert(self._raw_socket.getsockname()), + SocketAttribute.raw_socket: lambda: self._raw_socket + } + try: + peername: Optional[Tuple[str, int]] = convert(self._raw_socket.getpeername()) + except OSError: + peername = None + + # Provide the remote address for connected sockets + if peername is not None: + attributes[SocketAttribute.remote_address] = lambda: peername + + # Provide local and remote ports for IP based sockets + if self._raw_socket.family in (AddressFamily.AF_INET, AddressFamily.AF_INET6): + attributes[SocketAttribute.local_port] = lambda: self._raw_socket.getsockname()[1] + if peername is not None: + remote_port = peername[1] + attributes[SocketAttribute.remote_port] = lambda: remote_port + + return attributes + + @property + @abstractmethod + def _raw_socket(self) -> socket.socket: + pass + + +class SocketStream(ByteStream, _SocketProvider): + """ + Transports bytes over a socket. + + Supports all relevant extra attributes from :class:`~SocketAttribute`. + """ + + +class UNIXSocketStream(SocketStream): + @abstractmethod + async def send_fds(self, message: bytes, fds: Collection[Union[int, IOBase]]) -> None: + """ + Send file descriptors along with a message to the peer. 
+ + :param message: a non-empty bytestring + :param fds: a collection of files (either numeric file descriptors or open file or socket + objects) + """ + + @abstractmethod + async def receive_fds(self, msglen: int, maxfds: int) -> Tuple[bytes, List[int]]: + """ + Receive file descriptors along with a message from the peer. + + :param msglen: length of the message to expect from the peer + :param maxfds: maximum number of file descriptors to expect from the peer + :return: a tuple of (message, file descriptors) + """ + + +class SocketListener(Listener[SocketStream], _SocketProvider): + """ + Listens to incoming socket connections. + + Supports all relevant extra attributes from :class:`~SocketAttribute`. + """ + + @abstractmethod + async def accept(self) -> SocketStream: + """Accept an incoming connection.""" + + async def serve(self, handler: Callable[[T_Stream], Any], + task_group: Optional[TaskGroup] = None) -> None: + from .. import create_task_group + + context_manager: AsyncContextManager + if task_group is None: + task_group = context_manager = create_task_group() + else: + # Can be replaced with AsyncExitStack once on py3.7+ + context_manager = _NullAsyncContextManager() + + async with context_manager: + while True: + stream = await self.accept() + task_group.start_soon(handler, stream) + + +class UDPSocket(UnreliableObjectStream[UDPPacketType], _SocketProvider): + """ + Represents an unconnected UDP socket. + + Supports all relevant extra attributes from :class:`~SocketAttribute`. + """ + + async def sendto(self, data: bytes, host: str, port: int) -> None: + """Alias for :meth:`~.UnreliableObjectSendStream.send` ((data, (host, port))).""" + return await self.send((data, (host, port))) + + +class ConnectedUDPSocket(UnreliableObjectStream[bytes], _SocketProvider): + """ + Represents an connected UDP socket. + + Supports all relevant extra attributes from :class:`~SocketAttribute`. 
+ """ diff --git a/.venv/lib/python3.9/site-packages/anyio/abc/_streams.py b/.venv/lib/python3.9/site-packages/anyio/abc/_streams.py new file mode 100644 index 0000000..635b818 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/anyio/abc/_streams.py @@ -0,0 +1,187 @@ +from abc import abstractmethod +from typing import Any, Callable, Generic, Optional, TypeVar, Union + +from .._core._exceptions import EndOfStream +from .._core._typedattr import TypedAttributeProvider +from ._resources import AsyncResource +from ._tasks import TaskGroup + +T_Item = TypeVar('T_Item') +T_Stream = TypeVar('T_Stream') + + +class UnreliableObjectReceiveStream(Generic[T_Item], AsyncResource, TypedAttributeProvider): + """ + An interface for receiving objects. + + This interface makes no guarantees that the received messages arrive in the order in which they + were sent, or that no messages are missed. + + Asynchronously iterating over objects of this type will yield objects matching the given type + parameter. + """ + + def __aiter__(self) -> "UnreliableObjectReceiveStream[T_Item]": + return self + + async def __anext__(self) -> T_Item: + try: + return await self.receive() + except EndOfStream: + raise StopAsyncIteration + + @abstractmethod + async def receive(self) -> T_Item: + """ + Receive the next item. + + :raises ~anyio.ClosedResourceError: if the receive stream has been explicitly + closed + :raises ~anyio.EndOfStream: if this stream has been closed from the other end + :raises ~anyio.BrokenResourceError: if this stream has been rendered unusable + due to external causes + """ + + +class UnreliableObjectSendStream(Generic[T_Item], AsyncResource, TypedAttributeProvider): + """ + An interface for sending objects. + + This interface makes no guarantees that the messages sent will reach the recipient(s) in the + same order in which they were sent, or at all. + """ + + @abstractmethod + async def send(self, item: T_Item) -> None: + """ + Send an item to the peer(s). 
+ + :param item: the item to send + :raises ~anyio.ClosedResourceError: if the send stream has been explicitly + closed + :raises ~anyio.BrokenResourceError: if this stream has been rendered unusable + due to external causes + """ + + +class UnreliableObjectStream(UnreliableObjectReceiveStream[T_Item], + UnreliableObjectSendStream[T_Item]): + """ + A bidirectional message stream which does not guarantee the order or reliability of message + delivery. + """ + + +class ObjectReceiveStream(UnreliableObjectReceiveStream[T_Item]): + """ + A receive message stream which guarantees that messages are received in the same order in + which they were sent, and that no messages are missed. + """ + + +class ObjectSendStream(UnreliableObjectSendStream[T_Item]): + """ + A send message stream which guarantees that messages are delivered in the same order in which + they were sent, without missing any messages in the middle. + """ + + +class ObjectStream(ObjectReceiveStream[T_Item], ObjectSendStream[T_Item], + UnreliableObjectStream[T_Item]): + """ + A bidirectional message stream which guarantees the order and reliability of message delivery. + """ + + @abstractmethod + async def send_eof(self) -> None: + """ + Send an end-of-file indication to the peer. + + You should not try to send any further data to this stream after calling this method. + This method is idempotent (does nothing on successive calls). + """ + + +class ByteReceiveStream(AsyncResource, TypedAttributeProvider): + """ + An interface for receiving bytes from a single peer. + + Iterating this byte stream will yield a byte string of arbitrary length, but no more than + 65536 bytes. + """ + + def __aiter__(self) -> 'ByteReceiveStream': + return self + + async def __anext__(self) -> bytes: + try: + return await self.receive() + except EndOfStream: + raise StopAsyncIteration + + @abstractmethod + async def receive(self, max_bytes: int = 65536) -> bytes: + """ + Receive at most ``max_bytes`` bytes from the peer. + + .. 
note:: Implementors of this interface should not return an empty :class:`bytes` object, + and users should ignore them. + + :param max_bytes: maximum number of bytes to receive + :return: the received bytes + :raises ~anyio.EndOfStream: if this stream has been closed from the other end + """ + + +class ByteSendStream(AsyncResource, TypedAttributeProvider): + """An interface for sending bytes to a single peer.""" + + @abstractmethod + async def send(self, item: bytes) -> None: + """ + Send the given bytes to the peer. + + :param item: the bytes to send + """ + + +class ByteStream(ByteReceiveStream, ByteSendStream): + """A bidirectional byte stream.""" + + @abstractmethod + async def send_eof(self) -> None: + """ + Send an end-of-file indication to the peer. + + You should not try to send any further data to this stream after calling this method. + This method is idempotent (does nothing on successive calls). + """ + + +#: Type alias for all unreliable bytes-oriented receive streams. +AnyUnreliableByteReceiveStream = Union[UnreliableObjectReceiveStream[bytes], ByteReceiveStream] +#: Type alias for all unreliable bytes-oriented send streams. +AnyUnreliableByteSendStream = Union[UnreliableObjectSendStream[bytes], ByteSendStream] +#: Type alias for all unreliable bytes-oriented streams. +AnyUnreliableByteStream = Union[UnreliableObjectStream[bytes], ByteStream] +#: Type alias for all bytes-oriented receive streams. +AnyByteReceiveStream = Union[ObjectReceiveStream[bytes], ByteReceiveStream] +#: Type alias for all bytes-oriented send streams. +AnyByteSendStream = Union[ObjectSendStream[bytes], ByteSendStream] +#: Type alias for all bytes-oriented streams. 
+AnyByteStream = Union[ObjectStream[bytes], ByteStream] + + +class Listener(Generic[T_Stream], AsyncResource, TypedAttributeProvider): + """An interface for objects that let you accept incoming connections.""" + + @abstractmethod + async def serve(self, handler: Callable[[T_Stream], Any], + task_group: Optional[TaskGroup] = None) -> None: + """ + Accept incoming connections as they come in and start tasks to handle them. + + :param handler: a callable that will be used to handle each accepted connection + :param task_group: the task group that will be used to start tasks for handling each + accepted connection (if omitted, an ad-hoc task group will be created) + """ diff --git a/.venv/lib/python3.9/site-packages/anyio/abc/_subprocesses.py b/.venv/lib/python3.9/site-packages/anyio/abc/_subprocesses.py new file mode 100644 index 0000000..1e633fb --- /dev/null +++ b/.venv/lib/python3.9/site-packages/anyio/abc/_subprocesses.py @@ -0,0 +1,78 @@ +from abc import abstractmethod +from signal import Signals +from typing import Optional + +from ._resources import AsyncResource +from ._streams import ByteReceiveStream, ByteSendStream + + +class Process(AsyncResource): + """An asynchronous version of :class:`subprocess.Popen`.""" + + @abstractmethod + async def wait(self) -> int: + """ + Wait until the process exits. + + :return: the exit code of the process + """ + + @abstractmethod + def terminate(self) -> None: + """ + Terminates the process, gracefully if possible. + + On Windows, this calls ``TerminateProcess()``. + On POSIX systems, this sends ``SIGTERM`` to the process. + + .. seealso:: :meth:`subprocess.Popen.terminate` + """ + + @abstractmethod + def kill(self) -> None: + """ + Kills the process. + + On Windows, this calls ``TerminateProcess()``. + On POSIX systems, this sends ``SIGKILL`` to the process. + + .. seealso:: :meth:`subprocess.Popen.kill` + """ + + @abstractmethod + def send_signal(self, signal: Signals) -> None: + """ + Send a signal to the subprocess. 
+ + .. seealso:: :meth:`subprocess.Popen.send_signal` + + :param signal: the signal number (e.g. :data:`signal.SIGHUP`) + """ + + @property + @abstractmethod + def pid(self) -> int: + """The process ID of the process.""" + + @property + @abstractmethod + def returncode(self) -> Optional[int]: + """ + The return code of the process. If the process has not yet terminated, this will be + ``None``. + """ + + @property + @abstractmethod + def stdin(self) -> Optional[ByteSendStream]: + """The stream for the standard input of the process.""" + + @property + @abstractmethod + def stdout(self) -> Optional[ByteReceiveStream]: + """The stream for the standard output of the process.""" + + @property + @abstractmethod + def stderr(self) -> Optional[ByteReceiveStream]: + """The stream for the standard error output of the process.""" diff --git a/.venv/lib/python3.9/site-packages/anyio/abc/_tasks.py b/.venv/lib/python3.9/site-packages/anyio/abc/_tasks.py new file mode 100644 index 0000000..bed02d8 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/anyio/abc/_tasks.py @@ -0,0 +1,87 @@ +import typing +from abc import ABCMeta, abstractmethod +from types import TracebackType +from typing import Any, Callable, Coroutine, Optional, Type, TypeVar +from warnings import warn + +if typing.TYPE_CHECKING: + from anyio._core._tasks import CancelScope + +T_Retval = TypeVar('T_Retval') + + +class TaskStatus(metaclass=ABCMeta): + @abstractmethod + def started(self, value: object = None) -> None: + """ + Signal that the task has started. + + :param value: object passed back to the starter of the task + """ + + +class TaskGroup(metaclass=ABCMeta): + """ + Groups several asynchronous tasks together. 
+ + :ivar cancel_scope: the cancel scope inherited by all child tasks + :vartype cancel_scope: CancelScope + """ + + cancel_scope: 'CancelScope' + + async def spawn(self, func: Callable[..., Coroutine[Any, Any, Any]], + *args: object, name: object = None) -> None: + """ + Start a new task in this task group. + + :param func: a coroutine function + :param args: positional arguments to call the function with + :param name: name of the task, for the purposes of introspection and debugging + + .. deprecated:: 3.0 + Use :meth:`start_soon` instead. If your code needs AnyIO 2 compatibility, you + can keep using this until AnyIO 4. + + """ + warn('spawn() is deprecated -- use start_soon() (without the "await") instead', + DeprecationWarning) + self.start_soon(func, *args, name=name) + + @abstractmethod + def start_soon(self, func: Callable[..., Coroutine[Any, Any, Any]], + *args: object, name: object = None) -> None: + """ + Start a new task in this task group. + + :param func: a coroutine function + :param args: positional arguments to call the function with + :param name: name of the task, for the purposes of introspection and debugging + + .. versionadded:: 3.0 + """ + + @abstractmethod + async def start(self, func: Callable[..., Coroutine[Any, Any, Any]], + *args: object, name: object = None) -> object: + """ + Start a new task and wait until it signals for readiness. + + :param func: a coroutine function + :param args: positional arguments to call the function with + :param name: name of the task, for the purposes of introspection and debugging + :return: the value passed to ``task_status.started()`` + :raises RuntimeError: if the task finishes without calling ``task_status.started()`` + + .. 
versionadded:: 3.0 + """ + + @abstractmethod + async def __aenter__(self) -> 'TaskGroup': + """Enter the task group context and allow starting new tasks.""" + + @abstractmethod + async def __aexit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Optional[bool]: + """Exit the task group context waiting for all tasks to finish.""" diff --git a/.venv/lib/python3.9/site-packages/anyio/abc/_testing.py b/.venv/lib/python3.9/site-packages/anyio/abc/_testing.py new file mode 100644 index 0000000..2cc9822 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/anyio/abc/_testing.py @@ -0,0 +1,37 @@ +import types +from abc import ABCMeta, abstractmethod +from typing import Any, Awaitable, Callable, Dict, Optional, Type, TypeVar + +_T = TypeVar("_T") + + +class TestRunner(metaclass=ABCMeta): + """ + Encapsulates a running event loop. Every call made through this object will use the same event + loop. + """ + + def __enter__(self) -> 'TestRunner': + return self + + def __exit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[types.TracebackType]) -> Optional[bool]: + self.close() + return None + + @abstractmethod + def close(self) -> None: + """Close the event loop.""" + + @abstractmethod + def call(self, func: Callable[..., Awaitable[_T]], + *args: object, **kwargs: Dict[str, Any]) -> _T: + """ + Call the given function within the backend's event loop. 
+ + :param func: a callable returning an awaitable + :param args: positional arguments to call ``func`` with + :param kwargs: keyword arguments to call ``func`` with + :return: the return value of ``func`` + """ diff --git a/.venv/lib/python3.9/site-packages/anyio/from_thread.py b/.venv/lib/python3.9/site-packages/anyio/from_thread.py new file mode 100644 index 0000000..0dfcec9 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/anyio/from_thread.py @@ -0,0 +1,416 @@ +import threading +from asyncio import iscoroutine +from concurrent.futures import FIRST_COMPLETED, Future, ThreadPoolExecutor, wait +from contextlib import AbstractContextManager, contextmanager +from types import TracebackType +from typing import ( + Any, AsyncContextManager, Callable, ContextManager, Coroutine, Dict, Generator, Iterable, + Optional, Tuple, Type, TypeVar, Union, cast, overload) +from warnings import warn + +from ._core import _eventloop +from ._core._eventloop import get_asynclib, get_cancelled_exc_class, threadlocals +from ._core._synchronization import Event +from ._core._tasks import CancelScope, create_task_group +from .abc._tasks import TaskStatus + +T_Retval = TypeVar('T_Retval') +T_co = TypeVar('T_co') + + +def run(func: Callable[..., Coroutine[Any, Any, T_Retval]], *args: object) -> T_Retval: + """ + Call a coroutine function from a worker thread. 
+ + :param func: a coroutine function + :param args: positional arguments for the callable + :return: the return value of the coroutine function + + """ + try: + asynclib = threadlocals.current_async_module + except AttributeError: + raise RuntimeError('This function can only be run from an AnyIO worker thread') + + return asynclib.run_async_from_thread(func, *args) + + +def run_async_from_thread(func: Callable[..., Coroutine[Any, Any, T_Retval]], + *args: object) -> T_Retval: + warn('run_async_from_thread() has been deprecated, use anyio.from_thread.run() instead', + DeprecationWarning) + return run(func, *args) + + +def run_sync(func: Callable[..., T_Retval], *args: object) -> T_Retval: + """ + Call a function in the event loop thread from a worker thread. + + :param func: a callable + :param args: positional arguments for the callable + :return: the return value of the callable + + """ + try: + asynclib = threadlocals.current_async_module + except AttributeError: + raise RuntimeError('This function can only be run from an AnyIO worker thread') + + return asynclib.run_sync_from_thread(func, *args) + + +def run_sync_from_thread(func: Callable[..., T_Retval], *args: object) -> T_Retval: + warn('run_sync_from_thread() has been deprecated, use anyio.from_thread.run_sync() instead', + DeprecationWarning) + return run_sync(func, *args) + + +class _BlockingAsyncContextManager(AbstractContextManager): + _enter_future: Future + _exit_future: Future + _exit_event: Event + _exit_exc_info: Tuple[Optional[Type[BaseException]], Optional[BaseException], + Optional[TracebackType]] = (None, None, None) + + def __init__(self, async_cm: AsyncContextManager[T_co], portal: 'BlockingPortal'): + self._async_cm = async_cm + self._portal = portal + + async def run_async_cm(self) -> Optional[bool]: + try: + self._exit_event = Event() + value = await self._async_cm.__aenter__() + except BaseException as exc: + self._enter_future.set_exception(exc) + raise + else: + 
self._enter_future.set_result(value) + + try: + # Wait for the sync context manager to exit. + # This next statement can raise `get_cancelled_exc_class()` if + # something went wrong in a task group in this async context + # manager. + await self._exit_event.wait() + finally: + # In case of cancellation, it could be that we end up here before + # `_BlockingAsyncContextManager.__exit__` is called, and an + # `_exit_exc_info` has been set. + result = await self._async_cm.__aexit__(*self._exit_exc_info) + return result + + def __enter__(self) -> T_co: + self._enter_future = Future() + self._exit_future = self._portal.start_task_soon(self.run_async_cm) + cm = self._enter_future.result() + return cast(T_co, cm) + + def __exit__(self, __exc_type: Optional[Type[BaseException]], + __exc_value: Optional[BaseException], + __traceback: Optional[TracebackType]) -> Optional[bool]: + self._exit_exc_info = __exc_type, __exc_value, __traceback + self._portal.call(self._exit_event.set) + return self._exit_future.result() + + +class _BlockingPortalTaskStatus(TaskStatus): + def __init__(self, future: Future): + self._future = future + + def started(self, value: object = None) -> None: + self._future.set_result(value) + + +class BlockingPortal: + """An object that lets external threads run code in an asynchronous event loop.""" + + def __new__(cls) -> 'BlockingPortal': + return get_asynclib().BlockingPortal() + + def __init__(self) -> None: + self._event_loop_thread_id: Optional[int] = threading.get_ident() + self._stop_event = Event() + self._task_group = create_task_group() + self._cancelled_exc_class = get_cancelled_exc_class() + + async def __aenter__(self) -> 'BlockingPortal': + await self._task_group.__aenter__() + return self + + async def __aexit__(self, exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Optional[bool]: + await self.stop() + return await self._task_group.__aexit__(exc_type, exc_val, exc_tb) + + 
def _check_running(self) -> None: + if self._event_loop_thread_id is None: + raise RuntimeError('This portal is not running') + if self._event_loop_thread_id == threading.get_ident(): + raise RuntimeError('This method cannot be called from the event loop thread') + + async def sleep_until_stopped(self) -> None: + """Sleep until :meth:`stop` is called.""" + await self._stop_event.wait() + + async def stop(self, cancel_remaining: bool = False) -> None: + """ + Signal the portal to shut down. + + This marks the portal as no longer accepting new calls and exits from + :meth:`sleep_until_stopped`. + + :param cancel_remaining: ``True`` to cancel all the remaining tasks, ``False`` to let them + finish before returning + + """ + self._event_loop_thread_id = None + self._stop_event.set() + if cancel_remaining: + self._task_group.cancel_scope.cancel() + + async def _call_func(self, func: Callable, args: tuple, kwargs: Dict[str, Any], + future: Future) -> None: + def callback(f: Future) -> None: + if f.cancelled() and self._event_loop_thread_id not in (None, threading.get_ident()): + self.call(scope.cancel) + + try: + retval = func(*args, **kwargs) + if iscoroutine(retval): + with CancelScope() as scope: + if future.cancelled(): + scope.cancel() + else: + future.add_done_callback(callback) + + retval = await retval + except self._cancelled_exc_class: + future.cancel() + except BaseException as exc: + if not future.cancelled(): + future.set_exception(exc) + + # Let base exceptions fall through + if not isinstance(exc, Exception): + raise + else: + if not future.cancelled(): + future.set_result(retval) + finally: + scope = None # type: ignore[assignment] + + def _spawn_task_from_thread(self, func: Callable, args: tuple, kwargs: Dict[str, Any], + name: object, future: Future) -> None: + """ + Spawn a new task using the given callable. + + Implementors must ensure that the future is resolved when the task finishes. 
+ + :param func: a callable + :param args: positional arguments to be passed to the callable + :param kwargs: keyword arguments to be passed to the callable + :param name: name of the task (will be coerced to a string if not ``None``) + :param future: a future that will resolve to the return value of the callable, or the + exception raised during its execution + + """ + raise NotImplementedError + + @overload + def call(self, func: Callable[..., Coroutine[Any, Any, T_Retval]], *args: object) -> T_Retval: + ... + + @overload + def call(self, func: Callable[..., T_Retval], *args: object) -> T_Retval: + ... + + def call(self, func: Callable[..., Union[Coroutine[Any, Any, T_Retval], T_Retval]], + *args: object) -> T_Retval: + """ + Call the given function in the event loop thread. + + If the callable returns a coroutine object, it is awaited on. + + :param func: any callable + :raises RuntimeError: if the portal is not running or if this method is called from within + the event loop thread + + """ + return cast(T_Retval, self.start_task_soon(func, *args).result()) + + @overload + def spawn_task(self, func: Callable[..., Coroutine[Any, Any, T_Retval]], + *args: object, name: object = None) -> "Future[T_Retval]": + ... + + @overload + def spawn_task(self, func: Callable[..., T_Retval], + *args: object, name: object = None) -> "Future[T_Retval]": ... + + def spawn_task(self, func: Callable[..., Union[Coroutine[Any, Any, T_Retval], T_Retval]], + *args: object, name: object = None) -> "Future[T_Retval]": + """ + Start a task in the portal's task group. 
+ + :param func: the target coroutine function + :param args: positional arguments passed to ``func`` + :param name: name of the task (will be coerced to a string if not ``None``) + :return: a future that resolves with the return value of the callable if the task completes + successfully, or with the exception raised in the task + :raises RuntimeError: if the portal is not running or if this method is called from within + the event loop thread + + .. versionadded:: 2.1 + .. deprecated:: 3.0 + Use :meth:`start_task_soon` instead. If your code needs AnyIO 2 compatibility, you + can keep using this until AnyIO 4. + + """ + warn('spawn_task() is deprecated -- use start_task_soon() instead', DeprecationWarning) + return self.start_task_soon(func, *args, name=name) # type: ignore[arg-type] + + @overload + def start_task_soon(self, func: Callable[..., Coroutine[Any, Any, T_Retval]], + *args: object, name: object = None) -> "Future[T_Retval]": + ... + + @overload + def start_task_soon(self, func: Callable[..., T_Retval], + *args: object, name: object = None) -> "Future[T_Retval]": ... + + def start_task_soon(self, func: Callable[..., Union[Coroutine[Any, Any, T_Retval], T_Retval]], + *args: object, name: object = None) -> "Future[T_Retval]": + """ + Start a task in the portal's task group. + + The task will be run inside a cancel scope which can be cancelled by cancelling the + returned future. + + :param func: the target coroutine function + :param args: positional arguments passed to ``func`` + :param name: name of the task (will be coerced to a string if not ``None``) + :return: a future that resolves with the return value of the callable if the task completes + successfully, or with the exception raised in the task + :raises RuntimeError: if the portal is not running or if this method is called from within + the event loop thread + + .. 
versionadded:: 3.0 + + """ + self._check_running() + f: Future = Future() + self._spawn_task_from_thread(func, args, {}, name, f) + return f + + def start_task(self, func: Callable[..., Coroutine[Any, Any, Any]], *args: object, + name: object = None) -> Tuple['Future[Any]', Any]: + """ + Start a task in the portal's task group and wait until it signals for readiness. + + This method works the same way as :meth:`TaskGroup.start`. + + :param func: the target coroutine function + :param args: positional arguments passed to ``func`` + :param name: name of the task (will be coerced to a string if not ``None``) + :return: a tuple of (future, task_status_value) where the ``task_status_value`` is the + value passed to ``task_status.started()`` from within the target function + + .. versionadded:: 3.0 + + """ + def task_done(future: Future) -> None: + if not task_status_future.done(): + if future.cancelled(): + task_status_future.cancel() + elif future.exception(): + task_status_future.set_exception(future.exception()) + else: + exc = RuntimeError('Task exited without calling task_status.started()') + task_status_future.set_exception(exc) + + self._check_running() + task_status_future: Future = Future() + task_status = _BlockingPortalTaskStatus(task_status_future) + f: Future = Future() + f.add_done_callback(task_done) + self._spawn_task_from_thread(func, args, {'task_status': task_status}, name, f) + return f, task_status_future.result() + + def wrap_async_context_manager(self, cm: AsyncContextManager[T_co]) -> ContextManager[T_co]: + """ + Wrap an async context manager as a synchronous context manager via this portal. + + Spawns a task that will call both ``__aenter__()`` and ``__aexit__()``, stopping in the + middle until the synchronous context manager exits. + + :param cm: an asynchronous context manager + :return: a synchronous context manager + + .. 
versionadded:: 2.1 + + """ + return _BlockingAsyncContextManager(cm, self) + + +def create_blocking_portal() -> BlockingPortal: + """ + Create a portal for running functions in the event loop thread from external threads. + + Use this function in asynchronous code when you need to allow external threads access to the + event loop where your asynchronous code is currently running. + + .. deprecated:: 3.0 + Use :class:`.BlockingPortal` directly. + + """ + warn('create_blocking_portal() has been deprecated -- use anyio.from_thread.BlockingPortal() ' + 'directly', DeprecationWarning) + return BlockingPortal() + + +@contextmanager +def start_blocking_portal( + backend: str = 'asyncio', + backend_options: Optional[Dict[str, Any]] = None) -> Generator[BlockingPortal, Any, None]: + """ + Start a new event loop in a new thread and run a blocking portal in its main task. + + The parameters are the same as for :func:`~anyio.run`. + + :param backend: name of the backend + :param backend_options: backend options + :return: a context manager that yields a blocking portal + + .. versionchanged:: 3.0 + Usage as a context manager is now required. 
+ + """ + async def run_portal() -> None: + async with BlockingPortal() as portal_: + if future.set_running_or_notify_cancel(): + future.set_result(portal_) + await portal_.sleep_until_stopped() + + future: Future[BlockingPortal] = Future() + with ThreadPoolExecutor(1) as executor: + run_future = executor.submit(_eventloop.run, run_portal, backend=backend, + backend_options=backend_options) + try: + wait(cast(Iterable[Future], [run_future, future]), return_when=FIRST_COMPLETED) + except BaseException: + future.cancel() + run_future.cancel() + raise + + if future.done(): + portal = future.result() + try: + yield portal + except BaseException: + portal.call(portal.stop, True) + raise + + portal.call(portal.stop, False) + + run_future.result() diff --git a/.venv/lib/python3.9/site-packages/anyio/lowlevel.py b/.venv/lib/python3.9/site-packages/anyio/lowlevel.py new file mode 100644 index 0000000..446e9e7 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/anyio/lowlevel.py @@ -0,0 +1,160 @@ +import enum +import sys +from dataclasses import dataclass +from typing import Any, Dict, Generic, Set, TypeVar, Union, overload +from weakref import WeakKeyDictionary + +from ._core._eventloop import get_asynclib + +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal + +T = TypeVar('T') +D = TypeVar('D') + + +async def checkpoint() -> None: + """ + Check for cancellation and allow the scheduler to switch to another task. + + Equivalent to (but more efficient than):: + + await checkpoint_if_cancelled() + await cancel_shielded_checkpoint() + + .. versionadded:: 3.0 + + """ + await get_asynclib().checkpoint() + + +async def checkpoint_if_cancelled() -> None: + """ + Enter a checkpoint if the enclosing cancel scope has been cancelled. + + This does not allow the scheduler to switch to a different task. + + .. 
versionadded:: 3.0 + + """ + await get_asynclib().checkpoint_if_cancelled() + + +async def cancel_shielded_checkpoint() -> None: + """ + Allow the scheduler to switch to another task but without checking for cancellation. + + Equivalent to (but potentially more efficient than):: + + with CancelScope(shield=True): + await checkpoint() + + .. versionadded:: 3.0 + + """ + await get_asynclib().cancel_shielded_checkpoint() + + +def current_token() -> object: + """Return a backend specific token object that can be used to get back to the event loop.""" + return get_asynclib().current_token() + + +_run_vars = WeakKeyDictionary() # type: WeakKeyDictionary[Any, Dict[str, Any]] +_token_wrappers: Dict[Any, '_TokenWrapper'] = {} + + +@dataclass(frozen=True) +class _TokenWrapper: + __slots__ = '_token', '__weakref__' + _token: object + + +class _NoValueSet(enum.Enum): + NO_VALUE_SET = enum.auto() + + +class RunvarToken(Generic[T]): + __slots__ = '_var', '_value', '_redeemed' + + def __init__(self, var: 'RunVar[T]', value: Union[T, Literal[_NoValueSet.NO_VALUE_SET]]): + self._var = var + self._value: Union[T, Literal[_NoValueSet.NO_VALUE_SET]] = value + self._redeemed = False + + +class RunVar(Generic[T]): + """Like a :class:`~contextvars.ContextVar`, expect scoped to the running event loop.""" + __slots__ = '_name', '_default' + + NO_VALUE_SET: Literal[_NoValueSet.NO_VALUE_SET] = _NoValueSet.NO_VALUE_SET + + _token_wrappers: Set[_TokenWrapper] = set() + + def __init__(self, name: str, + default: Union[T, Literal[_NoValueSet.NO_VALUE_SET]] = NO_VALUE_SET): + self._name = name + self._default = default + + @property + def _current_vars(self) -> Dict[str, T]: + token = current_token() + while True: + try: + return _run_vars[token] + except TypeError: + # Happens when token isn't weak referable (TrioToken). + # This workaround does mean that some memory will leak on Trio until the problem + # is fixed on their end. 
+ token = _TokenWrapper(token) + self._token_wrappers.add(token) + except KeyError: + run_vars = _run_vars[token] = {} + return run_vars + + @overload + def get(self, default: D) -> Union[T, D]: ... + + @overload + def get(self) -> T: ... + + def get( + self, default: Union[D, Literal[_NoValueSet.NO_VALUE_SET]] = NO_VALUE_SET + ) -> Union[T, D]: + try: + return self._current_vars[self._name] + except KeyError: + if default is not RunVar.NO_VALUE_SET: + return default + elif self._default is not RunVar.NO_VALUE_SET: + return self._default + + raise LookupError(f'Run variable "{self._name}" has no value and no default set') + + def set(self, value: T) -> RunvarToken[T]: + current_vars = self._current_vars + token = RunvarToken(self, current_vars.get(self._name, RunVar.NO_VALUE_SET)) + current_vars[self._name] = value + return token + + def reset(self, token: RunvarToken[T]) -> None: + if token._var is not self: + raise ValueError('This token does not belong to this RunVar') + + if token._redeemed: + raise ValueError('This token has already been used') + + if token._value is _NoValueSet.NO_VALUE_SET: + try: + del self._current_vars[self._name] + except KeyError: + pass + else: + self._current_vars[self._name] = token._value + + token._redeemed = True + + def __repr__(self) -> str: + return f'' diff --git a/.venv/lib/python3.9/site-packages/anyio/py.typed b/.venv/lib/python3.9/site-packages/anyio/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.9/site-packages/anyio/pytest_plugin.py b/.venv/lib/python3.9/site-packages/anyio/pytest_plugin.py new file mode 100644 index 0000000..d0cc2fb --- /dev/null +++ b/.venv/lib/python3.9/site-packages/anyio/pytest_plugin.py @@ -0,0 +1,152 @@ +from contextlib import contextmanager +from inspect import isasyncgenfunction, iscoroutinefunction +from typing import TYPE_CHECKING, Any, Dict, Iterator, Optional, Tuple, cast + +import pytest +import sniffio + +from ._core._eventloop import get_all_backends, 
get_asynclib +from .abc import TestRunner + +if TYPE_CHECKING: + from _pytest.config import Config + +_current_runner: Optional[TestRunner] = None + + +def extract_backend_and_options(backend: object) -> Tuple[str, Dict[str, Any]]: + if isinstance(backend, str): + return backend, {} + elif isinstance(backend, tuple) and len(backend) == 2: + if isinstance(backend[0], str) and isinstance(backend[1], dict): + return cast(Tuple[str, Dict[str, Any]], backend) + + raise TypeError('anyio_backend must be either a string or tuple of (string, dict)') + + +@contextmanager +def get_runner(backend_name: str, backend_options: Dict[str, Any]) -> Iterator[TestRunner]: + global _current_runner + if _current_runner: + yield _current_runner + return + + asynclib = get_asynclib(backend_name) + token = None + if sniffio.current_async_library_cvar.get(None) is None: + # Since we're in control of the event loop, we can cache the name of the async library + token = sniffio.current_async_library_cvar.set(backend_name) + + try: + backend_options = backend_options or {} + with asynclib.TestRunner(**backend_options) as runner: + _current_runner = runner + yield runner + finally: + _current_runner = None + if token: + sniffio.current_async_library_cvar.reset(token) + + +def pytest_configure(config: "Config") -> None: + config.addinivalue_line('markers', 'anyio: mark the (coroutine function) test to be run ' + 'asynchronously via anyio.') + + +def pytest_fixture_setup(fixturedef: Any, request: Any) -> None: + def wrapper(*args, anyio_backend, **kwargs): # type: ignore[no-untyped-def] + backend_name, backend_options = extract_backend_and_options(anyio_backend) + if has_backend_arg: + kwargs['anyio_backend'] = anyio_backend + + with get_runner(backend_name, backend_options) as runner: + if isasyncgenfunction(func): + gen = func(*args, **kwargs) + try: + value = runner.call(gen.asend, None) + except StopAsyncIteration: + raise RuntimeError('Async generator did not yield') + + yield value + + try: 
+ runner.call(gen.asend, None) + except StopAsyncIteration: + pass + else: + runner.call(gen.aclose) + raise RuntimeError('Async generator fixture did not stop') + else: + yield runner.call(func, *args, **kwargs) + + # Only apply this to coroutine functions and async generator functions in requests that involve + # the anyio_backend fixture + func = fixturedef.func + if isasyncgenfunction(func) or iscoroutinefunction(func): + if 'anyio_backend' in request.fixturenames: + has_backend_arg = 'anyio_backend' in fixturedef.argnames + fixturedef.func = wrapper + if not has_backend_arg: + fixturedef.argnames += ('anyio_backend',) + + +@pytest.hookimpl(tryfirst=True) +def pytest_pycollect_makeitem(collector: Any, name: Any, obj: Any) -> None: + if collector.istestfunction(obj, name): + inner_func = obj.hypothesis.inner_test if hasattr(obj, 'hypothesis') else obj + if iscoroutinefunction(inner_func): + marker = collector.get_closest_marker('anyio') + own_markers = getattr(obj, 'pytestmark', ()) + if marker or any(marker.name == 'anyio' for marker in own_markers): + pytest.mark.usefixtures('anyio_backend')(obj) + + +@pytest.hookimpl(tryfirst=True) +def pytest_pyfunc_call(pyfuncitem: Any) -> Optional[bool]: + def run_with_hypothesis(**kwargs: Any) -> None: + with get_runner(backend_name, backend_options) as runner: + runner.call(original_func, **kwargs) + + backend = pyfuncitem.funcargs.get('anyio_backend') + if backend: + backend_name, backend_options = extract_backend_and_options(backend) + + if hasattr(pyfuncitem.obj, 'hypothesis'): + # Wrap the inner test function unless it's already wrapped + original_func = pyfuncitem.obj.hypothesis.inner_test + if original_func.__qualname__ != run_with_hypothesis.__qualname__: + if iscoroutinefunction(original_func): + pyfuncitem.obj.hypothesis.inner_test = run_with_hypothesis + + return None + + if iscoroutinefunction(pyfuncitem.obj): + funcargs = pyfuncitem.funcargs + testargs = {arg: funcargs[arg] for arg in 
pyfuncitem._fixtureinfo.argnames} + with get_runner(backend_name, backend_options) as runner: + runner.call(pyfuncitem.obj, **testargs) + + return True + + return None + + +@pytest.fixture(params=get_all_backends()) +def anyio_backend(request: Any) -> Any: + return request.param + + +@pytest.fixture +def anyio_backend_name(anyio_backend: Any) -> str: + if isinstance(anyio_backend, str): + return anyio_backend + else: + return anyio_backend[0] + + +@pytest.fixture +def anyio_backend_options(anyio_backend: Any) -> Dict[str, Any]: + if isinstance(anyio_backend, str): + return {} + else: + return anyio_backend[1] diff --git a/.venv/lib/python3.9/site-packages/anyio/streams/__init__.py b/.venv/lib/python3.9/site-packages/anyio/streams/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.9/site-packages/anyio/streams/buffered.py b/.venv/lib/python3.9/site-packages/anyio/streams/buffered.py new file mode 100644 index 0000000..ee220ca --- /dev/null +++ b/.venv/lib/python3.9/site-packages/anyio/streams/buffered.py @@ -0,0 +1,116 @@ +from dataclasses import dataclass, field +from typing import Any, Callable, Mapping + +from .. import ClosedResourceError, DelimiterNotFound, EndOfStream, IncompleteRead +from ..abc import AnyByteReceiveStream, ByteReceiveStream + + +@dataclass(eq=False) +class BufferedByteReceiveStream(ByteReceiveStream): + """ + Wraps any bytes-based receive stream and uses a buffer to provide sophisticated receiving + capabilities in the form of a byte stream. 
+ """ + + receive_stream: AnyByteReceiveStream + _buffer: bytearray = field(init=False, default_factory=bytearray) + _closed: bool = field(init=False, default=False) + + async def aclose(self) -> None: + await self.receive_stream.aclose() + self._closed = True + + @property + def buffer(self) -> bytes: + """The bytes currently in the buffer.""" + return bytes(self._buffer) + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return self.receive_stream.extra_attributes + + async def receive(self, max_bytes: int = 65536) -> bytes: + if self._closed: + raise ClosedResourceError + + if self._buffer: + chunk = bytes(self._buffer[:max_bytes]) + del self._buffer[:max_bytes] + return chunk + elif isinstance(self.receive_stream, ByteReceiveStream): + return await self.receive_stream.receive(max_bytes) + else: + # With a bytes-oriented object stream, we need to handle any surplus bytes we get from + # the receive() call + chunk = await self.receive_stream.receive() + if len(chunk) > max_bytes: + # Save the surplus bytes in the buffer + self._buffer.extend(chunk[max_bytes:]) + return chunk[:max_bytes] + else: + return chunk + + async def receive_exactly(self, nbytes: int) -> bytes: + """ + Read exactly the given amount of bytes from the stream. 
+ + :param nbytes: the number of bytes to read + :return: the bytes read + :raises ~anyio.IncompleteRead: if the stream was closed before the requested + amount of bytes could be read from the stream + + """ + while True: + remaining = nbytes - len(self._buffer) + if remaining <= 0: + retval = self._buffer[:nbytes] + del self._buffer[:nbytes] + return bytes(retval) + + try: + if isinstance(self.receive_stream, ByteReceiveStream): + chunk = await self.receive_stream.receive(remaining) + else: + chunk = await self.receive_stream.receive() + except EndOfStream as exc: + raise IncompleteRead from exc + + self._buffer.extend(chunk) + + async def receive_until(self, delimiter: bytes, max_bytes: int) -> bytes: + """ + Read from the stream until the delimiter is found or max_bytes have been read. + + :param delimiter: the marker to look for in the stream + :param max_bytes: maximum number of bytes that will be read before raising + :exc:`~anyio.DelimiterNotFound` + :return: the bytes read (not including the delimiter) + :raises ~anyio.IncompleteRead: if the stream was closed before the delimiter + was found + :raises ~anyio.DelimiterNotFound: if the delimiter is not found within the + bytes read up to the maximum allowed + + """ + delimiter_size = len(delimiter) + offset = 0 + while True: + # Check if the delimiter can be found in the current buffer + index = self._buffer.find(delimiter, offset) + if index >= 0: + found = self._buffer[:index] + del self._buffer[:index + len(delimiter):] + return bytes(found) + + # Check if the buffer is already at or over the limit + if len(self._buffer) >= max_bytes: + raise DelimiterNotFound(max_bytes) + + # Read more data into the buffer from the socket + try: + data = await self.receive_stream.receive() + except EndOfStream as exc: + raise IncompleteRead from exc + + # Move the offset forward and add the new data to the buffer + offset = max(len(self._buffer) - delimiter_size + 1, 0) + self._buffer.extend(data) diff --git 
a/.venv/lib/python3.9/site-packages/anyio/streams/file.py b/.venv/lib/python3.9/site-packages/anyio/streams/file.py new file mode 100644 index 0000000..9dc0739 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/anyio/streams/file.py @@ -0,0 +1,139 @@ +from io import SEEK_SET, UnsupportedOperation +from os import PathLike +from pathlib import Path +from typing import Any, BinaryIO, Callable, Dict, Mapping, Union, cast + +from .. import ( + BrokenResourceError, ClosedResourceError, EndOfStream, TypedAttributeSet, to_thread, + typed_attribute) +from ..abc import ByteReceiveStream, ByteSendStream + + +class FileStreamAttribute(TypedAttributeSet): + #: the open file descriptor + file: BinaryIO = typed_attribute() + #: the path of the file on the file system, if available (file must be a real file) + path: Path = typed_attribute() + #: the file number, if available (file must be a real file or a TTY) + fileno: int = typed_attribute() + + +class _BaseFileStream: + def __init__(self, file: BinaryIO): + self._file = file + + async def aclose(self) -> None: + await to_thread.run_sync(self._file.close) + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + attributes: Dict[Any, Callable[[], Any]] = { + FileStreamAttribute.file: lambda: self._file, + } + + if hasattr(self._file, 'name'): + attributes[FileStreamAttribute.path] = lambda: Path(self._file.name) + + try: + self._file.fileno() + except UnsupportedOperation: + pass + else: + attributes[FileStreamAttribute.fileno] = lambda: self._file.fileno() + + return attributes + + +class FileReadStream(_BaseFileStream, ByteReceiveStream): + """ + A byte stream that reads from a file in the file system. + + :param file: a file that has been opened for reading in binary mode + + .. versionadded:: 3.0 + """ + + @classmethod + async def from_path(cls, path: Union[str, 'PathLike[str]']) -> 'FileReadStream': + """ + Create a file read stream by opening the given file. 
+ + :param path: path of the file to read from + + """ + file = await to_thread.run_sync(Path(path).open, 'rb') + return cls(cast(BinaryIO, file)) + + async def receive(self, max_bytes: int = 65536) -> bytes: + try: + data = await to_thread.run_sync(self._file.read, max_bytes) + except ValueError: + raise ClosedResourceError from None + except OSError as exc: + raise BrokenResourceError from exc + + if data: + return data + else: + raise EndOfStream + + async def seek(self, position: int, whence: int = SEEK_SET) -> int: + """ + Seek the file to the given position. + + .. seealso:: :meth:`io.IOBase.seek` + + .. note:: Not all file descriptors are seekable. + + :param position: position to seek the file to + :param whence: controls how ``position`` is interpreted + :return: the new absolute position + :raises OSError: if the file is not seekable + + """ + return await to_thread.run_sync(self._file.seek, position, whence) + + async def tell(self) -> int: + """ + Return the current stream position. + + .. note:: Not all file descriptors are seekable. + + :return: the current absolute position + :raises OSError: if the file is not seekable + + """ + return await to_thread.run_sync(self._file.tell) + + +class FileWriteStream(_BaseFileStream, ByteSendStream): + """ + A byte stream that writes to a file in the file system. + + :param file: a file that has been opened for writing in binary mode + + .. versionadded:: 3.0 + """ + + @classmethod + async def from_path(cls, path: Union[str, 'PathLike[str]'], + append: bool = False) -> 'FileWriteStream': + """ + Create a file write stream by opening the given file for writing. 
+ + :param path: path of the file to write to + :param append: if ``True``, open the file for appending; if ``False``, any existing file + at the given path will be truncated + + """ + mode = 'ab' if append else 'wb' + file = await to_thread.run_sync(Path(path).open, mode) + return cls(cast(BinaryIO, file)) + + async def send(self, item: bytes) -> None: + try: + await to_thread.run_sync(self._file.write, item) + except ValueError: + raise ClosedResourceError from None + except OSError as exc: + raise BrokenResourceError from exc diff --git a/.venv/lib/python3.9/site-packages/anyio/streams/memory.py b/.venv/lib/python3.9/site-packages/anyio/streams/memory.py new file mode 100644 index 0000000..259b7dd --- /dev/null +++ b/.venv/lib/python3.9/site-packages/anyio/streams/memory.py @@ -0,0 +1,256 @@ +from collections import OrderedDict, deque +from dataclasses import dataclass, field +from types import TracebackType +from typing import Deque, Generic, List, NamedTuple, Optional, Type, TypeVar + +from .. 
@dataclass(eq=False)
class MemoryObjectReceiveStream(Generic[T_Item], ObjectReceiveStream[T_Item]):
    """The receiving end of a memory object stream; create via clone()/stream factory."""

    _state: MemoryObjectStreamState[T_Item]
    _closed: bool = field(init=False, default=False)

    def __post_init__(self) -> None:
        # Register this receiving end with the shared stream state
        self._state.open_receive_channels += 1

    def receive_nowait(self) -> T_Item:
        """
        Receive the next item if it can be done without waiting.

        :return: the received item
        :raises ~anyio.ClosedResourceError: if this send stream has been closed
        :raises ~anyio.EndOfStream: if the buffer is empty and this stream has been
            closed from the sending end
        :raises ~anyio.WouldBlock: if there are no items in the buffer and no tasks
            waiting to send

        """
        if self._closed:
            raise ClosedResourceError

        # A sender may be blocked waiting for buffer space: move its item into the
        # buffer and wake it up before draining the buffer below.
        if self._state.waiting_senders:
            event, queued_item = self._state.waiting_senders.popitem(last=False)
            self._state.buffer.append(queued_item)
            event.set()

        if self._state.buffer:
            return self._state.buffer.popleft()
        elif not self._state.open_send_channels:
            raise EndOfStream

        raise WouldBlock

    async def receive(self) -> T_Item:
        """Receive the next item, blocking until one is available."""
        await checkpoint()
        try:
            return self.receive_nowait()
        except WouldBlock:
            # Nothing ready; park this task until a sender hands an item directly to us
            waiter = Event()
            delivery: List[T_Item] = []
            self._state.waiting_receivers[waiter] = delivery

            try:
                await waiter.wait()
            except get_cancelled_exc_class():
                # Ignore the immediate cancellation if we already received an item,
                # so as not to lose it
                if not delivery:
                    raise
            finally:
                self._state.waiting_receivers.pop(waiter, None)

            if delivery:
                return delivery[0]
            else:
                raise EndOfStream

    def clone(self) -> 'MemoryObjectReceiveStream[T_Item]':
        """
        Create a clone of this receive stream.

        Each clone can be closed separately. Only when all clones have been closed will the
        receiving end of the memory stream be considered closed by the sending ends.

        :return: the cloned stream

        """
        if self._closed:
            raise ClosedResourceError

        return MemoryObjectReceiveStream(_state=self._state)

    def close(self) -> None:
        """
        Close the stream.

        This works the exact same way as :meth:`aclose`, but is provided as a special case for
        the benefit of synchronous callbacks.

        """
        if self._closed:
            return

        self._closed = True
        self._state.open_receive_channels -= 1
        if self._state.open_receive_channels == 0:
            # Last receiver gone: wake every blocked sender so it can observe the
            # broken stream and fail
            for event in list(self._state.waiting_senders):
                event.set()

    async def aclose(self) -> None:
        self.close()

    def statistics(self) -> MemoryObjectStreamStatistics:
        """
        Return statistics about the current state of this stream.

        .. versionadded:: 3.0
        """
        return self._state.statistics()

    def __enter__(self) -> 'MemoryObjectReceiveStream[T_Item]':
        return self

    def __exit__(self, exc_type: Optional[Type[BaseException]],
                 exc_val: Optional[BaseException],
                 exc_tb: Optional[TracebackType]) -> None:
        self.close()
@dataclass(eq=False)
class MemoryObjectSendStream(Generic[T_Item], ObjectSendStream[T_Item]):
    """The sending end of a memory object stream; create via clone()/stream factory."""

    _state: MemoryObjectStreamState[T_Item]
    _closed: bool = field(init=False, default=False)

    def __post_init__(self) -> None:
        # Register this sending end with the shared stream state
        self._state.open_send_channels += 1

    def send_nowait(self, item: T_Item) -> DeprecatedAwaitable:
        """
        Send an item immediately if it can be done without waiting.

        :param item: the item to send
        :raises ~anyio.ClosedResourceError: if this send stream has been closed
        :raises ~anyio.BrokenResourceError: if the stream has been closed from the
            receiving end
        :raises ~anyio.WouldBlock: if the buffer is full and there are no tasks waiting
            to receive

        """
        if self._closed:
            raise ClosedResourceError
        if not self._state.open_receive_channels:
            raise BrokenResourceError

        if self._state.waiting_receivers:
            # Hand the item directly to the longest-waiting receiver and wake it
            receive_event, container = self._state.waiting_receivers.popitem(last=False)
            container.append(item)
            receive_event.set()
        elif len(self._state.buffer) < self._state.max_buffer_size:
            self._state.buffer.append(item)
        else:
            raise WouldBlock

        return DeprecatedAwaitable(self.send_nowait)

    async def send(self, item: T_Item) -> None:
        """
        Send an item, blocking until a receiver or buffer space is available.

        :param item: the item to send
        :raises ~anyio.ClosedResourceError: if this send stream has been closed
        :raises ~anyio.BrokenResourceError: if the receiving end was closed while
            waiting

        """
        await checkpoint()
        try:
            self.send_nowait(item)
        except WouldBlock:
            # Wait until there's someone on the receiving end
            send_event = Event()
            self._state.waiting_senders[send_event] = item
            try:
                await send_event.wait()
            except BaseException:
                self._state.waiting_senders.pop(send_event, None)  # type: ignore[arg-type]
                raise

            # If our entry is still present, the event was set by the receiving end
            # closing (close() wakes waiting senders but does not remove their
            # entries), not by a receiver consuming the item.
            # BUG FIX: this previously truth-tested the *popped item*
            # (``if self._state.waiting_senders.pop(send_event, None):``), so sending
            # a falsy item (0, '', None, b'') to a closed receive stream silently
            # failed to raise BrokenResourceError. Test membership instead.
            if send_event in self._state.waiting_senders:
                del self._state.waiting_senders[send_event]
                raise BrokenResourceError

    def clone(self) -> 'MemoryObjectSendStream[T_Item]':
        """
        Create a clone of this send stream.

        Each clone can be closed separately. Only when all clones have been closed will the
        sending end of the memory stream be considered closed by the receiving ends.

        :return: the cloned stream

        """
        if self._closed:
            raise ClosedResourceError

        return MemoryObjectSendStream(_state=self._state)

    def close(self) -> None:
        """
        Close the stream.

        This works the exact same way as :meth:`aclose`, but is provided as a special case for
        the benefit of synchronous callbacks.

        """
        if not self._closed:
            self._closed = True
            self._state.open_send_channels -= 1
            if self._state.open_send_channels == 0:
                # Last sender gone: wake all waiting receivers so they raise EndOfStream
                receive_events = list(self._state.waiting_receivers.keys())
                self._state.waiting_receivers.clear()
                for event in receive_events:
                    event.set()

    async def aclose(self) -> None:
        self.close()

    def statistics(self) -> MemoryObjectStreamStatistics:
        """
        Return statistics about the current state of this stream.

        .. versionadded:: 3.0
        """
        return self._state.statistics()

    def __enter__(self) -> 'MemoryObjectSendStream[T_Item]':
        return self

    def __exit__(self, exc_type: Optional[Type[BaseException]],
                 exc_val: Optional[BaseException],
                 exc_tb: Optional[TracebackType]) -> None:
        self.close()
@dataclass(eq=False)
class StapledByteStream(ByteStream):
    """
    Combines two byte streams into a single, bidirectional byte stream.

    Extra attributes will be provided from both streams, with the receive stream providing the
    values in case of a conflict.

    :param ByteSendStream send_stream: the sending byte stream
    :param ByteReceiveStream receive_stream: the receiving byte stream
    """

    send_stream: ByteSendStream
    receive_stream: ByteReceiveStream

    async def receive(self, max_bytes: int = 65536) -> bytes:
        # Pure delegation to the receiving half
        return await self.receive_stream.receive(max_bytes)

    async def send(self, item: bytes) -> None:
        # Pure delegation to the sending half
        await self.send_stream.send(item)

    async def send_eof(self) -> None:
        # EOF is signalled by closing the sending half only
        await self.send_stream.aclose()

    async def aclose(self) -> None:
        await self.send_stream.aclose()
        await self.receive_stream.aclose()

    @property
    def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
        # Receive-stream attributes win on key conflicts (merged last)
        merged = dict(self.send_stream.extra_attributes)
        merged.update(self.receive_stream.extra_attributes)
        return merged


@dataclass(eq=False)
class StapledObjectStream(Generic[T_Item], ObjectStream[T_Item]):
    """
    Combines two object streams into a single, bidirectional object stream.

    Extra attributes will be provided from both streams, with the receive stream providing the
    values in case of a conflict.

    :param ObjectSendStream send_stream: the sending object stream
    :param ObjectReceiveStream receive_stream: the receiving object stream
    """

    send_stream: ObjectSendStream[T_Item]
    receive_stream: ObjectReceiveStream[T_Item]

    async def receive(self) -> T_Item:
        # Pure delegation to the receiving half
        return await self.receive_stream.receive()

    async def send(self, item: T_Item) -> None:
        # Pure delegation to the sending half
        await self.send_stream.send(item)

    async def send_eof(self) -> None:
        # EOF is signalled by closing the sending half only
        await self.send_stream.aclose()

    async def aclose(self) -> None:
        await self.send_stream.aclose()
        await self.receive_stream.aclose()

    @property
    def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
        # Receive-stream attributes win on key conflicts (merged last)
        merged = dict(self.send_stream.extra_attributes)
        merged.update(self.receive_stream.extra_attributes)
        return merged
@dataclass(eq=False)
class MultiListener(Generic[T_Stream], Listener[T_Stream]):
    """
    Combines multiple listeners into one, serving connections from all of them at once.

    Any MultiListeners in the given collection of listeners will have their listeners moved into
    this one.

    Extra attributes are provided from each listener, with each successive listener overriding any
    conflicting attributes from the previous one.

    :param listeners: listeners to serve
    :type listeners: Sequence[Listener[T_Stream]]
    """

    listeners: Sequence[Listener[T_Stream]]

    def __post_init__(self) -> None:
        # Flatten nested MultiListeners, taking ownership of their listeners
        flattened: List[Listener[T_Stream]] = []
        for candidate in self.listeners:
            if isinstance(candidate, MultiListener):
                flattened.extend(candidate.listeners)
                # Empty the donor so its listeners are not served twice
                del candidate.listeners[:]  # type: ignore[attr-defined]
            else:
                flattened.append(candidate)

        self.listeners = flattened

    async def serve(self, handler: Callable[[T_Stream], Any],
                    task_group: Optional[TaskGroup] = None) -> None:
        """Serve connections from every wrapped listener concurrently."""
        # Local import to avoid a circular import at module load time
        from .. import create_task_group

        async with create_task_group() as tg:
            for listener in self.listeners:
                tg.start_soon(listener.serve, handler, task_group)

    async def aclose(self) -> None:
        for listener in self.listeners:
            await listener.aclose()

    @property
    def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
        # Later listeners override earlier ones on conflicting keys
        attributes: dict = {}
        for listener in self.listeners:
            attributes.update(listener.extra_attributes)

        return attributes
@dataclass(eq=False)
class TextReceiveStream(ObjectReceiveStream[str]):
    """
    Stream wrapper that decodes bytes to strings using the given encoding.

    Decoding is done using :class:`~codecs.IncrementalDecoder` which returns any completely
    received unicode characters as soon as they come in.

    :param transport_stream: any bytes-based receive stream
    :param encoding: character encoding to use for decoding bytes to strings (defaults to
        ``utf-8``)
    :param errors: handling scheme for decoding errors (defaults to ``strict``; see the
        `codecs module documentation`_ for a comprehensive list of options)

    .. _codecs module documentation: https://docs.python.org/3/library/codecs.html#codec-objects
    """

    transport_stream: AnyByteReceiveStream
    encoding: InitVar[str] = 'utf-8'
    errors: InitVar[str] = 'strict'
    _decoder: codecs.IncrementalDecoder = field(init=False)

    def __post_init__(self, encoding: str, errors: str) -> None:
        # The incremental decoder keeps partial multi-byte sequences across chunks
        decoder_class = codecs.getincrementaldecoder(encoding)
        self._decoder = decoder_class(errors=errors)

    async def receive(self) -> str:
        # Keep feeding bytes to the decoder until it produces at least one complete
        # character (a multi-byte sequence may straddle chunk boundaries)
        while True:
            raw = await self.transport_stream.receive()
            text = self._decoder.decode(raw)
            if text:
                return text

    async def aclose(self) -> None:
        await self.transport_stream.aclose()
        # Discard any buffered partial character state
        self._decoder.reset()

    @property
    def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
        return self.transport_stream.extra_attributes
@dataclass(eq=False)
class TextSendStream(ObjectSendStream[str]):
    """
    Sends strings to the wrapped stream as bytes using the given encoding.

    :param AnyByteSendStream transport_stream: any bytes-based send stream
    :param str encoding: character encoding to use for encoding strings to bytes (defaults to
        ``utf-8``)
    :param str errors: handling scheme for encoding errors (defaults to ``strict``; see the
        `codecs module documentation`_ for a comprehensive list of options)

    .. _codecs module documentation: https://docs.python.org/3/library/codecs.html#codec-objects
    """

    transport_stream: AnyByteSendStream
    encoding: InitVar[str] = 'utf-8'
    errors: str = 'strict'
    _encoder: Callable[..., Tuple[bytes, int]] = field(init=False)

    def __post_init__(self, encoding: str) -> None:
        # Look up the stateless encoder once; errors is applied per call in send()
        self._encoder = codecs.getencoder(encoding)

    async def send(self, item: str) -> None:
        # codecs encoders return (encoded_bytes, consumed_length); only the bytes matter
        payload, _ = self._encoder(item, self.errors)
        await self.transport_stream.send(payload)

    async def aclose(self) -> None:
        await self.transport_stream.aclose()

    @property
    def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
        return self.transport_stream.extra_attributes
@dataclass(eq=False)
class TextStream(ObjectStream[str]):
    """
    A bidirectional stream that decodes bytes to strings on receive and encodes strings to bytes
    on send.

    Extra attributes will be provided from both streams, with the receive stream providing the
    values in case of a conflict.

    :param AnyByteStream transport_stream: any bytes-based stream
    :param str encoding: character encoding to use for encoding/decoding strings to/from bytes
        (defaults to ``utf-8``)
    :param str errors: handling scheme for encoding errors (defaults to ``strict``; see the
        `codecs module documentation`_ for a comprehensive list of options)

    .. _codecs module documentation: https://docs.python.org/3/library/codecs.html#codec-objects
    """

    transport_stream: AnyByteStream
    encoding: InitVar[str] = 'utf-8'
    errors: InitVar[str] = 'strict'
    _receive_stream: TextReceiveStream = field(init=False)
    _send_stream: TextSendStream = field(init=False)

    def __post_init__(self, encoding: str, errors: str) -> None:
        # Compose one wrapper per direction around the same transport
        self._receive_stream = TextReceiveStream(self.transport_stream, encoding=encoding,
                                                 errors=errors)
        self._send_stream = TextSendStream(self.transport_stream, encoding=encoding,
                                           errors=errors)

    async def receive(self) -> str:
        return await self._receive_stream.receive()

    async def send(self, item: str) -> None:
        await self._send_stream.send(item)

    async def send_eof(self) -> None:
        # EOF goes straight to the transport; it is not an encoding concern
        await self.transport_stream.send_eof()

    async def aclose(self) -> None:
        await self._send_stream.aclose()
        await self._receive_stream.aclose()

    @property
    def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
        # Receive-stream attributes win on key conflicts (merged last)
        merged = dict(self._send_stream.extra_attributes)
        merged.update(self._receive_stream.extra_attributes)
        return merged
class TLSAttribute(TypedAttributeSet):
    """Contains Transport Layer Security related attributes."""
    #: the ALPN protocol selected during negotiation (or ``None``)
    alpn_protocol: Optional[str] = typed_attribute()
    #: the ``tls-unique`` channel binding data
    channel_binding_tls_unique: bytes = typed_attribute()
    #: the negotiated cipher as a (name, protocol, secret bits) tuple
    cipher: Tuple[str, str, int] = typed_attribute()
    #: the peer's certificate as a dictionary (see :meth:`ssl.SSLSocket.getpeercert` for
    #: the exact structure)
    peer_certificate: Optional[Dict[str, Union[str, _PCTRTTT, _PCTRTT]]] = typed_attribute()
    #: the peer's certificate in DER (binary) form
    peer_certificate_binary: Optional[bytes] = typed_attribute()
    #: ``True`` when this end acts as the TLS server
    server_side: bool = typed_attribute()
    #: cipher suites available on both ends of the connection
    shared_ciphers: List[Tuple[str, str, int]] = typed_attribute()
    #: the underlying :class:`~ssl.SSLObject` doing the encryption
    ssl_object: ssl.SSLObject = typed_attribute()
    #: ``True`` if a closing TLS handshake is performed (and expected) when the stream
    #: is closed
    standard_compatible: bool = typed_attribute()
    #: the negotiated TLS protocol version string (e.g. ``TLSv1.2``)
    tls_version: str = typed_attribute()
+ + :var AnyByteStream transport_stream: the wrapped stream + + """ + transport_stream: AnyByteStream + standard_compatible: bool + _ssl_object: ssl.SSLObject + _read_bio: ssl.MemoryBIO + _write_bio: ssl.MemoryBIO + + @classmethod + async def wrap(cls, transport_stream: AnyByteStream, *, server_side: Optional[bool] = None, + hostname: Optional[str] = None, ssl_context: Optional[ssl.SSLContext] = None, + standard_compatible: bool = True) -> 'TLSStream': + """ + Wrap an existing stream with Transport Layer Security. + + This performs a TLS handshake with the peer. + + :param transport_stream: a bytes-transporting stream to wrap + :param server_side: ``True`` if this is the server side of the connection, ``False`` if + this is the client side (if omitted, will be set to ``False`` if ``hostname`` has been + provided, ``False`` otherwise). Used only to create a default context when an explicit + context has not been provided. + :param hostname: host name of the peer (if host name checking is desired) + :param ssl_context: the SSLContext object to use (if not provided, a secure default will be + created) + :param standard_compatible: if ``False``, skip the closing handshake when closing the + connection, and don't raise an exception if the peer does the same + :raises ~ssl.SSLError: if the TLS handshake fails + + """ + if server_side is None: + server_side = not hostname + + if not ssl_context: + purpose = ssl.Purpose.CLIENT_AUTH if server_side else ssl.Purpose.SERVER_AUTH + ssl_context = ssl.create_default_context(purpose) + + # Re-enable detection of unexpected EOFs if it was disabled by Python + if hasattr(ssl, 'OP_IGNORE_UNEXPECTED_EOF'): + ssl_context.options ^= ssl.OP_IGNORE_UNEXPECTED_EOF # type: ignore[attr-defined] + + bio_in = ssl.MemoryBIO() + bio_out = ssl.MemoryBIO() + ssl_object = ssl_context.wrap_bio(bio_in, bio_out, server_side=server_side, + server_hostname=hostname) + wrapper = cls(transport_stream=transport_stream, + 
standard_compatible=standard_compatible, _ssl_object=ssl_object, + _read_bio=bio_in, _write_bio=bio_out) + await wrapper._call_sslobject_method(ssl_object.do_handshake) + return wrapper + + async def _call_sslobject_method( + self, func: Callable[..., T_Retval], *args: object + ) -> T_Retval: + while True: + try: + result = func(*args) + except ssl.SSLWantReadError: + try: + # Flush any pending writes first + if self._write_bio.pending: + await self.transport_stream.send(self._write_bio.read()) + + data = await self.transport_stream.receive() + except EndOfStream: + self._read_bio.write_eof() + except OSError as exc: + self._read_bio.write_eof() + self._write_bio.write_eof() + raise BrokenResourceError from exc + else: + self._read_bio.write(data) + except ssl.SSLWantWriteError: + await self.transport_stream.send(self._write_bio.read()) + except ssl.SSLSyscallError as exc: + self._read_bio.write_eof() + self._write_bio.write_eof() + raise BrokenResourceError from exc + except ssl.SSLError as exc: + self._read_bio.write_eof() + self._write_bio.write_eof() + if (isinstance(exc, ssl.SSLEOFError) + or 'UNEXPECTED_EOF_WHILE_READING' in exc.strerror): + if self.standard_compatible: + raise BrokenResourceError from exc + else: + raise EndOfStream from None + + raise + else: + # Flush any pending writes first + if self._write_bio.pending: + await self.transport_stream.send(self._write_bio.read()) + + return result + + async def unwrap(self) -> Tuple[AnyByteStream, bytes]: + """ + Does the TLS closing handshake. 
+ + :return: a tuple of (wrapped byte stream, bytes left in the read buffer) + + """ + await self._call_sslobject_method(self._ssl_object.unwrap) + self._read_bio.write_eof() + self._write_bio.write_eof() + return self.transport_stream, self._read_bio.read() + + async def aclose(self) -> None: + if self.standard_compatible: + try: + await self.unwrap() + except BaseException: + await aclose_forcefully(self.transport_stream) + raise + + await self.transport_stream.aclose() + + async def receive(self, max_bytes: int = 65536) -> bytes: + data = await self._call_sslobject_method(self._ssl_object.read, max_bytes) + if not data: + raise EndOfStream + + return data + + async def send(self, item: bytes) -> None: + await self._call_sslobject_method(self._ssl_object.write, item) + + async def send_eof(self) -> None: + tls_version = self.extra(TLSAttribute.tls_version) + match = re.match(r'TLSv(\d+)(?:\.(\d+))?', tls_version) + if match: + major, minor = int(match.group(1)), int(match.group(2) or 0) + if (major, minor) < (1, 3): + raise NotImplementedError(f'send_eof() requires at least TLSv1.3; current ' + f'session uses {tls_version}') + + raise NotImplementedError('send_eof() has not yet been implemented for TLS streams') + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + **self.transport_stream.extra_attributes, + TLSAttribute.alpn_protocol: self._ssl_object.selected_alpn_protocol, + TLSAttribute.channel_binding_tls_unique: self._ssl_object.get_channel_binding, + TLSAttribute.cipher: self._ssl_object.cipher, + TLSAttribute.peer_certificate: lambda: self._ssl_object.getpeercert(False), + TLSAttribute.peer_certificate_binary: lambda: self._ssl_object.getpeercert(True), + TLSAttribute.server_side: lambda: self._ssl_object.server_side, + TLSAttribute.shared_ciphers: lambda: self._ssl_object.shared_ciphers(), + TLSAttribute.standard_compatible: lambda: self.standard_compatible, + TLSAttribute.ssl_object: lambda: self._ssl_object, + 
@dataclass(eq=False)
class TLSListener(Listener[TLSStream]):
    """
    A convenience listener that wraps another listener and auto-negotiates a TLS session on every
    accepted connection.

    If the TLS handshake times out or raises an exception, :meth:`handle_handshake_error` is
    called to do whatever post-mortem processing is deemed necessary.

    Supports only the :attr:`~TLSAttribute.standard_compatible` extra attribute.

    :param Listener listener: the listener to wrap
    :param ssl_context: the SSL context object
    :param standard_compatible: a flag passed through to :meth:`TLSStream.wrap`
    :param handshake_timeout: time limit for the TLS handshake
        (passed to :func:`~anyio.fail_after`)
    """

    listener: Listener[Any]
    ssl_context: ssl.SSLContext
    standard_compatible: bool = True
    handshake_timeout: float = 30

    @staticmethod
    async def handle_handshake_error(exc: BaseException, stream: AnyByteStream) -> None:
        # BUG FIX: this used to be an f-string ("f\"\"\"...\"\"\"") — an f-string is not a
        # constant, so it is NOT stored as __doc__; it was re-evaluated and discarded on
        # every call. Converted to a plain docstring.
        """
        Handle an exception raised during the TLS handshake.

        This method does 3 things:

        #. Forcefully closes the original stream
        #. Logs the exception (unless it was a cancellation exception) using this
           module's logger
        #. Reraises the exception if it was a base exception or a cancellation exception

        :param exc: the exception
        :param stream: the original stream

        """
        await aclose_forcefully(stream)

        # Log all except cancellation exceptions
        if not isinstance(exc, get_cancelled_exc_class()):
            logging.getLogger(__name__).exception('Error during TLS handshake')

        # Only reraise base exceptions and cancellation exceptions
        if not isinstance(exc, Exception) or isinstance(exc, get_cancelled_exc_class()):
            raise

    async def serve(self, handler: Callable[[TLSStream], Any],
                    task_group: Optional[TaskGroup] = None) -> None:
        """Accept connections, wrapping each one in TLS before invoking ``handler``."""
        @wraps(handler)
        async def handler_wrapper(stream: AnyByteStream) -> None:
            # Local import to avoid a circular import at module load time
            from .. import fail_after
            try:
                with fail_after(self.handshake_timeout):
                    wrapped_stream = await TLSStream.wrap(
                        stream, ssl_context=self.ssl_context,
                        standard_compatible=self.standard_compatible)
            except BaseException as exc:
                await self.handle_handshake_error(exc, stream)
            else:
                await handler(wrapped_stream)

        await self.listener.serve(handler_wrapper, task_group)

    async def aclose(self) -> None:
        await self.listener.aclose()

    @property
    def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
        return {
            TLSAttribute.standard_compatible: lambda: self.standard_compatible,
        }
cancellable: bool = False, + limiter: Optional[CapacityLimiter] = None) -> T_Retval: + """ + Call the given function with the given arguments in a worker process. + + If the ``cancellable`` option is enabled and the task waiting for its completion is cancelled, + the worker process running it will be abruptly terminated using SIGKILL (or + ``terminateProcess()`` on Windows). + + :param func: a callable + :param args: positional arguments for the callable + :param cancellable: ``True`` to allow cancellation of the operation while it's running + :param limiter: capacity limiter to use to limit the total amount of processes running + (if omitted, the default limiter is used) + :return: an awaitable that yields the return value of the function. + + """ + async def send_raw_command(pickled_cmd: bytes) -> object: + try: + await stdin.send(pickled_cmd) + response = await buffered.receive_until(b'\n', 50) + status, length = response.split(b' ') + if status not in (b'RETURN', b'EXCEPTION'): + raise RuntimeError(f'Worker process returned unexpected response: {response!r}') + + pickled_response = await buffered.receive_exactly(int(length)) + except BaseException as exc: + workers.discard(process) + try: + process.kill() + with CancelScope(shield=True): + await process.aclose() + except ProcessLookupError: + pass + + if isinstance(exc, get_cancelled_exc_class()): + raise + else: + raise BrokenWorkerProcess from exc + + retval = pickle.loads(pickled_response) + if status == b'EXCEPTION': + assert isinstance(retval, BaseException) + raise retval + else: + return retval + + # First pickle the request before trying to reserve a worker process + await checkpoint_if_cancelled() + request = pickle.dumps(('run', func, args), protocol=pickle.HIGHEST_PROTOCOL) + + # If this is the first run in this event loop thread, set up the necessary variables + try: + workers = _process_pool_workers.get() + idle_workers = _process_pool_idle_workers.get() + except LookupError: + workers = set() + 
idle_workers = deque() + _process_pool_workers.set(workers) + _process_pool_idle_workers.set(idle_workers) + get_asynclib().setup_process_pool_exit_at_shutdown(workers) + + async with (limiter or current_default_process_limiter()): + # Pop processes from the pool (starting from the most recently used) until we find one that + # hasn't exited yet + process: Process + while idle_workers: + process, idle_since = idle_workers.pop() + if process.returncode is None: + stdin = cast(ByteSendStream, process.stdin) + buffered = BufferedByteReceiveStream(cast(ByteReceiveStream, process.stdout)) + + # Prune any other workers that have been idle for WORKER_MAX_IDLE_TIME seconds or + # longer + now = current_time() + killed_processes: List[Process] = [] + while idle_workers: + if now - idle_workers[0][1] < WORKER_MAX_IDLE_TIME: + break + + process, idle_since = idle_workers.popleft() + process.kill() + workers.remove(process) + killed_processes.append(process) + + with CancelScope(shield=True): + for process in killed_processes: + await process.aclose() + + break + + workers.remove(process) + else: + command = [sys.executable, '-u', '-m', __name__] + process = await open_process(command, stdin=subprocess.PIPE, stdout=subprocess.PIPE) + try: + stdin = cast(ByteSendStream, process.stdin) + buffered = BufferedByteReceiveStream(cast(ByteReceiveStream, process.stdout)) + with fail_after(20): + message = await buffered.receive(6) + + if message != b'READY\n': + raise BrokenWorkerProcess( + f'Worker process returned unexpected response: {message!r}') + + main_module_path = getattr(sys.modules['__main__'], '__file__', None) + pickled = pickle.dumps(('init', sys.path, main_module_path), + protocol=pickle.HIGHEST_PROTOCOL) + await send_raw_command(pickled) + except (BrokenWorkerProcess, get_cancelled_exc_class()): + raise + except BaseException as exc: + process.kill() + raise BrokenWorkerProcess('Error during worker process initialization') from exc + + workers.add(process) + + with 
def current_default_process_limiter() -> CapacityLimiter:
    """
    Return the capacity limiter that is used by default to limit the number of worker processes.

    The limiter is created lazily on first use (sized to the CPU count, falling back
    to 2 when it cannot be determined) and then reused for the rest of the event
    loop's lifetime.

    :return: a capacity limiter object

    """
    try:
        limiter = _default_process_limiter.get()
    except LookupError:
        # First call in this event loop thread: create and remember the limiter
        limiter = CapacityLimiter(os.cpu_count() or 2)
        _default_process_limiter.set(limiter)

    return limiter
async def run_sync(
        func: Callable[..., T_Retval], *args: object, cancellable: bool = False,
        limiter: Optional[CapacityLimiter] = None) -> T_Retval:
    """
    Call the given function with the given arguments in a worker thread.

    If the ``cancellable`` option is enabled and the task waiting for its completion is cancelled,
    the thread will still run its course but its return value (or any raised exception) will be
    ignored.

    :param func: a callable
    :param args: positional arguments for the callable
    :param cancellable: ``True`` to allow cancellation of the operation
    :param limiter: capacity limiter to use to limit the total amount of threads running
        (if omitted, the default limiter is used)
    :return: an awaitable that yields the return value of the function.

    """
    # Delegate to whichever async backend (asyncio/trio) is driving the current task
    backend = get_asynclib()
    return await backend.run_sync_in_worker_thread(
        func, *args, cancellable=cancellable, limiter=limiter)
sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/.venv/lib/python3.9/site-packages/apischema-0.16.6.dist-info/METADATA b/.venv/lib/python3.9/site-packages/apischema-0.16.6.dist-info/METADATA new file mode 100644 index 0000000..2e1afa3 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema-0.16.6.dist-info/METADATA @@ -0,0 +1,145 @@ +Metadata-Version: 2.1 +Name: apischema +Version: 0.16.6 +Summary: JSON (de)serialization, *GraphQL* and JSON schema generation using Python typing. 
+Home-page: https://github.com/wyfo/apischema +Author: Joseph Perez +Author-email: joperez@hotmail.fr +License: MIT +Platform: UNKNOWN +Classifier: Development Status :: 4 - Beta +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: >=3.6 +Description-Content-Type: text/markdown +License-File: LICENSE.txt +Requires-Dist: dataclasses (==0.7) ; python_version < "3.7" +Provides-Extra: examples +Requires-Dist: graphql-core (>=3.0.0) ; extra == 'examples' +Requires-Dist: attrs ; extra == 'examples' +Requires-Dist: docstring-parser ; extra == 'examples' +Requires-Dist: bson ; extra == 'examples' +Requires-Dist: orjson ; extra == 'examples' +Requires-Dist: pydantic ; extra == 'examples' +Requires-Dist: pytest ; extra == 'examples' +Requires-Dist: sqlalchemy ; extra == 'examples' +Provides-Extra: graphql +Requires-Dist: graphql-core (>=3.0.0) ; extra == 'graphql' + +# apischema + +JSON (de)serialization, *GraphQL* and JSON schema generation using Python typing. + +*apischema* makes your life easier when dealing with API data. + +## Documentation + +[https://wyfo.github.io/apischema/](https://wyfo.github.io/apischema/) + +## Install +```shell +pip install apischema +``` +It requires only Python 3.6+ (and dataclasses [official backport](https://pypi.org/project/dataclasses/) for version 3.6 only) + +*PyPy3* is fully supported. + +## Why another library? + +(If you wonder how this differs from the *pydantic* library, see the [dedicated section of the documentation](https://wyfo.github.io/apischema/difference_with_pydantic/) — there are many differences.) 
+ +This library fulfills the following goals: + +- stay as close as possible to the standard library (dataclasses, typing, etc.) — as a consequence we do not need plugins for editors/linters/etc.; +- be adaptable, provide tools to support any types (ORM, etc.); +- avoid dynamic things like using raw strings for attributes name - play nicely with your IDE. + +No known alternative achieves all of this, and apischema is also [faster](https://wyfo.github.io/apischema/performance_and_benchmark) than all of them. + +On top of that, because APIs are not only JSON, *apischema* is also a complete *GraphQL* library. + +## Example + +```python +from collections.abc import Collection +from dataclasses import dataclass, field +from uuid import UUID, uuid4 + +from graphql import print_schema +from pytest import raises + +from apischema import ValidationError, deserialize, serialize +from apischema.graphql import graphql_schema +from apischema.json_schema import deserialization_schema + + +# Define a schema with standard dataclasses +@dataclass +class Resource: + id: UUID + name: str + tags: set[str] = field(default_factory=set) + + +# Get some data +uuid = uuid4() +data = {"id": str(uuid), "name": "wyfo", "tags": ["some_tag"]} +# Deserialize data +resource = deserialize(Resource, data) +assert resource == Resource(uuid, "wyfo", {"some_tag"}) +# Serialize objects +assert serialize(Resource, resource) == data +# Validate during deserialization +with raises(ValidationError) as err: # pytest checks exception is raised + deserialize(Resource, {"id": "42", "name": "wyfo"}) +assert err.value.errors == [ + {"loc": ["id"], "msg": "badly formed hexadecimal UUID string"} +] +# Generate JSON Schema +assert deserialization_schema(Resource) == { + "$schema": "http://json-schema.org/draft/2020-12/schema#", + "type": "object", + "properties": { + "id": {"type": "string", "format": "uuid"}, + "name": {"type": "string"}, + "tags": {"type": "array", "items": {"type": "string"}, "uniqueItems": 
True}, + }, + "required": ["id", "name"], + "additionalProperties": False, +} + + +# Define GraphQL operations +def resources(tags: Collection[str] | None = None) -> Collection[Resource] | None: + ... + + +# Generate GraphQL schema +schema = graphql_schema(query=[resources], id_types={UUID}) +schema_str = """\ +type Query { + resources(tags: [String!]): [Resource!] +} + +type Resource { + id: ID! + name: String! + tags: [String!]! +} +""" +assert print_schema(schema) == schema_str +``` +*apischema* works out of the box with your data model. + +[*Let's start the apischema tour.*](https://wyfo.github.io/apischema/) + +## Changelog + +See [releases](https://github.com/wyfo/apischema/releases) + diff --git a/.venv/lib/python3.9/site-packages/apischema-0.16.6.dist-info/RECORD b/.venv/lib/python3.9/site-packages/apischema-0.16.6.dist-info/RECORD new file mode 100644 index 0000000..d73a859 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema-0.16.6.dist-info/RECORD @@ -0,0 +1,133 @@ +apischema-0.16.6.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +apischema-0.16.6.dist-info/LICENSE.txt,sha256=ACwmltkrXIz5VsEQcrqljq-fat6ZXAMepjXGoe40KtE,1069 +apischema-0.16.6.dist-info/METADATA,sha256=e25BznRCYVpAF92b5HEEZwFX47s_2PYY7BEo9bbCxFE,4727 +apischema-0.16.6.dist-info/RECORD,, +apischema-0.16.6.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 +apischema-0.16.6.dist-info/top_level.txt,sha256=zW0-6yTDoOL1hAiW09vIidai_o1RmEB0JfImev9wq44,10 +apischema/__init__.py,sha256=CdbV9VkcY0ioAvF_nn9A3p5nn271HMtPoHU0Nb3OF-8,2404 +apischema/__pycache__/__init__.cpython-39.pyc,, +apischema/__pycache__/aliases.cpython-39.pyc,, +apischema/__pycache__/cache.cpython-39.pyc,, +apischema/__pycache__/dataclasses.cpython-39.pyc,, +apischema/__pycache__/dependencies.cpython-39.pyc,, +apischema/__pycache__/dependent_required.cpython-39.pyc,, +apischema/__pycache__/fields.cpython-39.pyc,, +apischema/__pycache__/methods.cpython-39.pyc,, 
+apischema/__pycache__/ordering.cpython-39.pyc,, +apischema/__pycache__/recursion.cpython-39.pyc,, +apischema/__pycache__/settings.cpython-39.pyc,, +apischema/__pycache__/skip.cpython-39.pyc,, +apischema/__pycache__/std_types.cpython-39.pyc,, +apischema/__pycache__/tagged_unions.cpython-39.pyc,, +apischema/__pycache__/type_names.cpython-39.pyc,, +apischema/__pycache__/types.cpython-39.pyc,, +apischema/__pycache__/typing.cpython-39.pyc,, +apischema/__pycache__/utils.cpython-39.pyc,, +apischema/__pycache__/visitor.cpython-39.pyc,, +apischema/aliases.py,sha256=YnOhFCrDYWtQUMS6LInQBBLKgxCKmMOVn120tWWT7_M,1363 +apischema/cache.py,sha256=Zwes3fW_MDZ_J0bQdFG6awnQh6k3Tsdr3zm1F_LeCaE,1219 +apischema/conversions/__init__.py,sha256=f54zZOD_Wq5jzdgr15kKUxzrXhqRxkCGfvu5b5cfd7U,1014 +apischema/conversions/__pycache__/__init__.cpython-39.pyc,, +apischema/conversions/__pycache__/conversions.cpython-39.pyc,, +apischema/conversions/__pycache__/converters.cpython-39.pyc,, +apischema/conversions/__pycache__/dataclass_models.cpython-39.pyc,, +apischema/conversions/__pycache__/utils.cpython-39.pyc,, +apischema/conversions/__pycache__/visitor.cpython-39.pyc,, +apischema/conversions/__pycache__/wrappers.cpython-39.pyc,, +apischema/conversions/conversions.py,sha256=JydDimFXomRFehkWDnSI20YiWyHWRuGyeH_HqAPBsBs,3467 +apischema/conversions/converters.py,sha256=vWcMYUsfj3y05rp3kzs-_VNYsyzKbLOkBfNisE3EEgU,6094 +apischema/conversions/dataclass_models.py,sha256=LVPCdRIDnh196XnQTFFFHPoqCNWf8UsUNDUmEu8wFC8,2975 +apischema/conversions/utils.py,sha256=f6Yg0PNqSeQmmLllZGr_Ki21c_8b4pUWw-t4buzBUx0,2944 +apischema/conversions/visitor.py,sha256=CkA8YXTI0bkmam8-Tqayy6yy8qUrDK2Oe-2N26PKvQI,8471 +apischema/conversions/wrappers.py,sha256=-9kDqjFpYUOUYTqwBUrEi2tIIPDsMDX4K6ai_i6mQLE,2227 +apischema/dataclasses.py,sha256=440GM4lKn1mHc7g_U9L7KgyCtvX8akgY8xm-zQgoQ58,887 +apischema/dependencies.py,sha256=DgwQCOPe8dyrF_RoGbbNQxTCwP7jNdM3ylFB-BVLLvE,2110 
+apischema/dependent_required.py,sha256=Yp2lRSTYrU_LbwgW7GKJlO4-2Mw97raROrAYMxaGTZE,338 +apischema/deserialization/__init__.py,sha256=_1gwEMVXkilWu-Z_otIKTTqXQYxRKBgt7kode0MjlDQ,35164 +apischema/deserialization/__pycache__/__init__.cpython-39.pyc,, +apischema/deserialization/__pycache__/coercion.cpython-39.pyc,, +apischema/deserialization/__pycache__/flattened.cpython-39.pyc,, +apischema/deserialization/coercion.py,sha256=2JtCLPBdh5ufJA-1hHP47S3-V41CWg-eim9HOiG-xJg,1467 +apischema/deserialization/flattened.py,sha256=ApD-W0rH74dQaEpD0j59jvcYb4QJj9HgoCdFfxvd7B8,1721 +apischema/fields.py,sha256=jb5sOnR2upTDIPKEqxpnt4AHfdy0zpsbJ_pu_10t5w8,4434 +apischema/graphql/__init__.py,sha256=n5xycpEUJSx_QoIyRstB6x-ecXl-2fCiGENeeprSzlI,498 +apischema/graphql/__pycache__/__init__.cpython-39.pyc,, +apischema/graphql/__pycache__/interfaces.cpython-39.pyc,, +apischema/graphql/__pycache__/resolvers.cpython-39.pyc,, +apischema/graphql/__pycache__/schema.cpython-39.pyc,, +apischema/graphql/interfaces.py,sha256=kJPLb96ZtRU1pdevjvgTmFIuwGsrQXXQmSI1vG6YCmU,367 +apischema/graphql/relay/__init__.py,sha256=9c1O2xI4yfOyBUOD0NwjwfENB_Gl4wTX_lGVntiNM3s,404 +apischema/graphql/relay/__pycache__/__init__.cpython-39.pyc,, +apischema/graphql/relay/__pycache__/connections.cpython-39.pyc,, +apischema/graphql/relay/__pycache__/global_identification.cpython-39.pyc,, +apischema/graphql/relay/__pycache__/mutations.cpython-39.pyc,, +apischema/graphql/relay/__pycache__/utils.cpython-39.pyc,, +apischema/graphql/relay/connections.py,sha256=yftPDvLqbfsQwq2yyumTMhWuKbdKwai0d6dN_Io3BLE,2687 +apischema/graphql/relay/global_identification.py,sha256=K6hMG4ryQAKfgMG3m1nQwQmTtJ48b4lmUUF4BgtG_9c,4562 +apischema/graphql/relay/mutations.py,sha256=wvBg-3Lubm34GsZwVHULR1eNQ1QXp0P64ouSOTrGrYk,5268 +apischema/graphql/relay/utils.py,sha256=zHeb4rcrGjDcqB-0k1LfF50JrO7GKHHzC_V8PQZsGOM,240 +apischema/graphql/resolvers.py,sha256=q9qFdYcCWSf03i8f2vHizuXyHDCjPL5Ud_AD892Z6kQ,10632 
+apischema/graphql/schema.py,sha256=VcU06dyIRk_asJ-9RkoKDuHlTiEoygRacjFiVBq6llc,36811 +apischema/json_schema/__init__.py,sha256=85iCyTQitcrSJCt5eCWE47IWIWF_3cFgFY5PAhHAYVU,287 +apischema/json_schema/__pycache__/__init__.cpython-39.pyc,, +apischema/json_schema/__pycache__/conversions_resolver.cpython-39.pyc,, +apischema/json_schema/__pycache__/patterns.cpython-39.pyc,, +apischema/json_schema/__pycache__/refs.cpython-39.pyc,, +apischema/json_schema/__pycache__/schema.cpython-39.pyc,, +apischema/json_schema/__pycache__/types.cpython-39.pyc,, +apischema/json_schema/__pycache__/versions.cpython-39.pyc,, +apischema/json_schema/conversions_resolver.py,sha256=e7Yp6lTA48Fm1WzhD2I1ui8VMdMsVeiT9JlseBDESio,4374 +apischema/json_schema/patterns.py,sha256=MqjaFdifkkTmKDb41V1OJe604A3CR-mIKIdWsyxiHgg,789 +apischema/json_schema/refs.py,sha256=64NNhftG6LExoLWyk4oVOrATLtd8VJqWsYCqd7jPw9A,4657 +apischema/json_schema/schema.py,sha256=2k67nXYm74vQgahQZWnYtxLPbRsmThLR46Ql6DKUi_k,25164 +apischema/json_schema/types.py,sha256=9QbY8pyLdlCEztf1GruzUWeHaDyawLV5loxWbmo1PnM,3380 +apischema/json_schema/versions.py,sha256=yYnjlysqycyMaZsiXxGczukvyDI1OlumqudFdn5BqtY,4012 +apischema/metadata/__init__.py,sha256=i6Kxq4bHag4LYqMt1JeRE2bqGL4TTwzgfhbfOuoB1HA,1146 +apischema/metadata/__pycache__/__init__.cpython-39.pyc,, +apischema/metadata/__pycache__/implem.cpython-39.pyc,, +apischema/metadata/__pycache__/keys.cpython-39.pyc,, +apischema/metadata/implem.py,sha256=w9PI1N9DZZ-rU7K_Vpu94n6fhXaYEtvJikqoJQ-wgU0,2872 +apischema/metadata/keys.py,sha256=N-BfR9OS_ZG-szx76nyre3JrBZQhdeckujnQMae44sI,704 +apischema/methods.py,sha256=9DRxvjU4b2pglUivJfKW4LMrlfGxybOyfciQNuq-pMY,4335 +apischema/objects/__init__.py,sha256=ZuxNNqyIVgE4-k7pSpG-K_Cckt3gLR7OZliVpcXf_B0,379 +apischema/objects/__pycache__/__init__.cpython-39.pyc,, +apischema/objects/__pycache__/conversions.cpython-39.pyc,, +apischema/objects/__pycache__/fields.cpython-39.pyc,, +apischema/objects/__pycache__/getters.cpython-39.pyc,, 
+apischema/objects/__pycache__/visitor.cpython-39.pyc,, +apischema/objects/conversions.py,sha256=AIzGrE-9WgeLPKzWoZ1wqTqq5vKDmu73cM4DPvXgAas,5892 +apischema/objects/fields.py,sha256=Jwxeyngy4jghzvJlKgCiUrxqmDNeLJwjyWRI-kNWHdc,7878 +apischema/objects/getters.py,sha256=buHbqEzMO4fjnxsUV4kaJNQxQDR44ftWqpiPbGUvsMY,4153 +apischema/objects/visitor.py,sha256=cyEKI4JOoANI91apXDC6Uk-sG7FJbrhD_sKybfkjaZQ,5308 +apischema/ordering.py,sha256=YvtfOZ7O1UXc9v4GTrNUMcKHgsIeQuUr2PDV9Rf8T8Q,3845 +apischema/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +apischema/recursion.py,sha256=MXDbGoBdXXpgvPc2K0zX3_CuJV0nPKzn0LvON9RxvbE,5313 +apischema/schemas/__init__.py,sha256=FHbk2aJBC2eTIs8F-i7muN8vYjYQEXotnXDn0Iov2gA,4065 +apischema/schemas/__pycache__/__init__.cpython-39.pyc,, +apischema/schemas/__pycache__/annotations.cpython-39.pyc,, +apischema/schemas/__pycache__/constraints.cpython-39.pyc,, +apischema/schemas/annotations.py,sha256=MXKI6qTTVJNYNQdqMbXGdcwo2VaMUdQKAZLdjFi_x6Y,1301 +apischema/schemas/constraints.py,sha256=z87DW_X40Xm4lz5vbzE_-uE-UjH9dijYSBjggqz_VKQ,5076 +apischema/serialization/__init__.py,sha256=xzSyDV-oOL0IS_LAPeW_2WooW2-rldjuebD8iFFuyis,20905 +apischema/serialization/__pycache__/__init__.cpython-39.pyc,, +apischema/serialization/__pycache__/serialized_methods.cpython-39.pyc,, +apischema/serialization/serialized_methods.py,sha256=_Yq9M9IptwIodVG02uXkQ13BUWYquqgNRWDTn9SUAuk,5189 +apischema/settings.py,sha256=_jUMcTiTGF8i4HRvyrqs53x68rTCmdOSipdvkVXloLU,3110 +apischema/skip.py,sha256=IavIlMoyGnIqcKhfqJ--gqcuQ1V89WCx1E-PgD2PPDg,428 +apischema/std_types.py,sha256=oZ7WQnPlbJV1tfbAeUQm1n4c9D9V31GyT7g-ZMLfGyE,2755 +apischema/tagged_unions.py,sha256=T1ANMdYsUv8V-c3fb6Y7cxp9g1zmCtVm08BNrjsizPk,5236 +apischema/type_names.py,sha256=PNZN9QIYniW1npbx6KHZ58PFz4efQbLL0kWYXsY5W10,3333 +apischema/types.py,sha256=tPhy3Tf6ByWdgcZiD_l8VnZnKTVpH_kfsbpZ7Gvvb24,2491 +apischema/typing.py,sha256=UMVCgZbTaYc4zF66jZHv8j-GD1z65DyHbOHrY-g9QYg,9735 
+apischema/utils.py,sha256=MzHdhCcM_gEoeDRxh6noO3OtqaXNdRZRr1fG-W6srqU,12431 +apischema/validation/__init__.py,sha256=gAtawUe-29vQi3KwDqDoGrCWZhbipGwvVVu1NDrHp8U,291 +apischema/validation/__pycache__/__init__.cpython-39.pyc,, +apischema/validation/__pycache__/dependencies.cpython-39.pyc,, +apischema/validation/__pycache__/errors.cpython-39.pyc,, +apischema/validation/__pycache__/mock.cpython-39.pyc,, +apischema/validation/__pycache__/validators.cpython-39.pyc,, +apischema/validation/dependencies.py,sha256=AIbs-9M_43RjNGGgVUJ_SnDQHa4plCnepQj6aGO07q0,2015 +apischema/validation/errors.py,sha256=mPOnZOsnPPwuqv-T5quj5FRkMB6I-ESZwqYGu_Y3kMA,4251 +apischema/validation/mock.py,sha256=gi8QBpmnKHcBAfZ_APeqSaGHpDUoinJwkHMdUY8vjtQ,1861 +apischema/validation/validators.py,sha256=myA0c2D0FViWWgbVHXdczSnXFZo7ld1GSgfWqKP-klU,6738 +apischema/visitor.py,sha256=mV3hfDOlcM1iyjEbMpaAuQvHLat0ZVPT9HXMHF2t9J0,7258 diff --git a/.venv/lib/python3.9/site-packages/apischema-0.16.6.dist-info/WHEEL b/.venv/lib/python3.9/site-packages/apischema-0.16.6.dist-info/WHEEL new file mode 100644 index 0000000..5bad85f --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema-0.16.6.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/.venv/lib/python3.9/site-packages/apischema-0.16.6.dist-info/top_level.txt b/.venv/lib/python3.9/site-packages/apischema-0.16.6.dist-info/top_level.txt new file mode 100644 index 0000000..f36dfb6 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema-0.16.6.dist-info/top_level.txt @@ -0,0 +1 @@ +apischema diff --git a/.venv/lib/python3.9/site-packages/apischema/__init__.py b/.venv/lib/python3.9/site-packages/apischema/__init__.py new file mode 100644 index 0000000..bb2bb8a --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/__init__.py @@ -0,0 +1,99 @@ +__all__ = [ + "PassThroughOptions", + "Undefined", + "UndefinedType", + "Unsupported", + "ValidationError", 
+ "alias", + "dependent_required", + "deserialization_method", + "deserialize", + "deserializer", + "identity", + "order", + "properties", + "schema", + "schema_ref", + "serialization_default", + "serialization_method", + "serialize", + "serialized", + "serializer", + "settings", + "type_name", + "validator", +] + +import warnings + +from . import ( # noqa: F401 + cache, + conversions, + dataclasses, + fields, + json_schema, + metadata, + objects, + skip, + tagged_unions, + validation, +) +from .aliases import alias +from .conversions import deserializer, serializer +from .dependencies import dependent_required +from .deserialization import deserialization_method, deserialize +from .metadata import properties +from .ordering import order +from .schemas import schema +from .serialization import ( + PassThroughOptions, + serialization_default, + serialization_method, + serialize, +) +from .serialization.serialized_methods import serialized +from .settings import settings +from .type_names import schema_ref, type_name +from .types import Undefined, UndefinedType +from .utils import identity +from .validation import ValidationError, validator +from .visitor import Unsupported + +try: + import graphql as _gql + + if _gql.__version__.startswith("2."): + warnings.warn( + f"graphql-core version {_gql.__version__} is incompatible with apischema;\n" + "GraphQL schema generation is thus not available." + ) + else: + from . import graphql # noqa: F401 + + __all__.append("graphql") + del _gql +except ImportError: + pass + + +def __getattr__(name): + if name == "graphql": + raise AttributeError( + "GraphQL feature requires graphql-core library\n" + "Run `pip install apischema[graphql]` to install it" + ) + if name == "skip": + warnings.warn("apischema.skip module is deprecated") + raise AttributeError(f"module {__name__!r} has no attribute {name!r}") + + +def register_default_conversions(): + """Handle standard library + internal types""" + from . 
import std_types # noqa: F401 + + deserializer(ValidationError.from_errors) + serializer(ValidationError.errors) + + +register_default_conversions() +del register_default_conversions diff --git a/.venv/lib/python3.9/site-packages/apischema/aliases.py b/.venv/lib/python3.9/site-packages/apischema/aliases.py new file mode 100644 index 0000000..572a1c5 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/aliases.py @@ -0,0 +1,53 @@ +from typing import Callable, MutableMapping, TypeVar, overload + +from apischema.cache import CacheAwareDict +from apischema.types import Metadata, MetadataImplem + +Aliaser = Callable[[str], str] +Cls = TypeVar("Cls", bound=type) + +_class_aliasers: MutableMapping[type, Aliaser] = CacheAwareDict({}) + +get_class_aliaser = _class_aliasers.get + + +@overload +def alias(alias_: str, *, override: bool = True) -> Metadata: + ... + + +@overload +def alias(override: bool) -> Metadata: + ... + + +@overload +def alias(aliaser: Aliaser) -> Callable[[Cls], Cls]: + ... 
__all__ = ["cache", "reset", "set_size"]

# Registry of every lru_cache wrapper created through cache(); reset() and
# set_size() operate on this list.
_cached: list = []

Func = TypeVar("Func", bound=Callable)


def cache(func: Func) -> Func:
    """Memoize *func* with an unbounded LRU cache and register it so it can be
    cleared via reset() or resized via set_size()."""
    memoized = cast(Func, lru_cache()(func))
    _cached.append(memoized)
    return memoized


def reset():
    """Clear every cache registered through cache()."""
    for memoized in _cached:
        memoized.cache_clear()


def set_size(size: int):
    """Re-wrap each registered function with an LRU cache bounded to *size* entries,
    rebinding the new wrapper in the function's defining module.

    NOTE: the resized wrapper is not re-registered in ``_cached``; subsequent
    reset() calls still clear the original registrations (matches upstream behavior).
    """
    for memoized in _cached:
        original = memoized.__wrapped__
        module = sys.modules[original.__module__]
        setattr(module, original.__name__, lru_cache(size)(original))
def __len__(self) -> int: + return len(self.wrapped) + + def __iter__(self) -> Iterator[K]: + return iter(self.wrapped) diff --git a/.venv/lib/python3.9/site-packages/apischema/conversions/__init__.py b/.venv/lib/python3.9/site-packages/apischema/conversions/__init__.py new file mode 100644 index 0000000..c30c8f2 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/conversions/__init__.py @@ -0,0 +1,43 @@ +__all__ = [ + "AnyConversion", + "Conversion", + "LazyConversion", + "as_names", + "as_str", + "dataclass_input_wrapper", + "deserializer", + "reset_deserializers", + "reset_serializer", + "serializer", +] + +import sys +import warnings + +from .conversions import AnyConversion, Conversion, LazyConversion +from .converters import ( + as_names, + as_str, + deserializer, + reset_deserializers, + reset_serializer, + serializer, +) +from .wrappers import dataclass_input_wrapper + +if sys.version_info >= (3, 7): + + def __getattr__(name): + if name == "identity": + from apischema.utils import identity # noqa: F811 + + warnings.warn( + "apischema.conversions.identity is deprecated, " + "use apischema.identity instead", + DeprecationWarning, + ) + return identity + raise AttributeError(f"module {__name__} has no attribute {name}") + +else: + from apischema.utils import identity # noqa: F401 diff --git a/.venv/lib/python3.9/site-packages/apischema/conversions/conversions.py b/.venv/lib/python3.9/site-packages/apischema/conversions/conversions.py new file mode 100644 index 0000000..e97b1cf --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/conversions/conversions.py @@ -0,0 +1,111 @@ +from dataclasses import dataclass +from functools import lru_cache +from typing import ( + Any, + Callable, + Collection, + Dict, + Generic, + List, + NewType, + Optional, + TYPE_CHECKING, + Tuple, + TypeVar, + Union, +) + +from apischema.conversions.utils import Converter, converter_types +from apischema.dataclasses import replace +from apischema.methods import 
is_method, method_class, method_wrapper +from apischema.types import AnyType +from apischema.typing import is_type_var +from apischema.utils import deprecate_kwargs, identity + +if TYPE_CHECKING: + pass + +ConvOrProp = TypeVar("ConvOrProp", Converter, property) + + +@dataclass(frozen=True) +class Conversion(Generic[ConvOrProp]): + converter: ConvOrProp + source: AnyType = None + target: AnyType = None + sub_conversion: Optional["AnyConversion"] = None + inherited: Optional[bool] = None + + +deprecate_kwargs({"sub_conversions": "sub_conversion"})(Conversion) + + +@dataclass(frozen=True) +class LazyConversion: + get: Callable[[], Optional["AnyConversion"]] + + def __post_init__(self): + object.__setattr__(self, "get", lru_cache(1)(self.get)) + + @property + def inherited(self) -> Optional[bool]: + conversion = self.get() # type: ignore + return isinstance(conversion, Conversion) and conversion.inherited + + +ConvOrFunc = Union[Conversion, Converter, property, LazyConversion] +AnyConversion = Union[ConvOrFunc, Tuple[ConvOrFunc, ...]] +DefaultConversion = Callable[[AnyType], Optional[AnyConversion]] + + +ResolvedConversion = NewType("ResolvedConversion", Conversion[Converter]) +ResolvedConversions = Tuple[ResolvedConversion, ...] 
# Tuple in order to be hashable + + +def resolve_conversion( + conversion: Union[Converter, property, Conversion], namespace: Dict[str, Any] = None +) -> ResolvedConversion: + if not isinstance(conversion, Conversion): + conversion = Conversion(conversion) + if is_method(conversion.converter): + if conversion.source is None: + conversion = replace(conversion, source=method_class(conversion.converter)) + conversion = replace(conversion, converter=method_wrapper(conversion.converter)) + assert not isinstance(conversion.converter, property) + source, target = converter_types( + conversion.converter, conversion.source, conversion.target, namespace + ) + return ResolvedConversion(replace(conversion, source=source, target=target)) + + +def resolve_any_conversion(conversion: Optional[AnyConversion]) -> ResolvedConversions: + if not conversion: + return () + result: List[ResolvedConversion] = [] + for conv in conversion if isinstance(conversion, Collection) else [conversion]: + if isinstance(conv, LazyConversion): + result.extend(resolve_any_conversion(conv.get())) # type: ignore + else: + result.append(resolve_conversion(conv)) + return tuple(result) + + +def handle_identity_conversion( + conversion: ResolvedConversion, tp: AnyType +) -> ResolvedConversion: + if ( + is_identity(conversion) + and conversion.source == conversion.target + and is_type_var(conversion.source) + ): + return ResolvedConversion(replace(conversion, source=tp, target=tp)) + else: + return conversion + + +def is_identity(conversion: ResolvedConversion) -> bool: + return ( + conversion.converter == identity + and conversion.source == conversion.target + and conversion.sub_conversion is None + ) diff --git a/.venv/lib/python3.9/site-packages/apischema/conversions/converters.py b/.venv/lib/python3.9/site-packages/apischema/conversions/converters.py new file mode 100644 index 0000000..09711fa --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/conversions/converters.py @@ -0,0 +1,208 @@ 
+import sys +from collections import defaultdict +from enum import Enum +from functools import partial +from types import new_class +from typing import ( + Callable, + List, + MutableMapping, + Optional, + TYPE_CHECKING, + Type, + TypeVar, + Union, + overload, +) + +from apischema.cache import CacheAwareDict +from apischema.conversions import LazyConversion +from apischema.conversions.conversions import ( + AnyConversion, + ConvOrFunc, + Conversion, + resolve_conversion, +) +from apischema.conversions.utils import Converter, is_convertible +from apischema.methods import MethodOrProperty, MethodWrapper, is_method, method_class +from apischema.type_names import type_name +from apischema.types import AnyType +from apischema.typing import is_type_var +from apischema.utils import get_args2, get_origin_or_type, stop_signature_abuse + +if TYPE_CHECKING: + pass + + +_deserializers: MutableMapping[AnyType, List[ConvOrFunc]] = CacheAwareDict( + defaultdict(list) +) +_serializers: MutableMapping[AnyType, ConvOrFunc] = CacheAwareDict({}) +Deserializer = TypeVar( + "Deserializer", bound=Union[Callable, Conversion, staticmethod, type] +) +Serializer = TypeVar("Serializer", bound=Union[Callable, Conversion, property, type]) + +default_deserialization: Callable[[type], Optional[AnyConversion]] +# defaultdict.get is not hashable in 3.7 +if sys.version_info < (3, 8): + + def default_deserialization(tp): + return _deserializers.get(tp) + +else: + default_deserialization = _deserializers.get # type: ignore + + +def default_serialization(tp: Type) -> Optional[AnyConversion]: + for sub_cls in getattr(tp, "__mro__", [tp]): + if sub_cls in _serializers: + conversion = _serializers[sub_cls] + if ( + sub_cls == tp + or not isinstance(conversion, (Conversion, LazyConversion)) + or conversion.inherited in (None, True) + ): + return conversion + else: + return None + + +def check_converter_type(tp: AnyType) -> AnyType: + origin = get_origin_or_type(tp) + if not is_convertible(tp): + raise 
TypeError(f"{origin} is not convertible") + if not all(map(is_type_var, get_args2(tp))): + raise TypeError("Generic conversion doesn't support specialization") + return origin + + +def _add_deserializer(conversion: ConvOrFunc, target: AnyType): + target = check_converter_type(target) + if conversion not in _deserializers[target]: + _deserializers[target].append(conversion) + + +class DeserializerDescriptor(MethodWrapper[staticmethod]): + def __set_name__(self, owner, name): + super().__set_name__(owner, name) + method = self._method.__get__(None, object) + resolved = resolve_conversion(method, {owner.__name__: owner}) + _add_deserializer(method, resolved.target) + + +@overload +def deserializer(deserializer: Deserializer) -> Deserializer: + ... + + +@overload +def deserializer( + *, lazy: Callable[[], Union[Converter, Conversion]], target: Type +) -> None: + ... + + +def deserializer( + deserializer: Deserializer = None, + *, + lazy: Callable[[], Union[Converter, Conversion]] = None, + target: Type = None, +): + if deserializer is not None: + if isinstance(deserializer, staticmethod): + return DeserializerDescriptor(deserializer) + elif isinstance(deserializer, LazyConversion): + stop_signature_abuse() + else: + resolved = resolve_conversion(deserializer) # type: ignore + _add_deserializer(deserializer, resolved.target) # type: ignore + return deserializer + elif lazy is not None and target is not None: + _add_deserializer(LazyConversion(lazy), target) + else: + stop_signature_abuse() + + +def _add_serializer(conversion: ConvOrFunc, source: AnyType): + source = check_converter_type(source) + _serializers[source] = conversion + + +class SerializerDescriptor(MethodWrapper[MethodOrProperty]): + def __set_name__(self, owner, name): + super().__set_name__(owner, name) + _add_serializer(self._method, source=owner) + + +@overload +def serializer(serializer: Serializer) -> Serializer: + ... 
+ + +@overload +def serializer( + *, lazy: Callable[[], Union[Converter, Conversion]], source: Type +) -> Callable[[Serializer], Serializer]: + ... + + +def serializer( + serializer: Serializer = None, + *, + lazy: Callable[[], Union[Converter, Conversion]] = None, + source: Type = None, +): + if serializer is not None: + if is_method(serializer) and method_class(serializer) is None: # type: ignore + return SerializerDescriptor(serializer) # type: ignore + elif isinstance(serializer, LazyConversion): + stop_signature_abuse() + else: + resolved = resolve_conversion(serializer) + _add_serializer(serializer, resolved.source) + return serializer + elif lazy is not None and source is not None: + _add_serializer(LazyConversion(lazy), source) + else: + stop_signature_abuse() + + +def reset_deserializers(cls: Type): + _deserializers.pop(cls, ...) + + +def reset_serializer(cls: Type): + _serializers.pop(cls, ...) + + +Cls = TypeVar("Cls", bound=type) + + +def as_str(cls: Cls) -> Cls: + deserializer(Conversion(cls, source=str)) + serializer(Conversion(str, source=cls)) + return cls + + +EnumCls = TypeVar("EnumCls", bound=Type[Enum]) + + +def as_names(cls: EnumCls, aliaser: Callable[[str], str] = lambda s: s) -> EnumCls: + # Enum requires to call namespace __setitem__ + def exec_body(namespace: dict): + for elt in cls: # type: ignore + namespace[elt.name] = aliaser(elt.name) + + if not issubclass(cls, Enum): + raise TypeError("as_names must be called with Enum subclass") + name_cls = type_name(None)( + new_class(cls.__name__, (str, Enum), exec_body=exec_body) + ) + deserializer(Conversion(partial(getattr, cls), source=name_cls, target=cls)) + + def get_name(obj): + return getattr(name_cls, obj.name) + + serializer(Conversion(get_name, source=cls, target=name_cls)) + return cls diff --git a/.venv/lib/python3.9/site-packages/apischema/conversions/dataclass_models.py b/.venv/lib/python3.9/site-packages/apischema/conversions/dataclass_models.py new file mode 100644 index 
0000000..84b9bca --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/conversions/dataclass_models.py @@ -0,0 +1,92 @@ +import warnings +from dataclasses import dataclass +from types import new_class +from typing import Callable, Optional, TYPE_CHECKING, Tuple, Type, Union + +from apischema.conversions import Conversion +from apischema.conversions.conversions import ResolvedConversion +from apischema.dataclasses import replace +from apischema.utils import PREFIX, identity + +if TYPE_CHECKING: + from apischema.deserialization.coercion import Coerce + +Model = Union[Type, Callable[[], Type]] + + +def check_model(origin: Type, model: Type): + if not isinstance(model, type): + raise TypeError("Dataclass model must be a dataclass") + if getattr(origin, "__parameters__", ()) != getattr(model, "__parameters__", ()): + raise TypeError("Dataclass model must have the same generic parameters") + + +MODEL_ORIGIN_ATTR = f"{PREFIX}model_origin" + +DATACLASS_ATTR = "_dataclass" + + +@dataclass(frozen=True) +class DataclassModel: + origin: Type + model: Model + fields_only: bool + + @property + def dataclass(self) -> Type: + if not hasattr(self, "_dataclass"): + origin = self.origin + if isinstance(self.model, type): + assert check_model(origin, self.model) is None + model = self.model + else: + model = self.model() + check_model(origin, model) + namespace = {"__new__": lambda _, *args, **kwargs: origin(*args, **kwargs)} + if not self.fields_only: + namespace[MODEL_ORIGIN_ATTR] = origin + cls = new_class( + model.__name__, (model,), exec_body=lambda ns: ns.update(namespace) + ) + object.__setattr__(self, "_dataclass", cls) + return getattr(self, "_dataclass") + + +def dataclass_model( + origin: Type, + model: Model, + *, + fields_only: bool = False, + additional_properties: Optional[bool] = None, + coercion: Optional["Coerce"] = None, + fall_back_on_default: Optional[bool] = None, + exclude_unset: Optional[bool] = None, +) -> Tuple[Conversion, Conversion]: + 
warnings.warn( + "dataclass_model is deprecated, use set_object_fields instead", + DeprecationWarning, + ) + if isinstance(model, type): + check_model(origin, model) + + model_type = DataclassModel(origin, model, fields_only) + return Conversion(identity, source=model_type, target=origin), Conversion( + identity, source=origin, target=model_type + ) + + +def has_model_origin(cls: Type) -> bool: + return hasattr(cls, MODEL_ORIGIN_ATTR) + + +def get_model_origin(cls: Type) -> Type: + return getattr(cls, MODEL_ORIGIN_ATTR) + + +def handle_dataclass_model(conversion: ResolvedConversion) -> ResolvedConversion: + conv: Conversion = conversion + if isinstance(conv.source, DataclassModel): + conv = replace(conv, source=conv.source.dataclass) + if isinstance(conv.target, DataclassModel): + conv = replace(conv, target=conv.target.dataclass) + return ResolvedConversion(conv) diff --git a/.venv/lib/python3.9/site-packages/apischema/conversions/utils.py b/.venv/lib/python3.9/site-packages/apischema/conversions/utils.py new file mode 100644 index 0000000..cd8d8e1 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/conversions/utils.py @@ -0,0 +1,80 @@ +from inspect import Parameter, signature +from typing import Any, Callable, Dict, Generic, Optional, Tuple, Type, cast + +from apischema.types import AnyType +from apischema.typing import ( + get_type_hints, + is_annotated, + is_literal, + is_new_type, + is_type, + is_union, +) +from apischema.utils import get_origin_or_type + +Converter = Callable[[Any], Any] + + +def converter_types( + converter: Converter, + source: Optional[AnyType] = None, + target: Optional[AnyType] = None, + namespace: Dict[str, Any] = None, +) -> Tuple[AnyType, AnyType]: + try: + # in pre 3.9, Generic __new__ perturb signature of types + if ( + isinstance(converter, type) + and converter.__new__ is Generic.__new__ is not object.__new__ + and converter.__init__ is not object.__init__ # type: ignore + ): + parameters = 
list(signature(converter.__init__).parameters.values())[1:] # type: ignore + else: + parameters = list(signature(converter).parameters.values()) + except ValueError: # builtin types + if target is None and is_type(converter): + target = cast(Type[Any], converter) + if source is None: + raise TypeError("Converter source is unknown") from None + else: + if not parameters: + raise TypeError("converter must have at least one parameter") + first_param, *other_params = parameters + for p in other_params: + if p.default is Parameter.empty and p.kind not in ( + Parameter.VAR_POSITIONAL, + Parameter.VAR_KEYWORD, + ): + raise TypeError( + "converter must have at most one parameter without default" + ) + if source is not None and target is not None: + return source, target + types = get_type_hints(converter, None, namespace, include_extras=True) + if not types and is_type(converter): + types = get_type_hints( + converter.__new__, None, namespace, include_extras=True + ) or get_type_hints( + converter.__init__, None, namespace, include_extras=True # type: ignore + ) + if source is None: + try: + source = types.pop(first_param.name) + except KeyError: + raise TypeError("converter source is unknown") from None + if target is None: + if is_type(converter): + target = cast(Type, converter) + else: + try: + target = types.pop("return") + except KeyError: + raise TypeError("converter target is unknown") from None + return source, target + + +def is_convertible(tp: AnyType) -> bool: + origin = get_origin_or_type(tp) + return is_new_type(tp) or ( + is_type(origin) and not (is_literal(tp) or is_annotated(tp) or is_union(origin)) + ) diff --git a/.venv/lib/python3.9/site-packages/apischema/conversions/visitor.py b/.venv/lib/python3.9/site-packages/apischema/conversions/visitor.py new file mode 100644 index 0000000..b9d58e0 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/conversions/visitor.py @@ -0,0 +1,245 @@ +from contextlib import contextmanager, suppress +from 
dataclasses import replace +from functools import lru_cache +from types import new_class +from typing import ( + Any, + Collection, + Generic, + Iterable, + Mapping, + Optional, + Sequence, + Tuple, + Type, + TypeVar, + Union, +) + +from apischema.conversions import LazyConversion +from apischema.conversions.conversions import ( + AnyConversion, + DefaultConversion, + ResolvedConversion, + ResolvedConversions, + handle_identity_conversion, + is_identity, + resolve_any_conversion, +) +from apischema.conversions.utils import is_convertible +from apischema.metadata.implem import ConversionMetadata +from apischema.metadata.keys import CONVERSION_METADATA +from apischema.type_names import type_name +from apischema.types import AnyType +from apischema.typing import get_args, is_type_var +from apischema.utils import ( + context_setter, + get_args2, + get_origin_or_type, + has_type_vars, + is_subclass, + substitute_type_vars, + subtyping_substitution, +) +from apischema.visitor import Result, Unsupported, Visitor + +Deserialization = ResolvedConversions +Serialization = ResolvedConversion +Conv = TypeVar("Conv") + + +class ConversionsVisitor(Visitor[Result], Generic[Conv, Result]): + def __init__(self, default_conversion: DefaultConversion): + self.default_conversion = default_conversion + self._conversion: Optional[AnyConversion] = None + + def _has_conversion( + self, tp: AnyType, conversion: Optional[AnyConversion] + ) -> Tuple[bool, Optional[Conv]]: + raise NotImplementedError + + def _annotated_conversion( + self, annotation: ConversionMetadata + ) -> Optional[AnyConversion]: + raise NotImplementedError + + def annotated(self, tp: AnyType, annotations: Sequence[Any]) -> Result: + for annotation in reversed(annotations): + if isinstance(annotation, Mapping) and CONVERSION_METADATA in annotation: + with self._replace_conversion( + self._annotated_conversion(annotation[CONVERSION_METADATA]) + ): + return super().annotated(tp, annotations) + return super().annotated(tp, 
annotations) + + def _union_results(self, alternatives: Iterable[AnyType]) -> Sequence[Result]: + results = [] + for alt in alternatives: + with suppress(Unsupported): + results.append(self.visit(alt)) + if not results: + raise Unsupported(Union[tuple(alternatives)]) + return results + + def _visited_union(self, results: Sequence[Result]) -> Result: + raise NotImplementedError + + def union(self, alternatives: Sequence[AnyType]) -> Result: + return self._visited_union(self._union_results(alternatives)) + + @contextmanager + def _replace_conversion(self, conversion: Optional[AnyConversion]): + with context_setter(self): + self._conversion = resolve_any_conversion(conversion) or None + yield + + def visit_with_conv( + self, tp: AnyType, conversion: Optional[AnyConversion] + ) -> Result: + with self._replace_conversion(conversion): + return self.visit(tp) + + def _visit_conversion( + self, + tp: AnyType, + conversion: Conv, + dynamic: bool, + next_conversion: Optional[AnyConversion], + ) -> Result: + raise NotImplementedError + + def visit_conversion( + self, + tp: AnyType, + conversion: Optional[Conv], + dynamic: bool, + next_conversion: Optional[AnyConversion] = None, + ) -> Result: + if conversion is not None: + return self._visit_conversion(tp, conversion, dynamic, next_conversion) + else: + with self._replace_conversion(next_conversion): + return super().visit(tp) + + def visit(self, tp: AnyType) -> Result: + if not is_convertible(tp): + return self.visit_conversion(tp, None, False, self._conversion) + dynamic, conversion = self._has_conversion(tp, self._conversion) + if not dynamic: + _, conversion = self._has_conversion( + tp, self.default_conversion(get_origin_or_type(tp)) # type: ignore + ) + next_conversion = None + if not dynamic and is_subclass(tp, Collection): + next_conversion = self._conversion + return self.visit_conversion(tp, conversion, dynamic, next_conversion) + + +def sub_conversion( + conversion: ResolvedConversion, next_conversion: 
Optional[AnyConversion] +) -> Optional[AnyConversion]: + return ( + LazyConversion(lambda: conversion.sub_conversion), + LazyConversion(lambda: next_conversion), + ) + + +@lru_cache(maxsize=0) +def self_deserialization_wrapper(cls: Type) -> Type: + wrapper = new_class( + f"{cls.__name__}SelfDeserializer", + (cls[cls.__parameters__] if has_type_vars(cls) else cls,), + exec_body=lambda ns: ns.update( + {"__new__": lambda _, *args, **kwargs: cls(*args, **kwargs)} + ), + ) + return type_name(None)(wrapper) + + +class DeserializationVisitor(ConversionsVisitor[Deserialization, Result]): + @staticmethod + def _has_conversion( + tp: AnyType, conversion: Optional[AnyConversion] + ) -> Tuple[bool, Optional[Deserialization]]: + identity_conv, result = False, [] + for conv in resolve_any_conversion(conversion): + conv = handle_identity_conversion(conv, tp) + if is_subclass(conv.target, tp): + if is_identity(conv): + if identity_conv: + continue + identity_conv = True + wrapper: AnyType = self_deserialization_wrapper( + get_origin_or_type(tp) + ) + if get_args(tp): + wrapper = wrapper[get_args(tp)] + conv = ResolvedConversion(replace(conv, source=wrapper)) + if is_type_var(conv.source) or any( + map(is_type_var, get_args2(conv.source)) + ): + _, substitution = subtyping_substitution(tp, conv.target) + conv = replace( + conv, source=substitute_type_vars(conv.source, substitution) + ) + result.append(ResolvedConversion(replace(conv, target=tp))) + if identity_conv and len(result) == 1: + return True, None + else: + return bool(result), tuple(result) or None + + def _annotated_conversion( + self, annotation: ConversionMetadata + ) -> Optional[AnyConversion]: + return annotation.deserialization + + def _visit_conversion( + self, + tp: AnyType, + conversion: Deserialization, + dynamic: bool, + next_conversion: Optional[AnyConversion], + ) -> Result: + results = [ + self.visit_with_conv(conv.source, sub_conversion(conv, next_conversion)) + for conv in conversion + ] + return 
self._visited_union(results) + + +class SerializationVisitor(ConversionsVisitor[Serialization, Result]): + @staticmethod + def _has_conversion( + tp: AnyType, conversion: Optional[AnyConversion] + ) -> Tuple[bool, Optional[Serialization]]: + for conv in resolve_any_conversion(conversion): + conv = handle_identity_conversion(conv, tp) + if is_subclass(tp, conv.source): + if is_identity(conv): + return True, None + if is_type_var(conv.target) or any( + map(is_type_var, get_args2(conv.target)) + ): + substitution, _ = subtyping_substitution(conv.source, tp) + conv = replace( + conv, target=substitute_type_vars(conv.target, substitution) + ) + return True, ResolvedConversion(replace(conv, source=tp)) + else: + return False, None + + def _annotated_conversion( + self, annotation: ConversionMetadata + ) -> Optional[AnyConversion]: + return annotation.serialization + + def _visit_conversion( + self, + tp: AnyType, + conversion: Serialization, + dynamic: bool, + next_conversion: Optional[AnyConversion], + ) -> Result: + return self.visit_with_conv( + conversion.target, sub_conversion(conversion, next_conversion) + ) diff --git a/.venv/lib/python3.9/site-packages/apischema/conversions/wrappers.py b/.venv/lib/python3.9/site-packages/apischema/conversions/wrappers.py new file mode 100644 index 0000000..3ebc393 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/conversions/wrappers.py @@ -0,0 +1,55 @@ +import warnings +from dataclasses import MISSING, field as field_, make_dataclass +from inspect import Parameter, iscoroutinefunction, signature +from typing import Any, Callable, Mapping, Tuple, Type + +from apischema.metadata import properties +from apischema.typing import get_type_hints +from apischema.utils import to_camel_case + + +def dataclass_input_wrapper( + func: Callable, parameters_metadata: Mapping[str, Mapping] = None +) -> Tuple[Callable, Type]: + warnings.warn( + "dataclass_input_wrapper is deprecated, use object_deserialization instead", + 
DeprecationWarning, + ) + parameters_metadata = parameters_metadata or {} + types = get_type_hints(func, include_extras=True) + fields = [] + params, kwargs_param = [], None + for param_name, param in signature(func).parameters.items(): + if param.kind is Parameter.POSITIONAL_ONLY: + raise TypeError("Positional only parameters are not supported") + field_type = types.get(param_name, Any) + if param.kind in {Parameter.POSITIONAL_OR_KEYWORD, Parameter.KEYWORD_ONLY}: + default = MISSING if param.default is Parameter.empty else param.default + field = field_( + default=default, metadata=parameters_metadata.get(param_name) + ) + fields.append((param_name, field_type, field)) + params.append(param_name) + if param.kind == Parameter.VAR_KEYWORD: + field = field_(default_factory=dict, metadata=properties) + fields.append((param_name, Mapping[str, field_type], field)) # type: ignore + kwargs_param = param_name + + input_cls = make_dataclass(to_camel_case(func.__name__), fields) + + def wrapper(input): + kwargs = {name: getattr(input, name) for name in params} + if kwargs_param: + kwargs.update(getattr(input, kwargs_param)) + return func(**kwargs) + + if iscoroutinefunction(func): + wrapped = wrapper + + async def wrapper(input): + return await wrapped(input) + + wrapper.__annotations__["input"] = input_cls + if "return" in func.__annotations__: + wrapper.__annotations__["return"] = func.__annotations__["return"] + return wrapper, input_cls diff --git a/.venv/lib/python3.9/site-packages/apischema/dataclasses.py b/.venv/lib/python3.9/site-packages/apischema/dataclasses.py new file mode 100644 index 0000000..4ac3469 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/dataclasses.py @@ -0,0 +1,26 @@ +# flake8: noqa +from dataclasses import * + + +def _replace(__obj, **changes): + from apischema.fields import FIELDS_SET_ATTR, fields_set, set_fields + from dataclasses import replace as replace_, _FIELDS, _FIELD_INITVAR # type: ignore + + # Fix 
https://bugs.python.org/issue36470 + assert is_dataclass(__obj) + for name, field in getattr(__obj, _FIELDS).items(): + if field._field_type == _FIELD_INITVAR and name not in changes: # type: ignore + if field.default is not MISSING: + changes[name] = field.default + elif field.default_factory is not MISSING: + changes[name] = field.default_factory() + + result = replace_(__obj, **changes) + if hasattr(__obj, FIELDS_SET_ATTR): + set_fields(result, *fields_set(__obj), *changes, overwrite=True) + return result + + +globals()[replace.__name__] = _replace + +del _replace diff --git a/.venv/lib/python3.9/site-packages/apischema/dependencies.py b/.venv/lib/python3.9/site-packages/apischema/dependencies.py new file mode 100644 index 0000000..7ee4ee9 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/dependencies.py @@ -0,0 +1,75 @@ +from collections import defaultdict +from dataclasses import dataclass +from typing import ( + AbstractSet, + Any, + Collection, + Dict, + List, + Mapping, + MutableMapping, + Set, + Tuple, + overload, +) + +from apischema.cache import CacheAwareDict +from apischema.objects.fields import check_field_or_name, get_field_name + +_dependent_requireds: MutableMapping[ + type, List[Tuple[Any, Collection[Any]]] +] = CacheAwareDict(defaultdict(list)) + +DependentRequired = Mapping[str, AbstractSet[str]] + + +def get_dependent_required(cls: type) -> DependentRequired: + result: Dict[str, Set[str]] = defaultdict(set) + for sub_cls in cls.__mro__: + for field, required in _dependent_requireds[sub_cls]: + result[get_field_name(field)].update(map(get_field_name, required)) + return result + + +@dataclass +class DependentRequiredDescriptor: + fields: Mapping[Any, Collection[Any]] + groups: Collection[Collection[Any]] + + def __set_name__(self, owner, name): + setattr(owner, name, None) + dependent_required(self.fields, *self.groups, owner=owner) + + +@overload +def dependent_required( + fields: Mapping[Any, Collection[Any]], *groups: 
Collection[Any], owner: type = None +): + ... + + +@overload +def dependent_required(*groups: Collection[Any], owner: type = None): + ... + + +def dependent_required(*groups: Collection[Any], owner: type = None): # type: ignore + if not groups: + return + fields: Mapping[Any, Collection[Any]] = {} + if isinstance(groups[0], Mapping): + fields, *groups = groups # type: ignore + if owner is None: + return DependentRequiredDescriptor(fields, groups) + else: + + dep_req = _dependent_requireds[owner] + for field, required in fields.items(): + dep_req.append((field, required)) + check_field_or_name(field) + for req in required: + check_field_or_name(req) + for group in map(list, groups): + for i, field in enumerate(group): + check_field_or_name(field) + dep_req.append((field, [group[:i], group[i:]])) diff --git a/.venv/lib/python3.9/site-packages/apischema/dependent_required.py b/.venv/lib/python3.9/site-packages/apischema/dependent_required.py new file mode 100644 index 0000000..d4a07ae --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/dependent_required.py @@ -0,0 +1,12 @@ +import warnings + +from apischema.dependencies import dependent_required + + +def DependentRequired(*args, **kwargs): + warnings.warn( + "apischema.dependent_required.DependentRequired is deprecated," + " use apischema.dependent_required instead", + DeprecationWarning, + ) + return dependent_required(*args, **kwargs) diff --git a/.venv/lib/python3.9/site-packages/apischema/deserialization/__init__.py b/.venv/lib/python3.9/site-packages/apischema/deserialization/__init__.py new file mode 100644 index 0000000..53d0eea --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/deserialization/__init__.py @@ -0,0 +1,924 @@ +from collections import defaultdict +from dataclasses import dataclass, replace +from enum import Enum +from functools import lru_cache +from typing import ( + AbstractSet, + Any, + Callable, + Collection, + Dict, + List, + Mapping, + Optional, + Pattern, + 
Sequence, + Set, + Tuple, + Type, + TypeVar, + overload, +) + +from apischema.aliases import Aliaser +from apischema.cache import cache +from apischema.conversions.conversions import AnyConversion, DefaultConversion +from apischema.conversions.visitor import ( + Deserialization, + DeserializationVisitor, + sub_conversion, +) +from apischema.dependencies import get_dependent_required +from apischema.deserialization.coercion import Coerce, Coercer +from apischema.deserialization.flattened import get_deserialization_flattened_aliases +from apischema.json_schema.patterns import infer_pattern +from apischema.json_schema.types import bad_type +from apischema.metadata.implem import ValidatorsMetadata +from apischema.metadata.keys import SCHEMA_METADATA, VALIDATORS_METADATA +from apischema.objects import ObjectField +from apischema.objects.fields import FieldKind +from apischema.objects.visitor import DeserializationObjectVisitor +from apischema.recursion import RecursiveConversionsVisitor +from apischema.schemas import Schema, get_schema +from apischema.schemas.constraints import Check, Constraints, merge_constraints +from apischema.types import AnyType, NoneType +from apischema.typing import get_args, get_origin +from apischema.utils import ( + Lazy, + PREFIX, + deprecate_kwargs, + get_origin_or_type, + literal_values, + opt_or, +) +from apischema.validation import get_validators +from apischema.validation.errors import ErrorKey, ValidationError, merge_errors +from apischema.validation.mock import ValidatorMock +from apischema.validation.validators import Validator, validate +from apischema.visitor import Unsupported + +MISSING_PROPERTY = "missing property" +UNEXPECTED_PROPERTY = "unexpected property" + +NOT_NONE = object() + +INIT_VARS_ATTR = f"{PREFIX}_init_vars" + +T = TypeVar("T") + + +DeserializationMethod = Callable[[Any], T] +Factory = Callable[[Optional[Constraints], Sequence[Validator]], DeserializationMethod] + + +@dataclass(frozen=True) +class 
DeserializationMethodFactory: + factory: Factory + cls: Optional[type] = None + constraints: Optional[Constraints] = None + validators: Tuple[Validator, ...] = () + + def merge( + self, constraints: Optional[Constraints], validators: Sequence[Validator] = () + ) -> "DeserializationMethodFactory": + if constraints is None and not validators: + return self + return replace( + self, + constraints=merge_constraints(self.constraints, constraints), + validators=(*validators, *self.validators), + ) + + @property # type: ignore + @lru_cache() + def method(self) -> DeserializationMethod: + return self.factory(self.constraints, self.validators) # type: ignore + + +def get_constraints(schema: Optional[Schema]) -> Optional[Constraints]: + return schema.constraints if schema is not None else None + + +def get_constraint_checks( + constraints: Optional[Constraints], cls: type +) -> Collection[Tuple[Check, Any, str]]: + return () if constraints is None else constraints.checks_by_type[cls] + + +class DeserializationMethodVisitor( + RecursiveConversionsVisitor[Deserialization, DeserializationMethodFactory], + DeserializationVisitor[DeserializationMethodFactory], + DeserializationObjectVisitor[DeserializationMethodFactory], +): + def __init__( + self, + additional_properties: bool, + aliaser: Aliaser, + coercer: Optional[Coercer], + default_conversion: DefaultConversion, + fall_back_on_default: bool, + ): + super().__init__(default_conversion) + self.additional_properties = additional_properties + self.aliaser = aliaser + self.coercer = coercer + self.fall_back_on_default = fall_back_on_default + + def _recursive_result( + self, lazy: Lazy[DeserializationMethodFactory] + ) -> DeserializationMethodFactory: + def factory( + constraints: Optional[Constraints], validators: Sequence[Validator] + ) -> DeserializationMethod: + rec_method = None + + def method(data: Any) -> Any: + nonlocal rec_method + if rec_method is None: + rec_method = lazy().merge(constraints, validators).method + 
return rec_method(data) + + return method + + return DeserializationMethodFactory(factory) + + def visit_not_recursive(self, tp: AnyType) -> DeserializationMethodFactory: + return deserialization_method_factory( + tp, + self.additional_properties, + self.aliaser, + self.coercer, + self._conversion, + self.default_conversion, + self.fall_back_on_default, + ) + + def annotated( + self, tp: AnyType, annotations: Sequence[Any] + ) -> DeserializationMethodFactory: + factory = super().annotated(tp, annotations) + for annotation in reversed(annotations): + if isinstance(annotation, Mapping): + factory = factory.merge( + get_constraints(annotation.get(SCHEMA_METADATA)), + annotation.get( + VALIDATORS_METADATA, ValidatorsMetadata(()) + ).validators, + ) + return factory + + def _factory( + self, factory: Factory, cls: Optional[type] = None, validation: bool = True + ) -> DeserializationMethodFactory: + def wrapper( + constraints: Optional[Constraints], validators: Sequence[Validator] + ) -> DeserializationMethod: + method: DeserializationMethod + if validation and validators: + wrapped, aliaser = factory(constraints, ()), self.aliaser + + def method(data: Any) -> Any: + result = wrapped(data) + validate(result, validators, aliaser=aliaser) + return result + + else: + method = factory(constraints, validators) + if self.coercer is not None and cls is not None: + coercer = self.coercer + + def wrapper(data: Any) -> Any: + assert cls is not None + return method(coercer(cls, data)) + + return wrapper + + else: + return method + + return DeserializationMethodFactory(wrapper, cls) + + def any(self) -> DeserializationMethodFactory: + def factory(constraints: Optional[Constraints], _) -> DeserializationMethod: + checks = None if constraints is None else constraints.checks_by_type + + def method(data: Any) -> Any: + if checks is not None: + if data.__class__ in checks: + errors = [ + err + for check, attr, err in checks[data.__class__] + if check(data, attr) + ] + if errors: + raise 
ValidationError(errors) + return data + + return method + + return self._factory(factory) + + def collection( + self, cls: Type[Collection], value_type: AnyType + ) -> DeserializationMethodFactory: + value_factory = self.visit(value_type) + + def factory(constraints: Optional[Constraints], _) -> DeserializationMethod: + deserialize_value = value_factory.method + checks = get_constraint_checks(constraints, list) + constructor: Optional[Callable[[list], Collection]] = None + if issubclass(cls, AbstractSet): + constructor = set + elif issubclass(cls, tuple): + constructor = tuple + + def method(data: Any) -> Any: + if not isinstance(data, list): + raise bad_type(data, list) + elt_errors: Dict[ErrorKey, ValidationError] = {} + values: list = [None] * len(data) + index = 0 # don't use `enumerate` for performance + for elt in data: + try: + values[index] = deserialize_value(elt) + except ValidationError as err: + elt_errors[index] = err + index += 1 + if checks: + errors = [err for check, attr, err in checks if check(data, attr)] + if errors or elt_errors: + raise ValidationError(errors, elt_errors) + elif elt_errors: + raise ValidationError([], elt_errors) + return constructor(values) if constructor else values + + return method + + return self._factory(factory, list) + + def enum(self, cls: Type[Enum]) -> DeserializationMethodFactory: + return self.literal(list(cls)) + + def literal(self, values: Sequence[Any]) -> DeserializationMethodFactory: + def factory(constraints: Optional[Constraints], _) -> DeserializationMethod: + value_map = dict(zip(literal_values(values), values)) + types = list(set(map(type, value_map))) if self.coercer else [] + error = f"not one of {list(value_map)}" + coercer = self.coercer + + def method(data: Any) -> Any: + try: + return value_map[data] + except KeyError: + if coercer: + for cls in types: + try: + return value_map[coercer(cls, data)] + except IndexError: + pass + raise ValidationError([error]) + # Unions with Literal can have not 
hashable data + except TypeError: + raise bad_type(data, *types) + + return method + + return self._factory(factory) + + def mapping( + self, cls: Type[Mapping], key_type: AnyType, value_type: AnyType + ) -> DeserializationMethodFactory: + key_factory, value_factory = self.visit(key_type), self.visit(value_type) + + def factory(constraints: Optional[Constraints], _) -> DeserializationMethod: + deserialize_key = key_factory.method + deserialize_value = value_factory.method + checks = get_constraint_checks(constraints, dict) + + def method(data: Any) -> Any: + if not isinstance(data, dict): + raise bad_type(data, dict) + item_errors: Dict[ErrorKey, ValidationError] = {} + items = {} + for key, value in data.items(): + assert isinstance(key, str) + try: + items[deserialize_key(key)] = deserialize_value(value) + except ValidationError as err: + item_errors[key] = err + if checks: + errors = [err for check, attr, err in checks if check(data, attr)] + if errors or item_errors: + raise ValidationError(errors, item_errors) + elif item_errors: + raise ValidationError([], item_errors) + return items + + return method + + return self._factory(factory, dict) + + def object( + self, tp: Type, fields: Sequence[ObjectField] + ) -> DeserializationMethodFactory: + field_factories = [ + self.visit_with_conv(f.type, f.deserialization).merge( + get_constraints(f.schema), f.validators + ) + for f in fields + ] + + def factory( + constraints: Optional[Constraints], validators: Sequence[Validator] + ) -> DeserializationMethod: + cls = get_origin_or_type(tp) + alias_by_name = {field.name: self.aliaser(field.alias) for field in fields} + requiring: Dict[str, Set[str]] = defaultdict(set) + for f, reqs in get_dependent_required(cls).items(): + for req in reqs: + requiring[req].add(alias_by_name[f]) + normal_fields, flattened_fields, pattern_fields = [], [], [] + additional_field = None + for field, field_factory in zip(fields, field_factories): + deserialize_field: DeserializationMethod = 
field_factory.method + fall_back_on_default = ( + field.fall_back_on_default or self.fall_back_on_default + ) + if field.flattened: + flattened_aliases = get_deserialization_flattened_aliases( + cls, field, self.default_conversion + ) + flattened_fields.append( + ( + field.name, + set(map(self.aliaser, flattened_aliases)), + deserialize_field, + fall_back_on_default, + ) + ) + elif field.pattern_properties is not None: + field_pattern = field.pattern_properties + if field_pattern is ...: + field_pattern = infer_pattern( + field.type, self.default_conversion + ) + assert isinstance(field_pattern, Pattern) + pattern_fields.append( + ( + field.name, + field_pattern, + deserialize_field, + fall_back_on_default, + ) + ) + elif field.additional_properties: + additional_field = ( + field.name, + deserialize_field, + fall_back_on_default, + ) + else: + normal_fields.append( + ( + field.name, + self.aliaser(field.alias), + deserialize_field, + field.required, + requiring[field.name], + fall_back_on_default, + ) + ) + has_aggregate_field = ( + flattened_fields or pattern_fields or (additional_field is not None) + ) + post_init_modified = {field.name for field in fields if field.post_init} + checks = get_constraint_checks(constraints, dict) + aliaser = self.aliaser + additional_properties = self.additional_properties + all_aliases = set(alias_by_name.values()) + init_defaults = [ + (f.name, f.default_factory) + for f in fields + if f.kind == FieldKind.WRITE_ONLY + ] + + def method(data: Any) -> Any: + if not isinstance(data, dict): + raise bad_type(data, dict) + values: Dict[str, Any] = {} + fields_count = 0 + errors = ( + [err for check, attr, err in checks if check(data, attr)] + if checks + else [] + ) + field_errors: Dict[ErrorKey, ValidationError] = {} + for ( + name, + alias, + deserialize_field, + required, + required_by, + fall_back_on_default, + ) in normal_fields: + if required: + try: + value = data[alias] + except KeyError: + field_errors[alias] = 
ValidationError([MISSING_PROPERTY]) + else: + fields_count += 1 + try: + values[name] = deserialize_field(value) + except ValidationError as err: + field_errors[alias] = err + elif alias in data: + fields_count += 1 + try: + values[name] = deserialize_field(data[alias]) + except ValidationError as err: + if not fall_back_on_default: + field_errors[alias] = err + elif required_by and not required_by.isdisjoint(data): + requiring = sorted(required_by & data.keys()) + msg = f"missing property (required by {requiring})" + field_errors[alias] = ValidationError([msg]) + if has_aggregate_field: + remain = data.keys() - all_aliases + for ( + name, + flattened_alias, + deserialize_field, + fall_back_on_default, + ) in flattened_fields: + flattened = { + alias: data[alias] + for alias in flattened_alias + if alias in data + } + remain.difference_update(flattened) + try: + values[name] = deserialize_field(flattened) + except ValidationError as err: + if not fall_back_on_default: + errors.extend(err.messages) + field_errors.update(err.children) + for ( + name, + pattern, + deserialize_field, + fall_back_on_default, + ) in pattern_fields: + matched = { + key: data[key] for key in remain if pattern.match(key) + } + remain.difference_update(matched) + try: + values[name] = deserialize_field(matched) + except ValidationError as err: + if not fall_back_on_default: + errors.extend(err.messages) + field_errors.update(err.children) + if additional_field: + name, deserialize_field, fall_back_on_default = additional_field + additional = {key: data[key] for key in remain} + try: + values[name] = deserialize_field(additional) + except ValidationError as err: + if not fall_back_on_default: + errors.extend(err.messages) + field_errors.update(err.children) + elif remain and not additional_properties: + for key in remain: + field_errors[key] = ValidationError([UNEXPECTED_PROPERTY]) + elif not additional_properties and len(data) != fields_count: + for key in data.keys() - all_aliases: + 
field_errors[key] = ValidationError([UNEXPECTED_PROPERTY]) + validators2: Sequence[Validator] + if validators: + init: Dict[str, Any] = {} + for name, default_factory in init_defaults: + if name in values: + init[name] = values[name] + elif name not in field_errors: + assert default_factory is not None + init[name] = default_factory() + # Don't keep validators when all dependencies are default + validators2 = [ + v + for v in validators + if not v.dependencies.isdisjoint(values.keys()) + ] + if field_errors or errors: + error = ValidationError(errors, field_errors) + invalid_fields = field_errors.keys() | post_init_modified + try: + validate( + ValidatorMock(cls, values), + [ + v + for v in validators2 + if v.dependencies.isdisjoint(invalid_fields) + ], + init, + aliaser=aliaser, + ) + except ValidationError as err: + error = merge_errors(error, err) + raise error + elif field_errors or errors: + raise ValidationError(errors, field_errors) + else: + validators2, init = (), ... # type: ignore # only for linter + try: + res = cls(**values) + except (AssertionError, ValidationError): + raise + except TypeError as err: + if str(err).startswith("__init__() got"): + raise Unsupported(cls) + else: + raise ValidationError([str(err)]) + except Exception as err: + raise ValidationError([str(err)]) + if validators: + validate(res, validators2, init, aliaser=aliaser) + return res + + return method + + return self._factory(factory, dict, validation=False) + + def primitive(self, cls: Type) -> DeserializationMethodFactory: + def factory(constraints: Optional[Constraints], _) -> DeserializationMethod: + checks = get_constraint_checks(constraints, cls) + if cls is NoneType: + + def method(data: Any) -> Any: + if data is not None: + raise bad_type(data, cls) + return data + + elif cls is not float and not checks: + + def method(data: Any) -> Any: + if not isinstance(data, cls): + raise bad_type(data, cls) + return data + + elif cls is not float and len(checks) == 1: + ((check, 
attr, err),) = checks + + def method(data: Any) -> Any: + if not isinstance(data, cls): + raise bad_type(data, cls) + elif check(data, attr): + raise ValidationError([err]) + return data + + else: + is_float = cls is float + + def method(data: Any) -> Any: + if not isinstance(data, cls): + if is_float and isinstance(data, int): + data = float(data) + else: + raise bad_type(data, cls) + if checks: + errors = [ + err for check, attr, err in checks if check(data, attr) + ] + if errors: + raise ValidationError(errors) + return data + + return method + + return self._factory(factory, cls) + + def subprimitive(self, cls: Type, superclass: Type) -> DeserializationMethodFactory: + primitive_factory = self.primitive(superclass) + + def factory( + constraints: Optional[Constraints], validators: Sequence[Validator] + ) -> DeserializationMethod: + deserialize_primitive = primitive_factory.merge( + constraints, validators + ).method + + def method(data: Any) -> Any: + return superclass(deserialize_primitive(data)) + + return method + + return replace(primitive_factory, factory=factory) + + def tuple(self, types: Sequence[AnyType]) -> DeserializationMethodFactory: + elt_factories = [self.visit(tp) for tp in types] + + def factory(constraints: Optional[Constraints], _) -> DeserializationMethod: + expected_len = len(types) + (_, _, min_err), (_, _, max_err) = Constraints( + min_items=len(types), max_items=len(types) + ).checks_by_type[list] + elt_methods = list(enumerate(fact.method for fact in elt_factories)) + checks = get_constraint_checks(constraints, list) + + def method(data: Any) -> Any: + if not isinstance(data, list): + raise bad_type(data, list) + if len(data) != expected_len: + raise ValidationError([min_err, max_err]) + elt_errors: Dict[ErrorKey, ValidationError] = {} + elts: List[Any] = [None] * expected_len + for i, deserialize_elt in elt_methods: + try: + elts[i] = deserialize_elt(data[i]) + except ValidationError as err: + elt_errors[i] = err + if checks: + errors 
= [err for check, attr, err in checks if check(data, attr)] + if errors or elt_errors: + raise ValidationError(errors, elt_errors) + elif elt_errors: + raise ValidationError([], elt_errors) + return tuple(elts) + + return method + + return self._factory(factory, list) + + def union(self, alternatives: Sequence[AnyType]) -> DeserializationMethodFactory: + alt_factories = self._union_results(alternatives) + if len(alt_factories) == 1: + return alt_factories[0] + + def factory(constraints: Optional[Constraints], _) -> DeserializationMethod: + alt_methods = [fact.merge(constraints).method for fact in alt_factories] + # method_by_cls cannot replace alt_methods, because there could be several + # methods for one class + method_by_cls = dict(zip((f.cls for f in alt_factories), alt_methods)) + if NoneType in alternatives and len(alt_methods) == 2: + deserialize_alt = next( + meth + for fact, meth in zip(alt_factories, alt_methods) + if fact.cls is not NoneType + ) + coercer = self.coercer + + def method(data: Any) -> Any: + if data is None: + return None + try: + return deserialize_alt(data) + except ValidationError as err: + if coercer and coercer(NoneType, data) is None: + return None + else: + raise merge_errors(err, bad_type(data, NoneType)) + + elif None not in method_by_cls and len(method_by_cls) == len(alt_factories): + classes = tuple(cls for cls in method_by_cls if cls is not None) + + def method(data: Any) -> Any: + try: + return method_by_cls[data.__class__](data) + except KeyError: + raise bad_type(data, *classes) from None + except ValidationError as err: + other_classes = ( + cls for cls in classes if cls is not data.__class__ + ) + raise merge_errors(err, bad_type(data, *other_classes)) + + else: + + def method(data: Any) -> Any: + error = None + for deserialize_alt in alt_methods: + try: + return deserialize_alt(data) + except ValidationError as err: + error = merge_errors(error, err) + assert error is not None + raise error + + return method + + return 
self._factory(factory) + + def _visit_conversion( + self, + tp: AnyType, + conversion: Deserialization, + dynamic: bool, + next_conversion: Optional[AnyConversion], + ) -> DeserializationMethodFactory: + assert conversion + conv_factories = [ + self.visit_with_conv(conv.source, sub_conversion(conv, next_conversion)) + for conv in conversion + ] + + def factory(constraints: Optional[Constraints], _) -> DeserializationMethod: + conv_methods = [ + ((fact if dynamic else fact.merge(constraints)).method, conv.converter) + for conv, fact in zip(conversion, conv_factories) + ] + method: DeserializationMethod + if len(conv_methods) == 1: + deserialize_alt, converter = conv_methods[0] + + def method(data: Any) -> Any: + try: + return converter(deserialize_alt(data)) + except (ValidationError, AssertionError): + raise + except Exception as err: + raise ValidationError([str(err)]) + + else: + + def method(data: Any) -> Any: + error: Optional[ValidationError] = None + for deserialize_alt, converter in conv_methods: + try: + value = deserialize_alt(data) + except ValidationError as err: + error = merge_errors(error, err) + else: + try: + return converter(value) + except (ValidationError, AssertionError): + raise + except Exception as err: + raise ValidationError([str(err)]) + assert error is not None + raise error + + return method + + return self._factory(factory, validation=not dynamic) + + def visit_conversion( + self, + tp: AnyType, + conversion: Optional[Deserialization], + dynamic: bool, + next_conversion: Optional[AnyConversion] = None, + ) -> DeserializationMethodFactory: + factory = super().visit_conversion(tp, conversion, dynamic, next_conversion) + if not dynamic: + factory = factory.merge(get_constraints(get_schema(tp)), get_validators(tp)) + if get_args(tp): + factory = factory.merge( + get_constraints(get_schema(get_origin(tp))), + get_validators(get_origin(tp)), + ) + return factory + + +@cache +def deserialization_method_factory( + tp: AnyType, + 
additional_properties: bool, + aliaser: Aliaser, + coercer: Optional[Coercer], + conversion: Optional[AnyConversion], + default_conversion: DefaultConversion, + fall_back_on_default: bool, +) -> DeserializationMethodFactory: + return DeserializationMethodVisitor( + additional_properties, + aliaser, + coercer, + default_conversion, + fall_back_on_default, + ).visit_with_conv(tp, conversion) + + +@overload +def deserialization_method( + type: Type[T], + *, + additional_properties: bool = None, + aliaser: Aliaser = None, + coerce: Coerce = None, + conversion: AnyConversion = None, + default_conversion: DefaultConversion = None, + fall_back_on_default: bool = None, + schema: Schema = None, +) -> DeserializationMethod[T]: + ... + + +@overload +def deserialization_method( + type: AnyType, + *, + additional_properties: bool = None, + aliaser: Aliaser = None, + coerce: Coerce = None, + conversion: AnyConversion = None, + default_conversion: DefaultConversion = None, + fall_back_on_default: bool = None, + schema: Schema = None, +) -> DeserializationMethod: + ... 
+ + +def deserialization_method( + type: AnyType, + *, + additional_properties: bool = None, + aliaser: Aliaser = None, + coerce: Coerce = None, + conversion: AnyConversion = None, + default_conversion: DefaultConversion = None, + fall_back_on_default: bool = None, + schema: Schema = None, +) -> DeserializationMethod: + from apischema import settings + + coercer: Optional[Coercer] = None + if callable(coerce): + coercer = coerce + elif opt_or(coerce, settings.deserialization.coerce): + coercer = settings.deserialization.coercer + return ( + deserialization_method_factory( + type, + opt_or(additional_properties, settings.additional_properties), + opt_or(aliaser, settings.aliaser), + coercer, + conversion, + opt_or(default_conversion, settings.deserialization.default_conversion), + opt_or(fall_back_on_default, settings.deserialization.fall_back_on_default), + ) + .merge(get_constraints(schema), ()) + .method + ) + + +@overload +def deserialize( + type: Type[T], + data: Any, + *, + additional_properties: bool = None, + aliaser: Aliaser = None, + coerce: Coerce = None, + conversion: AnyConversion = None, + default_conversion: DefaultConversion = None, + fall_back_on_default: bool = None, + schema: Schema = None, +) -> T: + ... + + +@overload +def deserialize( + type: AnyType, + data: Any, + *, + additional_properties: bool = None, + aliaser: Aliaser = None, + coerce: Coerce = None, + conversion: AnyConversion = None, + default_conversion: DefaultConversion = None, + fall_back_on_default: bool = None, + schema: Schema = None, +) -> Any: + ... 
+ + +@deprecate_kwargs( + { + "coercion": "coerce", + "conversions": "conversion", + "default_fallback": "fall_back_on_default", + } +) +def deserialize( + type: AnyType, + data: Any, + *, + additional_properties: bool = None, + aliaser: Aliaser = None, + coerce: Coerce = None, + conversion: AnyConversion = None, + default_conversion: DefaultConversion = None, + fall_back_on_default: bool = None, + schema: Schema = None, +) -> Any: + return deserialization_method( + type, + additional_properties=additional_properties, + aliaser=aliaser, + coerce=coerce, + conversion=conversion, + default_conversion=default_conversion, + fall_back_on_default=fall_back_on_default, + schema=schema, + )(data) diff --git a/.venv/lib/python3.9/site-packages/apischema/deserialization/coercion.py b/.venv/lib/python3.9/site-packages/apischema/deserialization/coercion.py new file mode 100644 index 0000000..5eb0ad6 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/deserialization/coercion.py @@ -0,0 +1,55 @@ +from typing import Any, Callable, Dict, Type, TypeVar, Union + +from apischema.json_schema.types import bad_type +from apischema.types import NoneType + +T = TypeVar("T") + +Coercer = Callable[[Type[T], Any], T] + +_bool_pairs = ( + ("0", "1"), + ("f", "t"), + ("n", "y"), + ("no", "yes"), + ("false", "true"), + ("off", "on"), + ("ko", "ok"), +) +STR_TO_BOOL: Dict[str, bool] = {} +for false, true in _bool_pairs: + for s, value in ((false, False), (true, True)): + STR_TO_BOOL[s.lower()] = value +STR_NONE_VALUES = {""} + + +def coerce(cls: Type[T], data: Any) -> T: + if cls is NoneType: + if data is None or data in STR_NONE_VALUES: + return None # type: ignore + else: + raise bad_type(data, cls) + elif isinstance(data, cls): + return data + elif cls is bool: + if isinstance(data, str): + return STR_TO_BOOL[data.lower()] # type: ignore + elif isinstance(data, int): + return bool(data) # type: ignore + else: + raise bad_type(data, cls) + elif cls in (int, float): + try: + return 
cls(data) # type: ignore + except ValueError: + raise bad_type(data, cls) + elif cls is str: + if isinstance(data, (int, float)) and not isinstance(data, bool): + return str(data) # type: ignore + else: + raise bad_type(data, cls) + else: + raise bad_type(data, cls) + + +Coerce = Union[bool, Coercer] diff --git a/.venv/lib/python3.9/site-packages/apischema/deserialization/flattened.py b/.venv/lib/python3.9/site-packages/apischema/deserialization/flattened.py new file mode 100644 index 0000000..51ee4a7 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/deserialization/flattened.py @@ -0,0 +1,46 @@ +from typing import Iterator, Mapping, Sequence, Type + +from apischema.conversions.conversions import DefaultConversion +from apischema.conversions.visitor import DeserializationVisitor +from apischema.objects import ObjectField +from apischema.objects.visitor import DeserializationObjectVisitor +from apischema.types import AnyType +from apischema.utils import get_origin_or_type +from apischema.visitor import Unsupported + + +class InitFlattenedAliasVisitor( + DeserializationObjectVisitor[Iterator[str]], DeserializationVisitor[Iterator[str]] +): + def mapping( + self, cls: Type[Mapping], key_type: AnyType, value_type: AnyType + ) -> Iterator[str]: + yield from () + + def object(self, tp: AnyType, fields: Sequence[ObjectField]) -> Iterator[str]: + for field in fields: + if field.flattened: + yield from get_deserialization_flattened_aliases( + get_origin_or_type(tp), field, self.default_conversion + ) + elif not field.is_aggregate: + yield field.alias + + def _visited_union(self, results: Sequence[Iterator[str]]) -> Iterator[str]: + if len(results) != 1: + raise NotImplementedError + return results[0] + + +def get_deserialization_flattened_aliases( + cls: Type, field: ObjectField, default_conversion: DefaultConversion +) -> Iterator[str]: + assert field.flattened + try: + yield from InitFlattenedAliasVisitor(default_conversion).visit_with_conv( + field.type, 
field.deserialization + ) + except (NotImplementedError, Unsupported): + raise TypeError( + f"Flattened field {cls.__name__}.{field.name} must have an object type" + ) from None diff --git a/.venv/lib/python3.9/site-packages/apischema/fields.py b/.venv/lib/python3.9/site-packages/apischema/fields.py new file mode 100644 index 0000000..015cd50 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/fields.py @@ -0,0 +1,145 @@ +__all__ = ["fields_set", "is_set", "set_fields", "unset_fields", "with_fields_set"] +from dataclasses import ( # type: ignore + Field, + _FIELD, + _FIELDS, + _FIELD_INITVAR, + is_dataclass, +) +from functools import wraps +from inspect import signature +from typing import AbstractSet, Any, Collection, Set, Type, TypeVar, cast + +from apischema.objects.fields import get_field_name +from apischema.utils import PREFIX + +FIELDS_SET_ATTR = f"{PREFIX}fields_set" +_ALREADY_SET = f"{PREFIX}already_set" + +Cls = TypeVar("Cls", bound=Type) + +_fields_set_classes: Set[type] = set() + + +def support_fields_set(cls: type) -> bool: + return any(base in _fields_set_classes for base in cls.__mro__) + + +def with_fields_set(cls: Cls) -> Cls: + from apischema.metadata.keys import DEFAULT_AS_SET_METADATA + + init_fields = set() + post_init_fields = set() + if is_dataclass(cls): + for field in getattr(cls, _FIELDS).values(): + assert isinstance(field, Field) + if field._field_type == _FIELD_INITVAR: # type: ignore + init_fields.add(field.name) + if field._field_type == _FIELD and not field.init: # type: ignore + post_init_fields.add(field.name) + if field.metadata.get(DEFAULT_AS_SET_METADATA): + post_init_fields.add(field.name) + params = list(signature(cls.__init__).parameters)[1:] + old_new = cls.__new__ + old_init = cls.__init__ + old_setattr = cls.__setattr__ + + def new_new(*args, **kwargs): + if old_new is object.__new__: + obj = object.__new__(args[0]) + else: + obj = old_new(*args, **kwargs) + # Initialize FIELD_SET_ATTR in order to prevent 
inherited class which override + # __init__ to raise in __setattr__ + obj.__dict__[FIELDS_SET_ATTR] = set() + return obj + + def new_init(self, *args, **kwargs): + prev_fields_set = self.__dict__.get(FIELDS_SET_ATTR, set()).copy() + self.__dict__[FIELDS_SET_ATTR] = set() + try: + old_init(self, *args, **kwargs) + except TypeError as err: + if str(err) == no_dataclass_init_error: + raise RuntimeError(dataclass_before_error) from None + else: + raise + arg_fields = {*params[: len(args)], *kwargs} - init_fields + self.__dict__[FIELDS_SET_ATTR] = prev_fields_set | arg_fields | post_init_fields + + def new_setattr(self, attr, value): + try: + self.__dict__[FIELDS_SET_ATTR].add(attr) + except KeyError: + raise RuntimeError(dataclass_before_error) from None + old_setattr(self, attr, value) + + for attr, old, new in [ + ("__new__", old_new, new_new), + ("__init__", old_init, new_init), + ("__setattr__", old_setattr, new_setattr), + ]: + if hasattr(old, _ALREADY_SET): + continue + setattr(new, _ALREADY_SET, True) + setattr(cls, attr, wraps(old)(new)) # type: ignore + + _fields_set_classes.add(cls) + return cls + + +no_dataclass_init_error = ( + "object.__init__() takes exactly one argument (the instance to initialize)" +) +dataclass_before_error = ( + f"{with_fields_set.__name__} must be put before dataclass decorator" +) + + +T = TypeVar("T") + + +def _field_names(fields: Collection) -> AbstractSet[str]: + result: Set[str] = set() + for field in fields: + result.add(get_field_name(field)) + return result + + +def _fields_set(obj: Any) -> Set[str]: + try: + return getattr(obj, FIELDS_SET_ATTR) + except AttributeError: + raise TypeError( + f"Type {obj.__class__} is not decorated" f" with {with_fields_set.__name__}" + ) + + +def set_fields(obj: T, *fields: Any, overwrite=False) -> T: + if overwrite: + _fields_set(obj).clear() + _fields_set(obj).update(map(get_field_name, fields)) + return obj + + +def unset_fields(obj: T, *fields: Any) -> T: + 
_fields_set(obj).difference_update(map(get_field_name, fields)) + return obj + + +# This could just be an alias with a specified type, but it's better handled by IDE +# like this +def fields_set(obj: Any) -> AbstractSet[str]: + return _fields_set(obj) + + +class FieldIsSet: + def __init__(self, obj: Any): + self.fields_set = fields_set(obj) + + def __getattribute__(self, name: str) -> bool: + return name in object.__getattribute__(self, "fields_set") + + +def is_set(obj: T) -> T: + return cast(T, FieldIsSet(obj)) diff --git a/.venv/lib/python3.9/site-packages/apischema/graphql/__init__.py b/.venv/lib/python3.9/site-packages/apischema/graphql/__init__.py new file mode 100644 index 0000000..1ee5c17 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/graphql/__init__.py @@ -0,0 +1,23 @@ +__all__ = [ + "ID", + "Mutation", + "Query", + "Subscription", + "graphql_schema", + "interface", + "relay", + "resolver", +] + + +try: + from .schema import ID, Query, Mutation, Subscription, graphql_schema + from .interfaces import interface + from .resolvers import resolver + from . 
import relay +except ImportError: + raise + raise ImportError( + "GraphQL feature requires graphql-core library\n" + "Run `pip install apischema[graphql]` to install it" + ) diff --git a/.venv/lib/python3.9/site-packages/apischema/graphql/interfaces.py b/.venv/lib/python3.9/site-packages/apischema/graphql/interfaces.py new file mode 100644 index 0000000..cb5920d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/graphql/interfaces.py @@ -0,0 +1,18 @@ +from typing import Collection, Set, Type, TypeVar + +_interfaces: Set[Type] = set() + +Cls = TypeVar("Cls", bound=Type) + + +def interface(cls: Cls) -> Cls: + _interfaces.add(cls) + return cls + + +def is_interface(cls: Type) -> bool: + return cls in _interfaces + + +def get_interfaces(cls: Type) -> Collection[Type]: + return list(filter(is_interface, cls.__mro__[1:])) diff --git a/.venv/lib/python3.9/site-packages/apischema/graphql/relay/__init__.py b/.venv/lib/python3.9/site-packages/apischema/graphql/relay/__init__.py new file mode 100644 index 0000000..801da31 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/graphql/relay/__init__.py @@ -0,0 +1,17 @@ +__all__ = [ + "ClientMutationId", + "Connection", + "Edge", + "GlobalId", + "Mutation", + "Node", + "PageInfo", + "base64_encoding", + "mutations", + "node", + "nodes", +] +from .connections import Connection, Edge, PageInfo +from .global_identification import GlobalId, Node, node, nodes +from .mutations import ClientMutationId, Mutation, mutations +from .utils import base64_encoding diff --git a/.venv/lib/python3.9/site-packages/apischema/graphql/relay/connections.py b/.venv/lib/python3.9/site-packages/apischema/graphql/relay/connections.py new file mode 100644 index 0000000..50a8ed2 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/graphql/relay/connections.py @@ -0,0 +1,85 @@ +from dataclasses import dataclass +from typing import Generic, Optional, Sequence, Type, TypeVar + +from apischema.type_names import get_type_name, 
type_name +from apischema.types import NoneType +from apischema.typing import generic_mro, get_args, get_origin +from apischema.utils import get_args2, is_union_of, wrap_generic_init_subclass + +Cursor_ = TypeVar("Cursor_") +Node_ = TypeVar("Node_") + + +def get_node_name(tp): + if is_union_of(tp, NoneType) and len(get_args2(tp)): + tp = next(arg for arg in get_args2(tp) if arg is not NoneType) + ref = get_type_name(tp).graphql + if ref is None: + raise TypeError( + f"Node {tp} must have a ref registered to be used with connection" + ) + return ref + + +def edge_name(tp: Type["Edge"], *args) -> str: + for base in generic_mro(tp[tuple(args)] if args else tp): # type: ignore + if get_origin(base) == Edge: + return f"{get_node_name(get_args(base)[0])}Edge" + raise NotImplementedError + + +@type_name(graphql=edge_name) +@dataclass +class Edge(Generic[Node_, Cursor_]): + node: Node_ + cursor: Cursor_ + + @wrap_generic_init_subclass + def __init_subclass__(cls, **kwargs): + super().__init_subclass__(**kwargs) + type_name(graphql=edge_name)(cls) + + +@type_name(graphql=lambda *_: "PageInfo") +@dataclass +class PageInfo(Generic[Cursor_]): + has_previous_page: bool = False + has_next_page: bool = False + start_cursor: Optional[Cursor_] = None + end_cursor: Optional[Cursor_] = None + + @staticmethod + def from_edges( + edges: Sequence[Optional[Edge[Node_, Cursor_]]], + has_previous_page: bool = False, + has_next_page: bool = False, + ) -> "PageInfo": + start_cursor, end_cursor = None, None + if edges is not None: + if edges[0] is not None: + start_cursor = edges[0].cursor + if edges[-1] is not None: + end_cursor = edges[-1].cursor + return PageInfo(has_previous_page, has_next_page, start_cursor, end_cursor) + + +def connection_name(tp: Type["Connection"], *args) -> str: + for base in generic_mro(tp[tuple(args)] if args else tp): # type: ignore + if get_origin(base) == Connection: + return f"{get_node_name(get_args(base)[0])}Connection" + raise NotImplementedError + + +Edge_ 
= TypeVar("Edge_", bound=Edge) + + +@type_name(graphql=connection_name) +@dataclass +class Connection(Generic[Node_, Cursor_, Edge_]): + edges: Optional[Sequence[Optional[Edge_]]] + page_info: PageInfo[Cursor_] + + @wrap_generic_init_subclass + def __init_subclass__(cls, **kwargs): + super().__init_subclass__(**kwargs) + type_name(graphql=connection_name)(cls) diff --git a/.venv/lib/python3.9/site-packages/apischema/graphql/relay/global_identification.py b/.venv/lib/python3.9/site-packages/apischema/graphql/relay/global_identification.py new file mode 100644 index 0000000..ccc92d0 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/graphql/relay/global_identification.py @@ -0,0 +1,160 @@ +from abc import ABC, abstractmethod +from dataclasses import dataclass, field +from typing import ( + Awaitable, + ClassVar, + Collection, + Dict, + Generic, + List, + Type, + TypeVar, + Union, + cast, +) + +import graphql + +from apischema import deserialize, deserializer, serialize, serializer, type_name +from apischema.graphql import ID, interface, resolver +from apischema.metadata import skip +from apischema.ordering import order +from apischema.type_names import get_type_name +from apischema.typing import generic_mro, get_args, get_origin +from apischema.utils import PREFIX, has_type_vars, wrap_generic_init_subclass + +ID_TYPE_ATTR = f"{PREFIX}id_type" + + +class InvalidGlobalId(Exception): + def __init__(self, value: str): + self.value = value + + def __str__(self): + return f"{self.value} is not a valid global id" + + +class NotANode(Exception): + def __init__(self, node_type: str): + self.node_type = node_type + + def __str__(self): + return f"{self.node_type} is not a Node" + + +Node_ = TypeVar("Node_", bound="Node") + + +@dataclass +class GlobalId(Generic[Node_]): + id: str + node_type: Type[Node_] + + +@deserializer +def deserialize_global_id(global_id: ID) -> GlobalId: + try: + node_key, id = global_id.split(":") + except ValueError: + raise 
InvalidGlobalId(global_id) from None + try: + return GlobalId(id, _nodes[node_key]) + except KeyError: + raise NotANode(node_key) from None + + +@serializer +def serialize_global_id(global_id: GlobalId) -> ID: + return ID(f"{global_id.node_type._node_key()}:{global_id.id}") + + +Id = TypeVar("Id") + + +@type_name(graphql=lambda *_: "Node") +@interface +@dataclass # type: ignore +class Node(Generic[Id], ABC): + id: Id = field(metadata=skip) + global_id: ClassVar[property] + + @property # type: ignore + def global_id(self: Node_) -> GlobalId[Node_]: + return self.id_to_global(self.id) + + @classmethod + def id_from_global(cls: Type[Node_], global_id: GlobalId[Node_]) -> Id: + if global_id.node_type != cls: + raise ValueError( + f"Expected {cls.__name__} global id," + f" found {global_id.node_type.__name__} global id" + ) + id_type = getattr(cls, ID_TYPE_ATTR) + # Use coercion to handle integer id + return cast(Id, deserialize(id_type, global_id.id, coerce=True)) + + @classmethod + def id_to_global(cls: Type[Node_], id: Id) -> GlobalId[Node_]: + return GlobalId(str(serialize(getattr(cls, ID_TYPE_ATTR), id)), cls) + + @classmethod + @abstractmethod + def get_by_id( + cls: Type[Node_], id: Id, info: graphql.GraphQLResolveInfo = None + ) -> Union[Node_, Awaitable[Node_]]: + raise NotImplementedError + + @classmethod + def _node_key(cls) -> str: + node_name = get_type_name(cls).graphql + if node_name is None: + raise TypeError(f"Node {cls} has no type_name registered") + return node_name + + @wrap_generic_init_subclass + def __init_subclass__(cls, not_a_node: bool = False, **kwargs): + super().__init_subclass__(**kwargs) # type: ignore + if not not_a_node: + _tmp_nodes.append(cls) + + +resolver("id", order=order(-1))( + Node.global_id +) # cannot directly decorate property because py36 + +_tmp_nodes: List[Type[Node]] = [] +_nodes: Dict[str, Type[Node]] = {} + + +def process_node(node_cls: Type[Node]): + if has_type_vars(node_cls) or node_cls.get_by_id is Node.get_by_id: + 
return + for base in node_cls.__mro__: + if base != Node and Node.get_by_id.__name__ in base.__dict__: + if not isinstance( + base.__dict__[Node.get_by_id.__name__], (classmethod, staticmethod) + ): + raise TypeError( + f"{node_cls.__name__}.get_by_id must be a" + f" classmethod/staticmethod" + ) + break + for base in generic_mro(node_cls): + if get_origin(base) == Node: + setattr(node_cls, ID_TYPE_ATTR, get_args(base)[0]) + _nodes[node_cls._node_key()] = node_cls + break + else: + raise TypeError("Node type parameter Id must be specialized") + + +def nodes() -> Collection[Type[Node]]: + for node_cls in _tmp_nodes: + process_node(node_cls) + return list(_nodes.values()) + + +def node(id: ID, info: graphql.GraphQLResolveInfo = None) -> Node: + global_id = deserialize_global_id(id) + node_type = global_id.node_type + return node_type.get_by_id(node_type.id_from_global(global_id), info) diff --git a/.venv/lib/python3.9/site-packages/apischema/graphql/relay/mutations.py b/.venv/lib/python3.9/site-packages/apischema/graphql/relay/mutations.py new file mode 100644 index 0000000..42a2345 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/graphql/relay/mutations.py @@ -0,0 +1,136 @@ +from dataclasses import Field, MISSING, field, make_dataclass +from functools import wraps +from inspect import Parameter, signature +from typing import ( + Awaitable, + Callable, + ClassVar, + Collection, + Iterator, + List, + NewType, + Optional, + Tuple, + Type, + TypeVar, +) + +from graphql.pyutils import camel_to_snake + +from apischema.aliases import alias +from apischema.graphql.schema import Mutation as Mutation_ +from apischema.schemas import Schema +from apischema.serialization.serialized_methods import ErrorHandler +from apischema.type_names import type_name +from apischema.types import AnyType, Undefined +from apischema.typing import get_type_hints +from apischema.utils import is_async, is_union_of, wrap_generic_init_subclass + +ClientMutationId = 
NewType("ClientMutationId", str) +type_name(None)(ClientMutationId) +CLIENT_MUTATION_ID = "client_mutation_id" +M = TypeVar("M", bound="Mutation") + + +class Mutation: + _error_handler: ClassVar[ErrorHandler] = Undefined + _schema: ClassVar[Optional[Schema]] = None + _client_mutation_id: ClassVar[Optional[bool]] = None + _mutation: ClassVar[Mutation_] # set in __init_subclass__ + + # Mutate is not defined to prevent Mypy warning about signature of superclass + mutate: ClassVar[Callable] + + @wrap_generic_init_subclass + def __init_subclass__(cls, **kwargs): + super().__init_subclass__(**kwargs) + if not hasattr(cls, "mutate"): + return + if not isinstance(cls.__dict__["mutate"], (classmethod, staticmethod)): + raise TypeError(f"{cls.__name__}.mutate must be a classmethod/staticmethod") + mutate = getattr(cls, "mutate") + type_name(f"{cls.__name__}Payload")(cls) + types = get_type_hints(mutate, localns={cls.__name__: cls}, include_extras=True) + async_mutate = is_async(mutate, types) + fields: List[Tuple[str, AnyType, Field]] = [] + cmi_param = None + for param_name, param in signature(mutate).parameters.items(): + if param.kind is Parameter.POSITIONAL_ONLY: + raise TypeError("Positional only parameters are not supported") + if param.kind in {Parameter.POSITIONAL_OR_KEYWORD, Parameter.KEYWORD_ONLY}: + if param_name not in types: + raise TypeError("Mutation parameters must be typed") + field_type = types[param_name] + field_ = MISSING if param.default is Parameter.empty else param.default + if is_union_of(field_type, ClientMutationId): + cmi_param = param_name + if cls._client_mutation_id is False: + if field_ is MISSING: + raise TypeError( + "Cannot have a ClientMutationId parameter" + " when _client_mutation_id = False" + ) + continue + elif cls._client_mutation_id is True: + field_ = MISSING + field_ = field(default=field_, metadata=alias(CLIENT_MUTATION_ID)) + fields.append((param_name, field_type, field_)) + field_names = [name for (name, _, _) in fields] + if 
cmi_param is None and cls._client_mutation_id is not False: + fields.append( + ( + CLIENT_MUTATION_ID, + ClientMutationId + if cls._client_mutation_id + else Optional[ClientMutationId], + MISSING if cls._client_mutation_id else None, + ) + ) + cmi_param = CLIENT_MUTATION_ID + input_cls = make_dataclass(f"{cls.__name__}Input", fields) + + def wrapper(input): + return mutate(**{name: getattr(input, name) for name in field_names}) + + wrapper.__annotations__["input"] = input_cls + wrapper.__annotations__["return"] = Awaitable[cls] if async_mutate else cls + if cls._client_mutation_id is not False: + cls.__annotations__[CLIENT_MUTATION_ID] = input_cls.__annotations__[ + cmi_param + ] + setattr(cls, CLIENT_MUTATION_ID, field(init=False)) + wrapped = wrapper + + if async_mutate: + + async def wrapper(input): + result = await wrapped(input) + setattr(result, CLIENT_MUTATION_ID, getattr(input, cmi_param)) + return result + + else: + + def wrapper(input): + result = wrapped(input) + setattr(result, CLIENT_MUTATION_ID, getattr(input, cmi_param)) + return result + + wrapper = wraps(wrapped)(wrapper) + + cls._mutation = Mutation_( + function=wrapper, + alias=camel_to_snake(cls.__name__), + schema=cls._schema, + error_handler=cls._error_handler, + ) + + +def _mutations(cls: Type[Mutation] = Mutation) -> Iterator[Type[Mutation]]: + for base in cls.__subclasses__(): + if hasattr(base, "_mutation"): + yield base + yield from _mutations(base) + + +def mutations() -> Collection[Mutation_]: + return [mut._mutation for mut in _mutations()] diff --git a/.venv/lib/python3.9/site-packages/apischema/graphql/relay/utils.py b/.venv/lib/python3.9/site-packages/apischema/graphql/relay/utils.py new file mode 100644 index 0000000..ee64a24 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/graphql/relay/utils.py @@ -0,0 +1,12 @@ +from base64 import b64decode, b64encode + + +def decode_base_64(s: str) -> str: + return b64decode(s).decode() + + +def encode_base64(s: str) -> str: + 
return b64encode(s.encode()).decode() + + +base64_encoding = (decode_base_64, encode_base64) diff --git a/.venv/lib/python3.9/site-packages/apischema/graphql/resolvers.py b/.venv/lib/python3.9/site-packages/apischema/graphql/resolvers.py new file mode 100644 index 0000000..14e0c45 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/graphql/resolvers.py @@ -0,0 +1,336 @@ +from collections import defaultdict +from dataclasses import dataclass +from enum import Enum +from functools import lru_cache +from inspect import Parameter, signature +from typing import ( + Any, + Awaitable, + Callable, + Collection, + Dict, + Iterator, + Mapping, + MutableMapping, + Optional, + Sequence, + Tuple, + Type, + TypeVar, + overload, +) + +import graphql + +from apischema import UndefinedType +from apischema.aliases import Aliaser +from apischema.cache import CacheAwareDict, cache +from apischema.conversions import Conversion +from apischema.conversions.conversions import AnyConversion, DefaultConversion +from apischema.deserialization import deserialization_method +from apischema.methods import method_registerer +from apischema.objects import ObjectField +from apischema.ordering import Ordering +from apischema.schemas import Schema +from apischema.serialization import ( + PassThroughOptions, + SerializationMethod, + SerializationMethodVisitor, +) +from apischema.serialization.serialized_methods import ( + ErrorHandler, + SerializedMethod, + _get_methods, + serialized as register_serialized, +) +from apischema.types import AnyType, NoneType, Undefined +from apischema.typing import is_type +from apischema.utils import ( + awaitable_origin, + deprecate_kwargs, + empty_dict, + get_args2, + get_origin_or_type2, + identity, + is_async, + is_union_of, + keep_annotations, +) +from apischema.validation.errors import ValidationError + + +class PartialSerializationMethodVisitor(SerializationMethodVisitor): + use_cache = False + + @property + def _factory(self) -> Callable[[type], 
SerializationMethod]: + return lambda _: identity + + def enum(self, cls: Type[Enum]) -> SerializationMethod: + return identity + + def object(self, tp: AnyType, fields: Sequence[ObjectField]) -> SerializationMethod: + return identity + + def visit(self, tp: AnyType) -> SerializationMethod: + if tp is UndefinedType: + return lambda obj: None + return super().visit(tp) + + +@cache +def partial_serialization_method_factory( + aliaser: Aliaser, + conversion: Optional[AnyConversion], + default_conversion: DefaultConversion, +) -> Callable[[AnyType], SerializationMethod]: + @lru_cache() + def factory(tp: AnyType) -> SerializationMethod: + return PartialSerializationMethodVisitor( + additional_properties=False, + aliaser=aliaser, + check_type=False, + default_conversion=default_conversion, + exclude_defaults=False, + exclude_none=False, + exclude_unset=False, + fall_back_on_any=False, + pass_through_options=PassThroughOptions(), + ).visit_with_conv(tp, conversion) + + return factory + + +def unwrap_awaitable(tp: AnyType) -> AnyType: + if get_origin_or_type2(tp) == awaitable_origin: + return keep_annotations(get_args2(tp)[0] if get_args2(tp) else Any, tp) + else: + return tp + + +@dataclass(frozen=True) +class Resolver(SerializedMethod): + parameters: Sequence[Parameter] + parameters_metadata: Mapping[str, Mapping] + + def error_type(self) -> AnyType: + return unwrap_awaitable(super().error_type()) + + def return_type(self, return_type: AnyType) -> AnyType: + return super().return_type(unwrap_awaitable(return_type)) + + +_resolvers: MutableMapping[Type, Dict[str, Resolver]] = CacheAwareDict( + defaultdict(dict) +) + + +def get_resolvers(tp: AnyType) -> Collection[Tuple[Resolver, Mapping[str, AnyType]]]: + return _get_methods(tp, _resolvers) + + +def none_error_handler( + __error: Exception, __obj: Any, __info: graphql.GraphQLResolveInfo, **kwargs +) -> None: + return None + + +def resolver_parameters( + resolver: Callable, *, check_first: bool +) -> Iterator[Parameter]: + 
first = True + for param in signature(resolver).parameters.values(): + if param.kind is Parameter.POSITIONAL_ONLY: + raise TypeError("Resolver can not have positional only parameters") + if param.kind in {Parameter.POSITIONAL_OR_KEYWORD, Parameter.KEYWORD_ONLY}: + if param.annotation is Parameter.empty and (check_first or not first): + raise TypeError("Resolver parameters must be typed") + yield param + first = False + + +MethodOrProp = TypeVar("MethodOrProp", Callable, property) + + +@overload +def resolver(__method_or_property: MethodOrProp) -> MethodOrProp: + ... + + +@overload +def resolver( + alias: str = None, + *, + conversion: AnyConversion = None, + error_handler: ErrorHandler = Undefined, + order: Optional[Ordering] = None, + schema: Schema = None, + parameters_metadata: Mapping[str, Mapping] = None, + serialized: bool = False, + owner: Type = None, +) -> Callable[[MethodOrProp], MethodOrProp]: + ... + + +@deprecate_kwargs({"conversions": "conversion"}) +def resolver( + __arg=None, + *, + alias: str = None, + conversion: AnyConversion = None, + error_handler: ErrorHandler = Undefined, + order: Optional[Ordering] = None, + schema: Schema = None, + parameters_metadata: Mapping[str, Mapping] = None, + serialized: bool = False, + owner: Type = None, +): + def register(func: Callable, owner: Type, alias2: str): + alias2 = alias or alias2 + _, *parameters = resolver_parameters(func, check_first=owner is None) + error_handler2 = error_handler + if error_handler2 is None: + error_handler2 = none_error_handler + elif error_handler2 is Undefined: + error_handler2 = None + resolver = Resolver( + func, + alias2, + conversion, + error_handler2, + order, + schema, + parameters, + parameters_metadata or {}, + ) + _resolvers[owner][alias2] = resolver + if serialized: + if is_async(func): + raise TypeError("Async resolver cannot be used as a serialized method") + try: + register_serialized( + alias=alias2, + conversion=conversion, + schema=schema, + 
error_handler=error_handler, + owner=owner, + )(func) + except Exception: + raise TypeError("Resolver cannot be used as a serialized method") + + if isinstance(__arg, str): + alias = __arg + __arg = None + return method_registerer(__arg, owner, register) + + +T = TypeVar("T") +U = TypeVar("U") + + +def as_async(func: Callable[[T], U]) -> Callable[[Awaitable[T]], Awaitable[U]]: + async def wrapper(arg: Awaitable[T]) -> U: + return func(await arg) + + return wrapper + + +def resolver_resolve( + resolver: Resolver, + types: Mapping[str, AnyType], + aliaser: Aliaser, + default_deserialization: DefaultConversion, + default_serialization: DefaultConversion, + serialized: bool = True, +) -> Callable: + # graphql deserialization will give Enum objects instead of strings + def handle_enum(tp: AnyType) -> Optional[AnyConversion]: + if is_type(tp) and issubclass(tp, Enum): + return Conversion(identity, source=Any, target=tp) + return default_deserialization(tp) + + parameters, info_parameter = [], None + for param in resolver.parameters: + param_type = types[param.name] + if is_union_of(param_type, graphql.GraphQLResolveInfo): + info_parameter = param.name + else: + param_field = ObjectField( + param.name, + param_type, + param.default is Parameter.empty, + resolver.parameters_metadata.get(param.name, empty_dict), + param.default, + ) + deserializer = deserialization_method( + param_type, + additional_properties=False, + aliaser=aliaser, + coerce=False, + conversion=param_field.deserialization, + default_conversion=handle_enum, + fall_back_on_default=False, + schema=param_field.schema, + ) + opt_param = is_union_of(param_type, NoneType) or param.default is None + parameters.append( + ( + aliaser(param_field.alias), + param.name, + deserializer, + opt_param, + param_field.required, + ) + ) + func, error_handler = resolver.func, resolver.error_handler + method_factory = partial_serialization_method_factory( + aliaser, resolver.conversion, default_serialization + ) + + 
serialize_result: Callable[[Any], Any] + if not serialized: + serialize_result = identity + elif is_async(resolver.func): + serialize_result = as_async(method_factory(types["return"])) + else: + serialize_result = method_factory(types["return"]) + serialize_error: Optional[Callable[[Any], Any]] + if error_handler is None: + serialize_error = None + elif is_async(error_handler): + serialize_error = as_async(method_factory(resolver.error_type())) + else: + serialize_error = method_factory(resolver.error_type()) + + def resolve(__self, __info, **kwargs): + values = {} + errors: Dict[str, ValidationError] = {} + for alias, param_name, deserializer, opt_param, required in parameters: + if alias in kwargs: + # It is possible for the parameter to be non-optional in Python + # type hints but optional in the generated schema. In this case + # we should ignore it. + # See: https://github.com/wyfo/apischema/pull/130#issuecomment-845497392 + if not opt_param and kwargs[alias] is None: + assert not required + continue + try: + values[param_name] = deserializer(kwargs[alias]) + except ValidationError as err: + errors[aliaser(param_name)] = err + elif opt_param and required: + values[param_name] = None + + if errors: + raise ValueError(ValidationError(children=errors).errors) + if info_parameter: + values[info_parameter] = __info + try: + return serialize_result(func(__self, **values)) + except Exception as error: + if error_handler is None: + raise + assert serialize_error is not None + return serialize_error(error_handler(error, __self, __info, **kwargs)) + + return resolve diff --git a/.venv/lib/python3.9/site-packages/apischema/graphql/schema.py b/.venv/lib/python3.9/site-packages/apischema/graphql/schema.py new file mode 100644 index 0000000..2b2a4fc --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/graphql/schema.py @@ -0,0 +1,1040 @@ +from dataclasses import dataclass, field as field_, replace +from enum import Enum +from functools import wraps +from inspect 
import Parameter, iscoroutinefunction +from itertools import chain +from typing import ( + Any, + AsyncIterable, + AsyncIterator, + Callable, + Collection, + Dict, + Generic, + Iterable, + List, + Mapping, + NewType, + Optional, + Sequence, + Tuple, + Type, + TypeVar, + Union, + cast, +) + +import graphql + +from apischema import settings +from apischema.aliases import Aliaser +from apischema.conversions.conversions import AnyConversion, DefaultConversion +from apischema.conversions.visitor import ( + Conv, + Deserialization, + DeserializationVisitor, + Serialization, + SerializationVisitor, +) +from apischema.graphql.interfaces import get_interfaces, is_interface +from apischema.graphql.resolvers import ( + Resolver, + get_resolvers, + none_error_handler, + partial_serialization_method_factory, + resolver_parameters, + resolver_resolve, +) +from apischema.json_schema.schema import get_field_schema, get_method_schema, get_schema +from apischema.metadata.keys import SCHEMA_METADATA +from apischema.objects import ObjectField +from apischema.objects.visitor import ( + DeserializationObjectVisitor, + ObjectVisitor, + SerializationObjectVisitor, +) +from apischema.ordering import Ordering, sort_by_order +from apischema.recursion import RecursiveConversionsVisitor +from apischema.schemas import Schema, merge_schema +from apischema.serialization import SerializationMethod, serialize +from apischema.serialization.serialized_methods import ErrorHandler +from apischema.type_names import TypeName, TypeNameFactory, get_type_name +from apischema.types import AnyType, NoneType, OrderedDict, Undefined, UndefinedType +from apischema.typing import get_args, get_origin, is_annotated +from apischema.utils import ( + Lazy, + as_predicate, + context_setter, + deprecate_kwargs, + empty_dict, + get_args2, + get_origin2, + get_origin_or_type, + identity, + is_union_of, + to_camel_case, +) + +JsonScalar = graphql.GraphQLScalarType("JSON") +if graphql.version_info >= (3, 1, 2): + 
JsonScalar.specified_by_url = ( + "http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf" + ) +GRAPHQL_PRIMITIVE_TYPES = { + int: graphql.GraphQLInt, + float: graphql.GraphQLFloat, + str: graphql.GraphQLString, + bool: graphql.GraphQLBoolean, +} + +ID = NewType("ID", str) + + +class MissingName(Exception): + pass + + +class Nullable(Exception): + pass + + +T = TypeVar("T") +Thunk = Union[Callable[[], T], T] + +TypeThunk = Thunk[graphql.GraphQLType] + + +def exec_thunk(thunk: TypeThunk, *, non_null=None) -> Any: + result = thunk if isinstance(thunk, graphql.GraphQLType) else thunk() + if non_null is True and not isinstance(result, graphql.GraphQLNonNull): + return graphql.GraphQLNonNull(result) # type: ignore + if non_null is False and isinstance(result, graphql.GraphQLNonNull): + return result.of_type + return result + + +def get_parameter_schema( + func: Callable, parameter: Parameter, field: ObjectField +) -> Optional[Schema]: + from apischema import settings + + return merge_schema( + settings.base_schema.parameter(func, parameter, field.alias), field.schema + ) + + +def merged_schema( + schema: Optional[Schema], tp: Optional[AnyType] +) -> Tuple[Optional[Schema], Mapping[str, Any]]: + if is_annotated(tp): + for annotation in reversed(get_args(tp)[1:]): + if isinstance(annotation, TypeNameFactory): + break + elif isinstance(annotation, Mapping) and SCHEMA_METADATA in annotation: + schema = merge_schema(annotation[SCHEMA_METADATA], schema) + schema_dict: Dict[str, Any] = {} + if schema is not None: + schema.merge_into(schema_dict) + return schema, schema_dict + + +def get_description( + schema: Optional[Schema], tp: Optional[AnyType] = None +) -> Optional[str]: + _, schema_dict = merged_schema(schema, tp) + return schema_dict.get("description") + + +def get_deprecated( + schema: Optional[Schema], tp: Optional[AnyType] = None +) -> Optional[str]: + schema, schema_dict = merged_schema(schema, tp) + if not schema_dict.get("deprecated", False): + 
return None + while schema is not None: + if schema.annotations is not None: + if isinstance(schema.annotations.deprecated, str): + return schema.annotations.deprecated + elif schema.annotations.deprecated: + return graphql.DEFAULT_DEPRECATION_REASON + schema = schema.child + return graphql.DEFAULT_DEPRECATION_REASON + + +@dataclass(frozen=True) +class ResolverField: + resolver: Resolver + types: Mapping[str, AnyType] + parameters: Sequence[Parameter] + metadata: Mapping[str, Mapping] + subscribe: Optional[Callable] = None + + +IdPredicate = Callable[[AnyType], bool] +UnionNameFactory = Callable[[Sequence[str]], str] + + +GraphQLTp = TypeVar("GraphQLTp", graphql.GraphQLInputType, graphql.GraphQLOutputType) + +FactoryFunction = Callable[[Optional[str], Optional[str]], GraphQLTp] + + +@dataclass(frozen=True) +class TypeFactory(Generic[GraphQLTp]): + factory: FactoryFunction[GraphQLTp] + name: Optional[str] = None + description: Optional[str] = None + # non_null cannot be a field because it can not be forward to factories called in + # wrapping factories (e.g. 
recursive wrapper) + + def merge( + self, type_name: TypeName = TypeName(), schema: Optional[Schema] = None + ) -> "TypeFactory[GraphQLTp]": + if type_name == TypeName() and schema is None: + return self + return replace( + self, + name=type_name.graphql or self.name, + description=get_description(schema) or self.description, + ) + + @property + def type(self) -> GraphQLTp: + return self.factory(self.name, self.description) # type: ignore + + @property + def raw_type(self) -> GraphQLTp: + tp = self.type + return tp.of_type if isinstance(tp, graphql.GraphQLNonNull) else tp + + +def unwrap_name(name: Optional[str], tp: AnyType) -> str: + if name is None: + raise TypeError(f"Missing name for {tp}") + return name + + +Method = TypeVar("Method", bound=Callable[..., TypeFactory]) + + +def cache_type(method: Method) -> Method: + @wraps(method) + def wrapper(self: "SchemaBuilder", *args, **kwargs): + factory = method(self, *args, **kwargs) + + @wraps(factory.factory) # type: ignore + def name_cache( + name: Optional[str], description: Optional[str] + ) -> graphql.GraphQLNonNull: + if name is None: + return graphql.GraphQLNonNull(factory.factory(name, description)) # type: ignore + # Method is in cache key because scalar types will have the same method, + # and then be shared by both visitors, while input/output types will have + # their own cache entry. 
+ if (name, method, description) in self._cache_by_name: + tp, cached_args = self._cache_by_name[(name, method, description)] + if cached_args == (args, kwargs): + return tp + tp = graphql.GraphQLNonNull(factory.factory(name, description)) # type: ignore + # Don't put args in cache in order to avoid hashable issue + self._cache_by_name[(name, method, description)] = (tp, (args, kwargs)) + return tp + + return replace(factory, factory=name_cache) + + return cast(Method, wrapper) + + +class SchemaBuilder( + RecursiveConversionsVisitor[Conv, TypeFactory[GraphQLTp]], + ObjectVisitor[TypeFactory[GraphQLTp]], +): + types: Tuple[Type[graphql.GraphQLType], ...] + + def __init__( + self, + aliaser: Aliaser, + enum_aliaser: Aliaser, + enum_schemas: Mapping[Enum, Schema], + default_conversion: DefaultConversion, + id_type: graphql.GraphQLScalarType, + is_id: Optional[IdPredicate], + ): + super().__init__(default_conversion) + self.aliaser = aliaser + self.enum_aliaser = enum_aliaser + self.enum_schemas = enum_schemas + self.id_type = id_type + self.is_id = is_id or (lambda t: False) + self._cache_by_name: Dict[ + Tuple[str, Callable, Optional[str]], + Tuple[graphql.GraphQLNonNull, Tuple[tuple, dict]], + ] = {} + + def _recursive_result( + self, lazy: Lazy[TypeFactory[GraphQLTp]] + ) -> TypeFactory[GraphQLTp]: + def factory(name: Optional[str], description: Optional[str]) -> GraphQLTp: + cached_fact = lazy() + return cached_fact.factory( # type: ignore + name or cached_fact.name, description or cached_fact.description + ) + + return TypeFactory(factory) + + def annotated( + self, tp: AnyType, annotations: Sequence[Any] + ) -> TypeFactory[GraphQLTp]: + factory = super().annotated(tp, annotations) + type_name = False + for annotation in reversed(annotations): + if isinstance(annotation, TypeNameFactory): + if type_name: + break + type_name = True + factory = factory.merge(annotation.to_type_name(tp)) + if isinstance(annotation, Mapping): + if type_name: + factory = 
factory.merge(schema=annotation.get(SCHEMA_METADATA)) + return factory # type: ignore + + @cache_type + def any(self) -> TypeFactory[GraphQLTp]: + def factory( + name: Optional[str], description: Optional[str] + ) -> graphql.GraphQLScalarType: + if name is None: + return JsonScalar + else: + return graphql.GraphQLScalarType(name, description=description) + + return TypeFactory(factory) + + @cache_type + def collection( + self, cls: Type[Collection], value_type: AnyType + ) -> TypeFactory[GraphQLTp]: + return TypeFactory(lambda *_: graphql.GraphQLList(self.visit(value_type).type)) + + @cache_type + def enum(self, cls: Type[Enum]) -> TypeFactory[GraphQLTp]: + def factory( + name: Optional[str], description: Optional[str] + ) -> graphql.GraphQLEnumType: + return graphql.GraphQLEnumType( + unwrap_name(name, cls), + { + self.enum_aliaser(name): graphql.GraphQLEnumValue( + member, + get_description(self.enum_schemas.get(member)), + get_deprecated(self.enum_schemas.get(member)), + ) + for name, member in cls.__members__.items() + }, + description=description, + ) + + return TypeFactory(factory) + + @cache_type + def literal(self, values: Sequence[Any]) -> TypeFactory[GraphQLTp]: + from apischema.typing import Literal + + if not all(isinstance(v, str) for v in values): + raise TypeError("apischema GraphQL only support Literal of strings") + + def factory( + name: Optional[str], description: Optional[str] + ) -> graphql.GraphQLEnumType: + return graphql.GraphQLEnumType( + unwrap_name(name, Literal[tuple(values)]), # type: ignore + dict(zip(map(self.enum_aliaser, values), values)), + description=description, + ) + + return TypeFactory(factory) + + @cache_type + def mapping( + self, cls: Type[Mapping], key_type: AnyType, value_type: AnyType + ) -> TypeFactory[GraphQLTp]: + def factory( + name: Optional[str], description: Optional[str] + ) -> graphql.GraphQLScalarType: + if name is not None: + return graphql.GraphQLScalarType(name, description=description) + else: + return 
JsonScalar + + return TypeFactory(factory) + + def object( + self, tp: AnyType, fields: Sequence[ObjectField] + ) -> TypeFactory[GraphQLTp]: + raise NotImplementedError + + @cache_type + def primitive(self, cls: Type) -> TypeFactory[GraphQLTp]: + def factory( + name: Optional[str], description: Optional[str] + ) -> graphql.GraphQLScalarType: + assert cls is not NoneType + if name is not None: + return graphql.GraphQLScalarType(name, description=description) + else: + return GRAPHQL_PRIMITIVE_TYPES[cls] + + return TypeFactory(factory) + + def tuple(self, types: Sequence[AnyType]) -> TypeFactory[GraphQLTp]: + raise TypeError("Tuple are not supported") + + def union(self, alternatives: Sequence[AnyType]) -> TypeFactory[GraphQLTp]: + factories = self._union_results( + (alt for alt in alternatives if alt is not NoneType) + ) + if len(factories) == 1: + factory = factories[0] + else: + factory = self._visited_union(factories) + if NoneType in alternatives or UndefinedType in alternatives: + + def nullable(name: Optional[str], description: Optional[str]) -> GraphQLTp: + res = factory.factory(name, description) # type: ignore + return res.of_type if isinstance(res, graphql.GraphQLNonNull) else res + + return replace(factory, factory=nullable) + else: + return factory + + def visit_conversion( + self, + tp: AnyType, + conversion: Optional[Conv], + dynamic: bool, + next_conversion: Optional[AnyConversion] = None, + ) -> TypeFactory[GraphQLTp]: + if not dynamic and self.is_id(tp) or tp == ID: + return TypeFactory(lambda *_: graphql.GraphQLNonNull(self.id_type)) + factory = super().visit_conversion(tp, conversion, dynamic, next_conversion) + if not dynamic: + factory = factory.merge(get_type_name(tp), get_schema(tp)) + if get_args(tp): + factory = factory.merge(schema=get_schema(get_origin(tp))) + return factory # type: ignore + + +FieldType = TypeVar("FieldType", graphql.GraphQLInputField, graphql.GraphQLField) + + +class BaseField(Generic[FieldType]): + name: str + ordering: 
Optional[Ordering] + + def items(self) -> Iterable[Tuple[str, FieldType]]: + raise NotImplementedError + + +@dataclass +class NormalField(BaseField[FieldType]): + alias: str + name: str + field: Lazy[FieldType] + ordering: Optional[Ordering] + + def items(self) -> Iterable[Tuple[str, FieldType]]: + yield self.alias, self.field() + + +@dataclass +class FlattenedField(BaseField[FieldType]): + name: str + ordering: Optional[Ordering] + type: TypeFactory + + def items(self) -> Iterable[Tuple[str, FieldType]]: + tp = self.type.raw_type + if not isinstance( + tp, + ( + graphql.GraphQLObjectType, + graphql.GraphQLInterfaceType, + graphql.GraphQLInputObjectType, + ), + ): + raise FlattenedError(self) + yield from tp.fields.items() + + +class FlattenedError(Exception): + def __init__(self, field: FlattenedField): + self.field = field + + +def merge_fields(cls: type, fields: Sequence[BaseField]) -> Dict[str, FieldType]: + try: + sorted_fields = sort_by_order( + cls, fields, lambda f: f.name, lambda f: f.ordering + ) + except FlattenedError as err: + raise TypeError( + f"Flattened field {cls.__name__}.{err.field.name}" + f" must have an object type" + ) + return OrderedDict(chain.from_iterable(map(lambda f: f.items(), sorted_fields))) + + +class InputSchemaBuilder( + SchemaBuilder[Deserialization, graphql.GraphQLInputType], + DeserializationVisitor[TypeFactory[graphql.GraphQLInputType]], + DeserializationObjectVisitor[TypeFactory[graphql.GraphQLInputType]], +): + types = graphql.type.definition.graphql_input_types + + def _field( + self, tp: AnyType, field: ObjectField + ) -> Lazy[graphql.GraphQLInputField]: + field_type = field.type + field_default = graphql.Undefined if field.required else field.get_default() + default: Any = graphql.Undefined + # Don't put `null` default + handle Undefined as None + if field_default in {None, Undefined}: + field_type = Optional[field_type] + elif field_default is not graphql.Undefined: + try: + default = serialize( + field_type, + 
field_default, + aliaser=self.aliaser, + conversion=field.deserialization, + ) + except Exception: + field_type = Optional[field_type] + factory = self.visit_with_conv(field_type, field.deserialization) + return lambda: graphql.GraphQLInputField( + factory.type, # type: ignore + default_value=default, + description=get_description(get_field_schema(tp, field), field.type), + ) + + @cache_type + def object( + self, tp: AnyType, fields: Sequence[ObjectField] + ) -> TypeFactory[graphql.GraphQLInputType]: + visited_fields: List[BaseField] = [] + for field in fields: + if not field.is_aggregate: + normal_field = NormalField( + self.aliaser(field.alias), + field.name, + self._field(tp, field), + field.ordering, + ) + visited_fields.append(normal_field) + elif field.flattened: + flattened_fields = FlattenedField( + field.name, + field.ordering, + self.visit_with_conv(field.type, field.deserialization), + ) + visited_fields.append(flattened_fields) + + def factory( + name: Optional[str], description: Optional[str] + ) -> graphql.GraphQLInputObjectType: + name = unwrap_name(name, tp) + if not name.endswith("Input"): + name += "Input" + return graphql.GraphQLInputObjectType( + name, + lambda: merge_fields(get_origin_or_type(tp), visited_fields), + description, + ) + + return TypeFactory(factory) + + def _visited_union( + self, results: Sequence[TypeFactory] + ) -> TypeFactory[graphql.GraphQLInputType]: + # Check must be done here too because _union_result is used by visit_conversion + if len(results) != 1: + raise TypeError("Union are not supported for input") + return results[0] + + +Func = TypeVar("Func", bound=Callable) + + +class OutputSchemaBuilder( + SchemaBuilder[Serialization, graphql.GraphQLOutputType], + SerializationVisitor[TypeFactory[graphql.GraphQLOutputType]], + SerializationObjectVisitor[TypeFactory[graphql.GraphQLOutputType]], +): + types = graphql.type.definition.graphql_output_types + + def __init__( + self, + aliaser: Aliaser, + enum_aliaser: Aliaser, + 
enum_schemas: Mapping[Enum, Schema], + default_conversion: DefaultConversion, + id_type: graphql.GraphQLScalarType, + is_id: Optional[IdPredicate], + union_name_factory: UnionNameFactory, + default_deserialization: DefaultConversion, + ): + super().__init__( + aliaser, enum_aliaser, enum_schemas, default_conversion, id_type, is_id + ) + self.union_name_factory = union_name_factory + self.input_builder = InputSchemaBuilder( + self.aliaser, + self.enum_aliaser, + self.enum_schemas, + default_deserialization, + self.id_type, + self.is_id, + ) + # Share the same cache for input_builder in order to share scalar types + self.input_builder._cache_by_name = self._cache_by_name + self.get_flattened: Optional[Callable[[Any], Any]] = None + + def _field_serialization_method(self, field: ObjectField) -> SerializationMethod: + return partial_serialization_method_factory( + self.aliaser, field.serialization, self.default_conversion + )(Optional[field.type] if field.none_as_undefined else field.type) + + def _wrap_resolve(self, resolve: Func) -> Func: + if self.get_flattened is None: + return resolve + else: + get_flattened = self.get_flattened + + def resolve_wrapper(__obj, __info, **kwargs): + return resolve(get_flattened(__obj), __info, **kwargs) + + return cast(Func, resolve_wrapper) + + def _field(self, tp: AnyType, field: ObjectField) -> Lazy[graphql.GraphQLField]: + field_name = field.name + partial_serialize = self._field_serialization_method(field) + + @self._wrap_resolve + def resolve(obj, _): + return partial_serialize(getattr(obj, field_name)) + + factory = self.visit_with_conv(field.type, field.serialization) + field_schema = get_field_schema(tp, field) + return lambda: graphql.GraphQLField( + factory.type, + None, + resolve, + description=get_description(field_schema, field.type), + deprecation_reason=get_deprecated(field_schema, field.type), + ) + + def _resolver( + self, tp: AnyType, field: ResolverField + ) -> Lazy[graphql.GraphQLField]: + resolve = 
self._wrap_resolve( + resolver_resolve( + field.resolver, + field.types, + self.aliaser, + self.input_builder.default_conversion, + self.default_conversion, + ) + ) + args = None + if field.parameters is not None: + args = {} + for param in field.parameters: + default: Any = graphql.Undefined + param_type = field.types[param.name] + if is_union_of(param_type, graphql.GraphQLResolveInfo): + break + param_field = ObjectField( + param.name, + param_type, + param.default is Parameter.empty, + field.metadata.get(param.name, empty_dict), + default=param.default, + ) + if param_field.required: + pass + # Don't put `null` default + handle Undefined as None + # also https://github.com/python/typing/issues/775 + elif param.default in {None, Undefined}: + param_type = Optional[param_type] + # param.default == graphql.Undefined means the parameter is required + # even if it has a default + elif param.default not in {Parameter.empty, graphql.Undefined}: + try: + default = serialize( + param_type, + param.default, + fall_back_on_any=False, + check_type=True, + ) + except Exception: + param_type = Optional[param_type] + arg_factory = self.input_builder.visit_with_conv( + param_type, param_field.deserialization + ) + description = get_description( + get_parameter_schema(field.resolver.func, param, param_field), + param_field.type, + ) + + def arg_thunk( + arg_factory=arg_factory, default=default, description=description + ) -> graphql.GraphQLArgument: + return graphql.GraphQLArgument( + arg_factory.type, default, description + ) + + args[self.aliaser(param_field.alias)] = arg_thunk + factory = self.visit_with_conv(field.types["return"], field.resolver.conversion) + field_schema = get_method_schema(tp, field.resolver) + return lambda: graphql.GraphQLField( + factory.type, # type: ignore + {name: arg() for name, arg in args.items()} if args else None, + resolve, + field.subscribe, + get_description(field_schema), + get_deprecated(field_schema), + ) + + def _visit_flattened( + self, 
field: ObjectField + ) -> TypeFactory[graphql.GraphQLOutputType]: + get_prev_flattened = ( + self.get_flattened if self.get_flattened is not None else identity + ) + field_name = field.name + partial_serialize = self._field_serialization_method(field) + + def get_flattened(obj): + return partial_serialize(getattr(get_prev_flattened(obj), field_name)) + + with context_setter(self): + self.get_flattened = get_flattened + return self.visit_with_conv(field.type, field.serialization) + + @cache_type + def object( + self, + tp: AnyType, + fields: Sequence[ObjectField], + resolvers: Sequence[ResolverField] = (), + ) -> TypeFactory[graphql.GraphQLOutputType]: + cls = get_origin_or_type(tp) + visited_fields: List[BaseField[graphql.GraphQLField]] = [] + flattened_factories = [] + for field in fields: + if not field.is_aggregate: + normal_field = NormalField( + self.aliaser(field.name), + field.name, + self._field(tp, field), + field.ordering, + ) + visited_fields.append(normal_field) + elif field.flattened: + flattened_factory = self._visit_flattened(field) + flattened_factories.append(flattened_factory) + visited_fields.append( + FlattenedField(field.name, field.ordering, flattened_factory) + ) + resolvers = list(resolvers) + for resolver, types in get_resolvers(tp): + resolver_field = ResolverField( + resolver, types, resolver.parameters, resolver.parameters_metadata + ) + resolvers.append(resolver_field) + for resolver_field in resolvers: + normal_field = NormalField( + self.aliaser(resolver_field.resolver.alias), + resolver_field.resolver.func.__name__, + self._resolver(tp, resolver_field), + resolver_field.resolver.ordering, + ) + visited_fields.append(normal_field) + + interface_thunk = None + interfaces = list(map(self.visit, get_interfaces(cls))) + if interfaces or flattened_factories: + + def interface_thunk() -> Collection[graphql.GraphQLInterfaceType]: + all_interfaces = { + cast(graphql.GraphQLInterfaceType, i.raw_type) for i in interfaces + } + for 
flattened_factory in flattened_factories: + flattened = cast( + Union[graphql.GraphQLObjectType, graphql.GraphQLInterfaceType], + flattened_factory.raw_type, + ) + if isinstance(flattened, graphql.GraphQLObjectType): + all_interfaces.update(flattened.interfaces) + elif isinstance(flattened, graphql.GraphQLInterfaceType): + all_interfaces.add(flattened) + return sorted(all_interfaces, key=lambda i: i.name) + + def factory( + name: Optional[str], description: Optional[str] + ) -> Union[graphql.GraphQLObjectType, graphql.GraphQLInterfaceType]: + name = unwrap_name(name, cls) + if is_interface(cls): + return graphql.GraphQLInterfaceType( + name, + lambda: merge_fields(cls, visited_fields), + interface_thunk, + description=description, + ) + else: + return graphql.GraphQLObjectType( + name, + lambda: merge_fields(cls, visited_fields), + interface_thunk, + is_type_of=lambda obj, _: isinstance(obj, cls), + description=description, + ) + + return TypeFactory(factory) + + def typed_dict( + self, tp: Type, types: Mapping[str, AnyType], required_keys: Collection[str] + ) -> TypeFactory[graphql.GraphQLOutputType]: + raise TypeError("TypedDict are not supported in output schema") + + @cache_type + def _visited_union( + self, results: Sequence[TypeFactory] + ) -> TypeFactory[graphql.GraphQLOutputType]: + def factory( + name: Optional[str], description: Optional[str] + ) -> graphql.GraphQLOutputType: + types = [factory.raw_type for factory in results] + if name is None: + name = self.union_name_factory([t.name for t in types]) + return graphql.GraphQLUnionType(name, types, description=description) + + return TypeFactory(factory) + + +async_iterable_origins = set(map(get_origin, (AsyncIterable[Any], AsyncIterator[Any]))) + +_fake_type = cast(type, ...) 
+ + +@dataclass(frozen=True) +class Operation(Generic[T]): + function: Callable[..., T] + alias: Optional[str] = None + conversion: Optional[AnyConversion] = None + error_handler: ErrorHandler = Undefined + order: Optional[Ordering] = None + schema: Optional[Schema] = None + parameters_metadata: Mapping[str, Mapping] = field_(default_factory=dict) + + +class Query(Operation): + pass + + +class Mutation(Operation): + pass + + +@dataclass(frozen=True) +class Subscription(Operation[AsyncIterable]): + resolver: Optional[Callable] = None + + +Op = TypeVar("Op", bound=Operation) + + +def operation_resolver(operation: Union[Callable, Op], op_class: Type[Op]) -> Resolver: + if not isinstance(operation, op_class): + operation = op_class(operation) # type: ignore + error_handler: Optional[Callable] + if operation.error_handler is Undefined: + error_handler = None + elif operation.error_handler is None: + error_handler = none_error_handler + else: + error_handler = operation.error_handler + op = operation.function + if iscoroutinefunction(op): + + async def wrapper(_, *args, **kwargs): + return await op(*args, **kwargs) + + else: + + def wrapper(_, *args, **kwargs): + return op(*args, **kwargs) + + wrapper.__annotations__ = op.__annotations__ + + (*parameters,) = resolver_parameters(operation.function, check_first=True) + return Resolver( + wrapper, + operation.alias or operation.function.__name__, + operation.conversion, + error_handler, + operation.order, + operation.schema, + parameters, + operation.parameters_metadata, + ) + + +@deprecate_kwargs({"union_ref": "union_name"}) +def graphql_schema( + *, + query: Iterable[Union[Callable, Query]] = (), + mutation: Iterable[Union[Callable, Mutation]] = (), + subscription: Iterable[Union[Callable[..., AsyncIterable], Subscription]] = (), + types: Iterable[Type] = (), + directives: Optional[Collection[graphql.GraphQLDirective]] = None, + description: Optional[str] = None, + extensions: Optional[Dict[str, Any]] = None, + aliaser: 
Optional[Aliaser] = to_camel_case, + enum_aliaser: Optional[Aliaser] = str.upper, + enum_schemas: Optional[Mapping[Enum, Schema]] = None, + id_types: Union[Collection[AnyType], IdPredicate] = (), + id_encoding: Tuple[ + Optional[Callable[[str], Any]], Optional[Callable[[Any], str]] + ] = (None, None), + union_name: UnionNameFactory = "Or".join, + default_deserialization: DefaultConversion = None, + default_serialization: DefaultConversion = None, +) -> graphql.GraphQLSchema: + if aliaser is None: + aliaser = settings.aliaser + if enum_aliaser is None: + enum_aliaser = lambda s: s + if default_deserialization is None: + default_deserialization = settings.deserialization.default_conversion + if default_serialization is None: + default_serialization = settings.serialization.default_conversion + query_fields: List[ResolverField] = [] + mutation_fields: List[ResolverField] = [] + subscription_fields: List[ResolverField] = [] + for operations, op_class, fields in [ + (query, Query, query_fields), + (mutation, Mutation, mutation_fields), + ]: + for operation in operations: # type: ignore + resolver = operation_resolver(operation, op_class) + resolver_field = ResolverField( + resolver, + resolver.types(), + resolver.parameters, + resolver.parameters_metadata, + ) + fields.append(resolver_field) + for sub_op in subscription: # type: ignore + if not isinstance(sub_op, Subscription): + sub_op = Subscription(sub_op) # type: ignore + sub_parameters: Sequence[Parameter] + if sub_op.resolver is not None: + subscriber2 = operation_resolver(sub_op, Subscription) + _, *sub_parameters = resolver_parameters(sub_op.resolver, check_first=False) + resolver = Resolver( + sub_op.resolver, + sub_op.alias or sub_op.resolver.__name__, + sub_op.conversion, + subscriber2.error_handler, + sub_op.order, + sub_op.schema, + sub_parameters, + sub_op.parameters_metadata, + ) + sub_types = resolver.types() + subscriber = replace(subscriber2, error_handler=None) + subscribe = resolver_resolve( + 
subscriber, + subscriber.types(), + aliaser, + default_deserialization, + default_serialization, + serialized=False, + ) + else: + subscriber2 = operation_resolver(sub_op, Subscription) + resolver = Resolver( + lambda _: _, + subscriber2.alias, + sub_op.conversion, + subscriber2.error_handler, + sub_op.order, + sub_op.schema, + (), + {}, + ) + subscriber = replace(subscriber2, error_handler=None) + sub_parameters = subscriber.parameters + sub_types = subscriber.types() + if get_origin2(sub_types["return"]) not in async_iterable_origins: + raise TypeError( + "Subscriptions must return an AsyncIterable/AsyncIterator" + ) + event_type = get_args2(sub_types["return"])[0] + subscribe = resolver_resolve( + subscriber, + sub_types, + aliaser, + default_deserialization, + default_serialization, + serialized=False, + ) + sub_types = {**sub_types, "return": resolver.return_type(event_type)} + + resolver_field = ResolverField( + resolver, sub_types, sub_parameters, sub_op.parameters_metadata, subscribe + ) + subscription_fields.append(resolver_field) + + is_id = as_predicate(id_types) + if id_encoding == (None, None): + id_type: graphql.GraphQLScalarType = graphql.GraphQLID + else: + id_deserializer, id_serializer = id_encoding + id_type = graphql.GraphQLScalarType( + name="ID", + serialize=id_serializer or graphql.GraphQLID.serialize, + parse_value=id_deserializer or graphql.GraphQLID.parse_value, + parse_literal=graphql.GraphQLID.parse_literal, + description=graphql.GraphQLID.description, + ) + + output_builder = OutputSchemaBuilder( + aliaser, + enum_aliaser, + enum_schemas or {}, + default_serialization, + id_type, + is_id, + union_name, + default_deserialization, + ) + + def root_type( + name: str, fields: Sequence[ResolverField] + ) -> Optional[graphql.GraphQLObjectType]: + if not fields: + return None + tp, type_name = type(name, (), {}), TypeName(graphql=name) + return output_builder.object(tp, (), fields).merge(type_name, None).raw_type # type: ignore + + return 
graphql.GraphQLSchema( + query=root_type("Query", query_fields), + mutation=root_type("Mutation", mutation_fields), + subscription=root_type("Subscription", subscription_fields), + types=[output_builder.visit(cls).raw_type for cls in types], # type: ignore + directives=directives, + description=description, + extensions=extensions, + ) diff --git a/.venv/lib/python3.9/site-packages/apischema/json_schema/__init__.py b/.venv/lib/python3.9/site-packages/apischema/json_schema/__init__.py new file mode 100644 index 0000000..ab529b3 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/json_schema/__init__.py @@ -0,0 +1,13 @@ +__all__ = [ + "JsonSchemaVersion", + "definitions_schema", + "deserialization_schema", + "serialization_schema", +] + +from apischema.json_schema.schema import ( + definitions_schema, + deserialization_schema, + serialization_schema, +) +from .versions import JsonSchemaVersion diff --git a/.venv/lib/python3.9/site-packages/apischema/json_schema/conversions_resolver.py b/.venv/lib/python3.9/site-packages/apischema/json_schema/conversions_resolver.py new file mode 100644 index 0000000..a04c7c9 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/json_schema/conversions_resolver.py @@ -0,0 +1,134 @@ +from contextlib import suppress +from typing import ( + Any, + Collection, + Iterable, + Iterator, + Mapping, + Optional, + Sequence, + Set, + Tuple, + Type, + Union, +) + +from apischema.conversions.conversions import AnyConversion, DefaultConversion +from apischema.conversions.visitor import ( + Conv, + ConversionsVisitor, + DeserializationVisitor, + SerializationVisitor, +) +from apischema.types import AnyType +from apischema.utils import is_hashable +from apischema.visitor import Unsupported + +try: + from apischema.typing import Annotated, is_union +except ImportError: + Annotated = ... 
# type: ignore + + +def merge_results( + results: Iterable[Sequence[AnyType]], origin: AnyType +) -> Sequence[AnyType]: + def rec(index=0) -> Iterator[Sequence[AnyType]]: + if index < len(result_list): + for next_ in rec(index + 1): + for res in result_list[index]: + yield (res, *next_) + else: + yield () + + result_list = list(results) + return [(Union if is_union(origin) else origin)[tuple(r)] for r in rec()] + + +class ConversionsResolver(ConversionsVisitor[Conv, Sequence[AnyType]]): + def __init__(self, default_conversion: DefaultConversion): + super().__init__(default_conversion) + self._skip_conversion = True + self._rec_guard: Set[Tuple[AnyType, Conv]] = set() + + def annotated(self, tp: AnyType, annotations: Sequence[Any]) -> Sequence[AnyType]: + return [ + Annotated[(res, *annotations)] for res in super().annotated(tp, annotations) + ] + + def collection( + self, cls: Type[Collection], value_type: AnyType + ) -> Sequence[AnyType]: + return merge_results([self.visit(value_type)], Collection) + + def mapping( + self, cls: Type[Mapping], key_type: AnyType, value_type: AnyType + ) -> Sequence[AnyType]: + return merge_results([self.visit(key_type), self.visit(value_type)], Mapping) + + def new_type(self, tp: AnyType, super_type: AnyType) -> Sequence[AnyType]: + raise NotImplementedError + + def tuple(self, types: Sequence[AnyType]) -> Sequence[AnyType]: + return merge_results(map(self.visit, types), Tuple) + + def _visited_union(self, results: Sequence[Sequence[AnyType]]) -> Sequence[AnyType]: + return merge_results(results, Union) + + def visit_conversion( + self, + tp: AnyType, + conversion: Any, + dynamic: bool, + next_conversion: Optional[AnyConversion] = None, + ) -> Sequence[AnyType]: + if conversion is not None and self._skip_conversion: + return [] if dynamic else [tp] + self._skip_conversion = False + results: Sequence[AnyType] = [] + if not is_hashable(tp): + with suppress(NotImplementedError, Unsupported): + results = super().visit_conversion( + tp, 
conversion, dynamic, next_conversion + ) + elif (tp, conversion) not in self._rec_guard: + self._rec_guard.add((tp, conversion)) + with suppress(NotImplementedError, Unsupported): + results = super().visit_conversion( + tp, conversion, dynamic, next_conversion + ) + self._rec_guard.remove((tp, conversion)) + if not dynamic and (conversion is not None or not results): + results = [tp, *results] + return results + + +class WithConversionsResolver: + def resolve_conversion(self, tp: AnyType) -> Sequence[AnyType]: + raise NotImplementedError + + def __init_subclass__(cls, **kwargs): + super().__init_subclass__(**kwargs) + Resolver: Type[ConversionsResolver] + if issubclass(cls, DeserializationVisitor): + + class Resolver(ConversionsResolver, DeserializationVisitor): + pass + + elif issubclass(cls, SerializationVisitor): + + class Resolver(ConversionsResolver, SerializationVisitor): + pass + + else: + return + + def resolve_conversion( + self: ConversionsVisitor, tp: AnyType + ) -> Sequence[AnyType]: + return Resolver(self.default_conversion).visit_with_conv( + tp, self._conversion + ) + + assert issubclass(cls, WithConversionsResolver) + cls.resolve_conversion = resolve_conversion # type: ignore diff --git a/.venv/lib/python3.9/site-packages/apischema/json_schema/patterns.py b/.venv/lib/python3.9/site-packages/apischema/json_schema/patterns.py new file mode 100644 index 0000000..a52488d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/json_schema/patterns.py @@ -0,0 +1,23 @@ +from typing import Pattern + +from apischema.conversions.conversions import DefaultConversion +from apischema.types import AnyType + + +def infer_pattern(tp: AnyType, default_conversion: DefaultConversion) -> Pattern: + from apischema.json_schema.schema import DeserializationSchemaBuilder + + try: + builder = DeserializationSchemaBuilder( + False, default_conversion, False, lambda r: r, {} + ) + prop_schema = builder.visit(tp) + except RecursionError: + pass + else: + if ( + 
len(prop_schema.get("patternProperties", {})) == 1 + and "additionalProperties" not in prop_schema + ): + return next(iter(prop_schema["patternProperties"])) + raise TypeError("Cannot inferred pattern from type schema") from None diff --git a/.venv/lib/python3.9/site-packages/apischema/json_schema/refs.py b/.venv/lib/python3.9/site-packages/apischema/json_schema/refs.py new file mode 100644 index 0000000..ab69aa5 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/json_schema/refs.py @@ -0,0 +1,148 @@ +from collections import defaultdict +from enum import Enum +from typing import ( + Any, + Collection, + Dict, + Mapping, + Optional, + Sequence, + Tuple, + Type, + TypeVar, +) + +from apischema.conversions.conversions import AnyConversion, DefaultConversion +from apischema.conversions.visitor import ( + ConversionsVisitor, + DeserializationVisitor, + SerializationVisitor, +) +from apischema.json_schema.conversions_resolver import WithConversionsResolver +from apischema.objects import ObjectField +from apischema.objects.visitor import ( + DeserializationObjectVisitor, + ObjectVisitor, + SerializationObjectVisitor, +) +from apischema.type_names import TypeNameFactory, get_type_name +from apischema.types import AnyType +from apischema.utils import is_hashable, replace_builtins +from apischema.visitor import Unsupported + +try: + from apischema.typing import Annotated +except ImportError: + Annotated = ... 
# type: ignore + +Refs = Dict[str, Tuple[AnyType, int]] + + +class Recursive(Exception): + pass + + +T = TypeVar("T") + + +class RefsExtractor(ConversionsVisitor, ObjectVisitor, WithConversionsResolver): + def __init__(self, default_conversion: DefaultConversion, refs: Refs): + super().__init__(default_conversion) + self.refs = refs + self._rec_guard: Dict[ + Tuple[AnyType, Optional[AnyConversion]], int + ] = defaultdict(lambda: 0) + + def _incr_ref(self, ref: Optional[str], tp: AnyType) -> bool: + if ref is None: + return False + else: + ref_cls, count = self.refs.get(ref, (tp, 0)) + if replace_builtins(ref_cls) != replace_builtins(tp): + raise ValueError( + f"Types {tp} and {self.refs[ref][0]} share same reference '{ref}'" + ) + self.refs[ref] = (ref_cls, count + 1) + return count > 0 + + def annotated(self, tp: AnyType, annotations: Sequence[Any]): + for i, annotation in enumerate(reversed(annotations)): + if isinstance(annotation, TypeNameFactory): + ref = annotation.to_type_name(tp).json_schema + if not isinstance(ref, str): + continue + ref_annotations = annotations[: len(annotations) - i] + annotated = Annotated[(tp, *ref_annotations)] # type: ignore + if self._incr_ref(ref, annotated): + return + return super().annotated(tp, annotations) + + def any(self): + pass + + def collection(self, cls: Type[Collection], value_type: AnyType): + self.visit(value_type) + + def enum(self, cls: Type[Enum]): + pass + + def literal(self, values: Sequence[Any]): + pass + + def mapping(self, cls: Type[Mapping], key_type: AnyType, value_type: AnyType): + self.visit(key_type) + self.visit(value_type) + + def object(self, tp: AnyType, fields: Sequence[ObjectField]): + for field in fields: + self.visit_with_conv(field.type, self._field_conversion(field)) + + def primitive(self, cls: Type): + pass + + def tuple(self, types: Sequence[AnyType]): + for cls in types: + self.visit(cls) + + def _visited_union(self, results: Sequence): + pass + + def visit_conversion( + self, + tp: 
AnyType, + conversion: Optional[Any], + dynamic: bool, + next_conversion: Optional[AnyConversion] = None, + ): + ref_types = [] + if not dynamic: + for ref_tp in self.resolve_conversion(tp): + ref_types.append(ref_tp) + if self._incr_ref(get_type_name(ref_tp).json_schema, ref_tp): + return + if not is_hashable(tp): + return super().visit_conversion(tp, conversion, dynamic, next_conversion) + # 2 because the first type encountered of the recursive cycle can have no ref + # (see test_recursive_by_conversion_schema) + if self._rec_guard[(tp, self._conversion)] > 2: + raise TypeError(f"Recursive type {tp} need a ref") + self._rec_guard[(tp, self._conversion)] += 1 + try: + super().visit_conversion(tp, conversion, dynamic, next_conversion) + except Unsupported: + for ref_tp in ref_types: + self.refs.pop(get_type_name(ref_tp).json_schema, ...) # type: ignore + finally: + self._rec_guard[(tp, self._conversion)] -= 1 + + +class DeserializationRefsExtractor( + RefsExtractor, DeserializationVisitor, DeserializationObjectVisitor +): + pass + + +class SerializationRefsExtractor( + RefsExtractor, SerializationVisitor, SerializationObjectVisitor +): + pass diff --git a/.venv/lib/python3.9/site-packages/apischema/json_schema/schema.py b/.venv/lib/python3.9/site-packages/apischema/json_schema/schema.py new file mode 100644 index 0000000..a20ae6b --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/json_schema/schema.py @@ -0,0 +1,737 @@ +from contextlib import suppress +from dataclasses import dataclass +from enum import Enum +from itertools import chain +from typing import ( + AbstractSet, + Any, + Callable, + ClassVar, + Collection, + Dict, + List, + Mapping, + Optional, + Pattern, + Sequence, + Tuple, + Type, + TypeVar, + Union, +) + +from apischema.aliases import Aliaser +from apischema.conversions import converters +from apischema.conversions.conversions import AnyConversion, DefaultConversion +from apischema.conversions.visitor import ( + Conv, + 
ConversionsVisitor, + Deserialization, + DeserializationVisitor, + Serialization, + SerializationVisitor, +) +from apischema.dependencies import get_dependent_required +from apischema.json_schema.conversions_resolver import WithConversionsResolver +from apischema.json_schema.patterns import infer_pattern +from apischema.json_schema.refs import ( + DeserializationRefsExtractor, + Refs, + RefsExtractor as RefsExtractor_, + SerializationRefsExtractor, +) +from apischema.json_schema.types import JsonSchema, JsonType, json_schema +from apischema.json_schema.versions import JsonSchemaVersion, RefFactory +from apischema.metadata.keys import SCHEMA_METADATA +from apischema.objects import AliasedStr, ObjectField +from apischema.objects.visitor import ( + DeserializationObjectVisitor, + ObjectVisitor, + SerializationObjectVisitor, +) +from apischema.ordering import Ordering, sort_by_order +from apischema.schemas import Schema, get_schema as _get_schema, merge_schema +from apischema.serialization import serialize +from apischema.serialization.serialized_methods import ( + SerializedMethod, + get_serialized_methods, +) +from apischema.type_names import TypeNameFactory, get_type_name +from apischema.types import AnyType, OrderedDict, UndefinedType +from apischema.typing import get_args, is_typed_dict +from apischema.utils import ( + context_setter, + deprecate_kwargs, + get_origin_or_type, + is_union_of, + literal_values, +) + + +def get_schema(tp: AnyType) -> Optional[Schema]: + from apischema import settings + + return merge_schema(settings.base_schema.type(tp), _get_schema(tp)) + + +def get_field_schema(tp: AnyType, field: ObjectField) -> Optional[Schema]: + from apischema import settings + + assert not field.is_aggregate + return merge_schema( + settings.base_schema.field(tp, field.name, field.alias), field.schema + ) + + +def get_method_schema(tp: AnyType, method: SerializedMethod) -> Optional[Schema]: + from apischema import settings + + return merge_schema( + 
settings.base_schema.method(tp, method.func, method.alias), method.schema + ) + + +def full_schema(base_schema: JsonSchema, schema: Optional[Schema]) -> JsonSchema: + if schema is not None: + base_schema = JsonSchema(base_schema) + schema.merge_into(base_schema) + return base_schema + + +Method = TypeVar("Method", bound=Callable) + + +@dataclass(frozen=True) +class Property: + alias: AliasedStr + name: str + ordering: Optional[Ordering] + required: bool + schema: JsonSchema + + +class SchemaBuilder( + ConversionsVisitor[Conv, JsonSchema], + ObjectVisitor[JsonSchema], + WithConversionsResolver, +): + def __init__( + self, + additional_properties: bool, + default_conversion: DefaultConversion, + ignore_first_ref: bool, + ref_factory: RefFactory, + refs: Collection[str], + ): + super().__init__(default_conversion) + self.additional_properties = additional_properties + self._ignore_first_ref = ignore_first_ref + self.ref_factory = ref_factory + self.refs = refs + + def ref_schema(self, ref: Optional[str]) -> Optional[JsonSchema]: + if ref not in self.refs: + return None + elif self._ignore_first_ref: + self._ignore_first_ref = False + return None + else: + assert isinstance(ref, str) + return JsonSchema({"$ref": self.ref_factory(ref)}) + + def annotated(self, tp: AnyType, annotations: Sequence[Any]) -> JsonSchema: + schema = None + for annotation in reversed(annotations): + if isinstance(annotation, TypeNameFactory): + ref = annotation.to_type_name(tp).json_schema + ref_schema = self.ref_schema(ref) + if ref_schema is not None: + return full_schema(ref_schema, schema) + if isinstance(annotation, Mapping): + schema = merge_schema(annotation.get(SCHEMA_METADATA), schema) + return full_schema(super().annotated(tp, annotations), schema) + + def any(self) -> JsonSchema: + return JsonSchema() + + def collection(self, cls: Type[Collection], value_type: AnyType) -> JsonSchema: + return json_schema( + type=JsonType.ARRAY, + items=self.visit(value_type), + 
uniqueItems=issubclass(cls, AbstractSet), + ) + + def enum(self, cls: Type[Enum]) -> JsonSchema: + if len(cls) == 0: + raise TypeError("Empty enum") + return self.literal(list(cls)) + + def literal(self, values: Sequence[Any]) -> JsonSchema: + if not values: + raise TypeError("Empty Literal") + types = {JsonType.from_type(type(v)) for v in literal_values(values)} + # Mypy issue + type_: Any = types.pop() if len(types) == 1 else types + if len(values) == 1: + return json_schema(type=type_, const=values[0]) + else: + return json_schema(type=type_, enum=values) + + def mapping( + self, cls: Type[Mapping], key_type: AnyType, value_type: AnyType + ) -> JsonSchema: + with context_setter(self): + self._ignore_first_ref = True + key = self.visit(key_type) + if key["type"] != JsonType.STRING: + raise ValueError("Mapping types must string-convertible key") + value = self.visit(value_type) + if "pattern" in key: + return json_schema( + type=JsonType.OBJECT, patternProperties={key["pattern"]: value} + ) + else: + return json_schema(type=JsonType.OBJECT, additionalProperties=value) + + def visit_field( + self, tp: AnyType, field: ObjectField, required: bool = True + ) -> JsonSchema: + assert not field.is_aggregate + result = full_schema( + self.visit_with_conv(field.type, self._field_conversion(field)), + get_field_schema(tp, field) if tp is not None else field.schema, + ) + if not required and "default" not in result: + result = JsonSchema(result) + with suppress(Exception): + result["default"] = serialize( + field.type, + field.get_default(), + fall_back_on_any=False, + check_type=True, + conversion=field.serialization, + ) + return result + + def _object_schema(self, cls: type, field: ObjectField) -> JsonSchema: + assert field.is_aggregate + with context_setter(self): + self._ignore_first_ref = True + object_schema = full_schema( + self.visit_with_conv(field.type, self._field_conversion(field)), + field.schema, + ) + if object_schema.get("type") not in {JsonType.OBJECT, 
"object"}: + field_type = "Flattened" if field.flattened else "Properties" + raise TypeError( + f"{field_type} field {cls.__name__}.{field.name}" + f" must have an object type" + ) + return object_schema + + def _properties_schema( + self, object_schema: JsonSchema, pattern: Optional[Pattern] = None + ): + if "patternProperties" in object_schema: + if pattern is not None: + for p in (pattern, pattern.pattern): + if p in object_schema["patternProperties"]: + return object_schema["patternProperties"][p] + elif ( + len(object_schema["patternProperties"]) == 1 + and "additionalProperties" not in object_schema + ): + return next(iter(object_schema["patternProperties"].values())) + if isinstance(object_schema.get("additionalProperties"), Mapping): + return object_schema["additionalProperties"] + return JsonSchema() + + def properties( + self, tp: AnyType, fields: Sequence[ObjectField] + ) -> Sequence[Property]: + raise NotImplementedError + + def object(self, tp: AnyType, fields: Sequence[ObjectField]) -> JsonSchema: + cls = get_origin_or_type(tp) + properties = sort_by_order( + cls, self.properties(tp, fields), lambda p: p.name, lambda p: p.ordering + ) + flattened_schemas: List[JsonSchema] = [] + pattern_properties = {} + additional_properties: Union[bool, JsonSchema] = self.additional_properties + for field in fields: + if field.flattened: + self._object_schema(cls, field) # check the field is an object + flattened_schemas.append( + full_schema( + self.visit_with_conv(field.type, self._field_conversion(field)), + field.schema, + ) + ) + elif field.pattern_properties is not None: + if field.pattern_properties is ...: + pattern = infer_pattern(field.type, self.default_conversion) + else: + assert isinstance(field.pattern_properties, Pattern) + pattern = field.pattern_properties + pattern_properties[pattern] = self._properties_schema( + self._object_schema(cls, field), pattern + ) + elif field.additional_properties: + additional_properties = self._properties_schema( + 
self._object_schema(cls, field) + ) + alias_by_names = {f.name: f.alias for f in fields}.__getitem__ + dependent_required = get_dependent_required(cls) + result = json_schema( + type=JsonType.OBJECT, + properties={p.alias: p.schema for p in properties}, + required=[p.alias for p in properties if p.required], + additionalProperties=additional_properties, + patternProperties=pattern_properties, + dependentRequired=OrderedDict( + (alias_by_names(f), sorted(map(alias_by_names, dependent_required[f]))) + for f in sorted(dependent_required, key=alias_by_names) + ), + ) + if flattened_schemas: + result = json_schema( + type=JsonType.OBJECT, + allOf=[result, *flattened_schemas], + unevaluatedProperties=False, + ) + return result + + def primitive(self, cls: Type) -> JsonSchema: + return JsonSchema(type=JsonType.from_type(cls)) + + def tuple(self, types: Sequence[AnyType]) -> JsonSchema: + return json_schema( + type=JsonType.ARRAY, + prefixItems=[self.visit(cls) for cls in types], + items=False, + minItems=len(types), + maxItems=len(types), + ) + + def _visited_union(self, results: Sequence[JsonSchema]) -> JsonSchema: + if len(results) == 1: + return results[0] + elif any(alt == {} for alt in results): + return JsonSchema() + elif all(alt.keys() == {"type"} for alt in results): + types: Any = chain.from_iterable( + [res["type"]] + if isinstance(res["type"], (str, JsonType)) + else res["type"] + for res in results + ) + return json_schema(type=list(types)) + elif ( + len(results) == 2 + and all("type" in res for res in results) + and {"type": "null"} in results + ): + for result in results: + if result != {"type": "null"}: + types = result["type"] + if isinstance(types, (str, JsonType)): + types = [types] + if "null" not in types: + result = JsonSchema({**result, "type": [*types, "null"]}) + return result + else: + raise NotImplementedError + else: + return json_schema(anyOf=results) + + def visit_conversion( + self, + tp: AnyType, + conversion: Optional[Conv], + dynamic: 
bool, + next_conversion: Optional[AnyConversion] = None, + ) -> JsonSchema: + schema = None + if not dynamic: + for ref_tp in self.resolve_conversion(tp): + ref_schema = self.ref_schema(get_type_name(ref_tp).json_schema) + if ref_schema is not None: + return ref_schema + if get_args(tp): + schema = merge_schema(schema, get_schema(get_origin_or_type(tp))) + schema = merge_schema(schema, get_schema(tp)) + result = super().visit_conversion(tp, conversion, dynamic, next_conversion) + return full_schema(result, schema) + + RefsExtractor: ClassVar[Type[RefsExtractor_]] + + +class DeserializationSchemaBuilder( + SchemaBuilder[Deserialization], + DeserializationVisitor[JsonSchema], + DeserializationObjectVisitor[JsonSchema], +): + RefsExtractor = DeserializationRefsExtractor + + def properties( + self, tp: AnyType, fields: Sequence[ObjectField] + ) -> Sequence[Property]: + return [ + Property( + AliasedStr(field.alias), + field.name, + field.ordering, + field.required, + self.visit_field(tp, field, field.required), + ) + for field in fields + if not field.is_aggregate + ] + + +class SerializationSchemaBuilder( + SchemaBuilder[Serialization], + SerializationVisitor[JsonSchema], + SerializationObjectVisitor[JsonSchema], +): + RefsExtractor = SerializationRefsExtractor + + @staticmethod + def _field_required(field: ObjectField): + from apischema import settings + + return not field.skippable( + settings.serialization.exclude_defaults, settings.serialization.exclude_none + ) + + def properties( + self, tp: AnyType, fields: Sequence[ObjectField] + ) -> Sequence[Property]: + from apischema import settings + + return [ + Property( + AliasedStr(field.alias), + field.name, + field.ordering, + required, + self.visit_field(tp, field, required), + ) + for field in fields + if not field.is_aggregate + for required in [ + field.required + if is_typed_dict(get_origin_or_type(tp)) + else not field.skippable( + settings.serialization.exclude_defaults, + settings.serialization.exclude_none, 
+ ) + ] + ] + [ + Property( + AliasedStr(serialized.alias), + serialized.func.__name__, + serialized.ordering, + not is_union_of(types["return"], UndefinedType), + full_schema( + self.visit_with_conv(types["return"], serialized.conversion), + get_method_schema(tp, serialized), + ), + ) + for serialized, types in get_serialized_methods(tp) + ] + + +TypesWithConversion = Collection[Union[AnyType, Tuple[AnyType, AnyConversion]]] + + +def _default_version( + version: Optional[JsonSchemaVersion], + ref_factory: Optional[RefFactory], + all_refs: Optional[bool], +) -> Tuple[JsonSchemaVersion, RefFactory, bool]: + from apischema import settings + + if version is None: + version = settings.json_schema_version + if ref_factory is None: + ref_factory = version.ref_factory + if all_refs is None: + all_refs = version.all_refs + return version, ref_factory, all_refs + + +def _extract_refs( + types: TypesWithConversion, + default_conversion: DefaultConversion, + builder: Type[SchemaBuilder], + all_refs: bool, +) -> Mapping[str, AnyType]: + refs: Refs = {} + for tp in types: + conversion = None + if isinstance(tp, tuple): + tp, conversion = tp + builder.RefsExtractor(default_conversion, refs).visit_with_conv(tp, conversion) + filtr = (lambda count: True) if all_refs else (lambda count: count > 1) + return {ref: tp for ref, (tp, count) in refs.items() if filtr(count)} + + +def _refs_schema( + builder: Type[SchemaBuilder], + default_conversion: DefaultConversion, + refs: Mapping[str, AnyType], + ref_factory: RefFactory, + additional_properties: bool, +) -> Mapping[str, JsonSchema]: + return { + ref: builder( + additional_properties, default_conversion, True, ref_factory, refs + ).visit(tp) + for ref, tp in refs.items() + } + + +def _schema( + builder: Type[SchemaBuilder], + tp: AnyType, + schema: Optional[Schema], + conversion: Optional[AnyConversion], + default_conversion: DefaultConversion, + version: Optional[JsonSchemaVersion], + aliaser: Optional[Aliaser], + ref_factory: 
Optional[RefFactory], + all_refs: Optional[bool], + with_schema: bool, + additional_properties: Optional[bool], +) -> Mapping[str, Any]: + from apischema import settings + + add_defs = ref_factory is None + if aliaser is None: + aliaser = settings.aliaser + if additional_properties is None: + additional_properties = settings.additional_properties + version, ref_factory, all_refs = _default_version(version, ref_factory, all_refs) + refs = _extract_refs([(tp, conversion)], default_conversion, builder, all_refs) + json_schema = builder( + additional_properties, default_conversion, False, ref_factory, refs + ).visit_with_conv(tp, conversion) + json_schema = full_schema(json_schema, schema) + if add_defs and version.defs: + defs = _refs_schema( + builder, default_conversion, refs, ref_factory, additional_properties + ) + if defs: + json_schema["$defs"] = defs + result = serialize( + JsonSchema, + json_schema, + aliaser=aliaser, + check_type=True, + conversion=version.conversion, + default_conversion=converters.default_serialization, + fall_back_on_any=True, + ) + if with_schema and version.schema is not None: + result["$schema"] = version.schema + return result + + +@deprecate_kwargs({"conversions": "conversion"}) +def deserialization_schema( + tp: AnyType, + *, + additional_properties: bool = None, + aliaser: Aliaser = None, + all_refs: bool = None, + conversion: AnyConversion = None, + default_conversion: DefaultConversion = None, + ref_factory: RefFactory = None, + schema: Schema = None, + version: JsonSchemaVersion = None, + with_schema: bool = True, +) -> Mapping[str, Any]: + from apischema import settings + + return _schema( + DeserializationSchemaBuilder, + tp, + schema, + conversion, + default_conversion or settings.deserialization.default_conversion, + version, + aliaser, + ref_factory, + all_refs, + with_schema, + additional_properties, + ) + + +@deprecate_kwargs({"conversions": "conversion"}) +def serialization_schema( + tp: AnyType, + *, + 
additional_properties: bool = None, + all_refs: bool = None, + aliaser: Aliaser = None, + conversion: AnyConversion = None, + default_conversion: DefaultConversion = None, + ref_factory: RefFactory = None, + schema: Schema = None, + version: JsonSchemaVersion = None, + with_schema: bool = True, +) -> Mapping[str, Any]: + from apischema import settings + + return _schema( + SerializationSchemaBuilder, + tp, + schema, + conversion, + default_conversion or settings.serialization.default_conversion, + version, + aliaser, + ref_factory, + all_refs, + with_schema, + additional_properties, + ) + + +def _defs_schema( + types: TypesWithConversion, + default_conversion: DefaultConversion, + builder: Type[SchemaBuilder], + ref_factory: RefFactory, + all_refs: bool, + additional_properties: bool, +) -> Mapping[str, JsonSchema]: + return _refs_schema( + builder, + default_conversion, + _extract_refs(types, default_conversion, builder, all_refs), + ref_factory, + additional_properties, + ) + + +def _set_missing_properties( + schema: JsonSchema, properties: Optional[Mapping[str, JsonSchema]], key: str +) -> JsonSchema: + if properties is None: + return schema + missing = {name: prop for name, prop in properties.items() if prop.get(key, False)} + schema.setdefault("properties", {}).update(missing) + return schema + + +def compare_schemas(write: Any, read: Any) -> Any: + if isinstance(write, Mapping): + if not isinstance(read, Mapping): + raise ValueError + merged: Dict[str, Any] = {} + for key in write.keys() | read.keys(): + if key in write and key in read: + if key == "properties": + merged[key] = {} + for prop in write[key].keys() | read[key].keys(): + if prop in write[key] and prop in read[key]: + merged[key][prop] = compare_schemas( + write[key][prop], read[key][prop] + ) + elif prop in write[key]: + merged[key][prop] = {**write[key][prop], "writeOnly": True} + else: + merged[key][prop] = {**read[key][prop], "readOnly": True} + elif key in { + "required", + 
"dependentRequired", + "additionalProperties", + "patternProperties", + }: + merged[key] = write[key] + else: + merged[key] = compare_schemas(write[key], read[key]) + else: + merged[key] = write.get(key, read.get(key)) + return merged + elif isinstance(read, Sequence) and not isinstance(read, str): + if not isinstance(read, Sequence) or len(write) != len(read): + raise ValueError + return [compare_schemas(write[i], read[i]) for i in range(len(write))] + else: + if not write == read: + raise ValueError + return write + + +def definitions_schema( + *, + deserialization: TypesWithConversion = (), + serialization: TypesWithConversion = (), + default_deserialization: DefaultConversion = None, + default_serialization: DefaultConversion = None, + aliaser: Aliaser = None, + version: JsonSchemaVersion = None, + ref_factory: Optional[RefFactory] = None, + all_refs: bool = None, + additional_properties: bool = None, +) -> Mapping[str, Mapping[str, Any]]: + from apischema import settings + + if additional_properties is None: + additional_properties = settings.additional_properties + if aliaser is None: + aliaser = settings.aliaser + if default_deserialization is None: + default_deserialization = settings.deserialization.default_conversion + if default_serialization is None: + default_serialization = settings.serialization.default_conversion + version, ref_factory, all_refs = _default_version(version, ref_factory, all_refs) + deserialization_schemas = _defs_schema( + deserialization, + default_deserialization, + DeserializationSchemaBuilder, + ref_factory, + all_refs, + additional_properties, + ) + serialization_schemas = _defs_schema( + serialization, + default_serialization, + SerializationSchemaBuilder, + ref_factory, + all_refs, + additional_properties, + ) + schemas = {} + for ref in deserialization_schemas.keys() | serialization_schemas.keys(): + if ref in deserialization_schemas and ref in serialization_schemas: + try: + schemas[ref] = compare_schemas( + 
deserialization_schemas[ref], serialization_schemas[ref] + ) + except ValueError: + raise TypeError( + f"Reference {ref} has different schemas" + f" for deserialization and serialization" + ) + else: + schemas[ref] = deserialization_schemas.get( + ref, serialization_schemas.get(ref) + ) + return { + ref: serialize( + JsonSchema, + schema, + aliaser=aliaser, + fall_back_on_any=True, + check_type=True, + conversion=version.conversion, + default_conversion=converters.default_serialization, + ) + for ref, schema in schemas.items() + } diff --git a/.venv/lib/python3.9/site-packages/apischema/json_schema/types.py b/.venv/lib/python3.9/site-packages/apischema/json_schema/types.py new file mode 100644 index 0000000..37b7e48 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/json_schema/types.py @@ -0,0 +1,130 @@ +from enum import Enum +from functools import wraps +from inspect import signature +from typing import ( + Any, + Callable, + Collection, + Dict, + Mapping, + Pattern, + Sequence, + Type, + TypeVar, + Union, + cast, +) + +from apischema.conversions import Conversion, serializer +from apischema.types import NoneType, Number, Undefined +from apischema.validation.errors import ValidationError + + +class JsonType(str, Enum): + NULL = "null" + BOOLEAN = "boolean" + STRING = "string" + INTEGER = "integer" + NUMBER = "number" + ARRAY = "array" + OBJECT = "object" + + @staticmethod + def from_type(cls: Type) -> "JsonType": + return TYPE_TO_JSON_TYPE[cls] + + +class JsonTypes(Dict[type, JsonType]): + def __missing__(self, key): + raise TypeError(f"Invalid JSON type {key}") + + +TYPE_TO_JSON_TYPE = JsonTypes( + { + NoneType: JsonType.NULL, + bool: JsonType.BOOLEAN, + str: JsonType.STRING, + int: JsonType.INTEGER, + float: JsonType.NUMBER, + list: JsonType.ARRAY, + dict: JsonType.OBJECT, + } +) + + +def bad_type(data: Any, *expected: type) -> ValidationError: + msgs = [ + f"expected type {JsonType.from_type(tp)}," + f" found {JsonType.from_type(data.__class__)}" 
+ for tp in expected + ] + return ValidationError(msgs) + + +class JsonSchema(Dict[str, Any]): + pass + + +serializer(Conversion(dict, source=JsonSchema)) + + +Func = TypeVar("Func", bound=Callable) + + +def json_schema_kwargs(func: Func) -> Func: + @wraps(func) + def wrapper(**kwargs): + type_ = kwargs.get("type") + if isinstance(type_, Sequence): + if JsonType.INTEGER in type_ and JsonType.NUMBER in type_: + kwargs["type"] = [t for t in type_ if t != JsonType.INTEGER] + return JsonSchema( + (k, v) + for k, v in kwargs.items() + if k not in _json_schema_params or v != _json_schema_params[k].default + ) + + _json_schema_params = signature(func).parameters + return cast(Func, wrapper) + + +@json_schema_kwargs +def json_schema( + *, + additionalProperties: Union[bool, JsonSchema] = JsonSchema(), + allOf: Sequence[JsonSchema] = [], + anyOf: Sequence[JsonSchema] = [], + const: Any = Undefined, + default: Any = Undefined, + dependentRequired: Mapping[str, Collection[str]] = {}, + deprecated: bool = False, + description: str = None, + enum: Sequence[Any] = [], + exclusiveMaximum: Number = None, + exclusiveMinimum: Number = None, + examples: Sequence[Any] = None, + format: str = None, + items: Union[bool, JsonSchema] = JsonSchema(), + maximum: Number = None, + minimum: Number = None, + maxItems: int = None, + minItems: int = None, + maxLength: int = None, + minLength: int = None, + maxProperties: int = None, + minProperties: int = None, + multipleOf: Number = None, + oneOf: Sequence[JsonSchema] = [], + pattern: Pattern = None, + patternProperties: Mapping[Pattern, JsonSchema] = {}, + prefixItems: Sequence[JsonSchema] = [], + properties: Mapping[str, JsonSchema] = {}, + readOnly: bool = False, + required: Sequence[str] = [], + title: str = None, + type: Union[JsonType, Sequence[JsonType]] = None, + uniqueItems: bool = False, + unevaluatedProperties: Union[bool, JsonSchema] = JsonSchema(), + writeOnly: bool = False, +) -> JsonSchema: + ... 
# --- apischema/json_schema/versions.py ---
from dataclasses import dataclass
from typing import Any, Callable, ClassVar, Dict, Optional

from apischema.conversions import Conversion, LazyConversion
from apischema.json_schema.types import JsonSchema, JsonType

# A RefFactory maps a bare reference name to the "$ref" URI emitted in output.
RefFactory = Callable[[str], str]


def ref_prefix(prefix: str) -> RefFactory:
    """Return a RefFactory that prepends *prefix* (normalized to end with '/')."""
    if not prefix.endswith("/"):
        prefix += "/"
    return lambda ref: prefix + ref


def isolate_ref(schema: Dict[str, Any]):
    """Move "$ref" into an "allOf" member when it has sibling keywords.

    Drafts before 2019-09 ignore keywords placed next to "$ref", so siblings
    must be kept and the reference wrapped instead. Mutates *schema* in place.
    """
    if "$ref" in schema and len(schema) > 1:
        schema.setdefault("allOf", []).append({"$ref": schema.pop("$ref")})


def to_json_schema_2019_09(schema: JsonSchema) -> Dict[str, Any]:
    """Downgrade a draft 2020-12 schema to draft 2019-09 keywords.

    2020-12 split tuple validation into "prefixItems" + "items"; 2019-09 uses
    "items" (array form) + "additionalItems".
    """
    result = schema.copy()
    if "prefixItems" in result:
        if "items" in result:
            result["additionalItems"] = result.pop("items")
        # Fixed: pop "prefixItems" so the 2020-12-only keyword does not leak
        # into the downgraded output alongside "items".
        result["items"] = result.pop("prefixItems")
    return result


def to_json_schema_7(schema: JsonSchema) -> Dict[str, Any]:
    """Downgrade a schema to draft-07 keywords ($defs/dependentRequired renames)."""
    result = to_json_schema_2019_09(schema)
    isolate_ref(result)
    if "$defs" in result:
        # Pre-existing "definitions" entries win over migrated "$defs" ones.
        result["definitions"] = {**result.pop("$defs"), **result.get("definitions", {})}
    if "dependentRequired" in result:
        result["dependencies"] = {
            **result.pop("dependentRequired"),
            **result.get("dependencies", {}),
        }
    return result


# Keywords with no OpenAPI 3.0 equivalent; silently dropped on conversion.
OPEN_API_3_0_UNSUPPORTED = [
    "dependentRequired",
    "unevaluatedProperties",
    "additionalItems",
]


def to_open_api_3_0(schema: JsonSchema) -> Dict[str, Any]:
    """Convert a schema to the OpenAPI 3.0 Schema Object dialect.

    Null types become "nullable: true", multi-type arrays are expanded into
    "anyOf", "examples" collapses to a single "example", and "const" becomes a
    one-element "enum".
    """
    result = to_json_schema_2019_09(schema)
    for key in OPEN_API_3_0_UNSUPPORTED:
        result.pop(key, ...)
    isolate_ref(result)
    if {"type": "null"} in result.get("anyOf", ()):
        result.setdefault("nullable", True)
        result["anyOf"] = [a for a in result["anyOf"] if a != {"type": "null"}]
    if "type" in result and not isinstance(result["type"], (str, JsonType)):
        # "type" is a list of types (not allowed in OpenAPI 3.0).
        if "null" in result["type"]:
            result.setdefault("nullable", True)
            result["type"] = [t for t in result["type"] if t != "null"]
        if len(result["type"]) > 1:
            result.setdefault("anyOf", []).extend(
                {"type": t} for t in result.pop("type")
            )
        else:
            result["type"] = result["type"][0]
    if "examples" in result:
        result.setdefault("example", result.pop("examples")[0])
    if "const" in result:
        result.setdefault("enum", [result.pop("const")])
    return result


@dataclass
class JsonSchemaVersion:
    """Describes a target JSON Schema dialect and how to serialize into it.

    Attributes:
        schema: "$schema" meta-schema URI, or None to omit it (OpenAPI).
        ref_prefix: prefix used to build "$ref" URIs.
        serialization: optional converter downgrading 2020-12 schemas.
        all_refs: whether every named type gets its own reference.
        defs: whether collected definitions are emitted under "$defs".
    """

    schema: Optional[str] = None
    ref_prefix: str = ""
    serialization: Optional[Callable] = None
    all_refs: bool = True
    defs: bool = True

    @property
    def conversion(self) -> Optional[Conversion]:
        if self.serialization:
            # Recursive conversion pattern: the conversion applies itself to
            # nested schemas via a lazily-resolved self-reference.
            tmp = None
            conversion = Conversion(
                self.serialization, sub_conversion=LazyConversion(lambda: tmp)
            )
            tmp = conversion
            return conversion
        else:
            return None

    @property
    def ref_factory(self) -> RefFactory:
        return ref_prefix(self.ref_prefix)

    DRAFT_2020_12: ClassVar["JsonSchemaVersion"]
    DRAFT_2019_09: ClassVar["JsonSchemaVersion"]
    DRAFT_7: ClassVar["JsonSchemaVersion"]
    OPEN_API_3_0: ClassVar["JsonSchemaVersion"]
    OPEN_API_3_1: ClassVar["JsonSchemaVersion"]


JsonSchemaVersion.DRAFT_2020_12 = JsonSchemaVersion(
    "http://json-schema.org/draft/2020-12/schema#", "#/$defs/", None, False, True
)
JsonSchemaVersion.DRAFT_2019_09 = JsonSchemaVersion(
    # Fixed: was the 2020-12 meta-schema URI, inconsistent with this version's
    # name and with the to_json_schema_2019_09 downgrade applied below.
    "http://json-schema.org/draft/2019-09/schema#",
    "#/$defs/",
    to_json_schema_2019_09,
    False,
    True,
)
JsonSchemaVersion.DRAFT_7 = JsonSchemaVersion(
    "http://json-schema.org/draft-07/schema#",
    "#/definitions/",
    to_json_schema_7,
    False,
    True,
)
+JsonSchemaVersion.OPEN_API_3_0 = JsonSchemaVersion( + None, "#/components/schemas/", to_open_api_3_0, True, False +) +JsonSchemaVersion.OPEN_API_3_1 = JsonSchemaVersion( + None, "#/components/schemas/", None, True, False +) diff --git a/.venv/lib/python3.9/site-packages/apischema/metadata/__init__.py b/.venv/lib/python3.9/site-packages/apischema/metadata/__init__.py new file mode 100644 index 0000000..9418c19 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/metadata/__init__.py @@ -0,0 +1,52 @@ +__all__ = [ + "alias", + "conversion", + "default_as_set", + "fall_back_on_default", + "flatten", + "init_var", + "none_as_undefined", + "order", + "post_init", + "properties", + "required", + "schema", + "skip", + "validators", +] + +import sys +import warnings + +from apischema.aliases import alias +from apischema.ordering import order +from apischema.schemas import schema +from .implem import ( + conversion, + default_as_set, + fall_back_on_default, + flatten, + init_var, + none_as_undefined, + post_init, + properties, + required, + skip, + validators, +) + +if sys.version_info >= (3, 7): + + def __getattr__(name): + for deprecated in ("merged", "flattened"): + if name == deprecated: + warnings.warn( + f"apischema.metadata.{deprecated} is deprecated, " + "use apischema.metadata.flatten instead", + DeprecationWarning, + ) + return flatten + raise AttributeError(f"module {__name__} has no attribute {name}") + +else: + from .implem import flattened, merged # noqa: F401 diff --git a/.venv/lib/python3.9/site-packages/apischema/metadata/implem.py b/.venv/lib/python3.9/site-packages/apischema/metadata/implem.py new file mode 100644 index 0000000..5629289 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/metadata/implem.py @@ -0,0 +1,105 @@ +import re +from dataclasses import dataclass +from typing import Any, Callable, Optional, Pattern, TYPE_CHECKING, Tuple, Union + +from apischema.metadata.keys import ( + CONVERSION_METADATA, + 
DEFAULT_AS_SET_METADATA, + FALL_BACK_ON_DEFAULT_METADATA, + FLATTEN_METADATA, + INIT_VAR_METADATA, + NONE_AS_UNDEFINED_METADATA, + POST_INIT_METADATA, + PROPERTIES_METADATA, + REQUIRED_METADATA, + SKIP_METADATA, + VALIDATORS_METADATA, +) +from apischema.types import AnyType, Metadata, MetadataImplem, MetadataMixin + +if TYPE_CHECKING: + from apischema.conversions.conversions import AnyConversion + from apischema.validation.validators import Validator + + +def simple_metadata(key: str) -> Metadata: + return MetadataImplem({key: ...}) + + +@dataclass(frozen=True) +class ConversionMetadata(MetadataMixin): + key = CONVERSION_METADATA + deserialization: Optional["AnyConversion"] = None + serialization: Optional["AnyConversion"] = None + + +conversion = ConversionMetadata + +default_as_set = simple_metadata(DEFAULT_AS_SET_METADATA) + +fall_back_on_default = simple_metadata(FALL_BACK_ON_DEFAULT_METADATA) + +flatten = simple_metadata(FLATTEN_METADATA) +flattened = flatten +merged = flatten + + +def init_var(tp: AnyType) -> Metadata: + return MetadataImplem({INIT_VAR_METADATA: tp}) + + +none_as_undefined = simple_metadata(NONE_AS_UNDEFINED_METADATA) + +post_init = simple_metadata(POST_INIT_METADATA) + + +class PropertiesMetadata(dict, Metadata): # type: ignore + def __init__(self): + super().__init__({PROPERTIES_METADATA: None}) + + def __call__( + self, pattern: Union[str, Pattern, "ellipsis"] # noqa: F821 + ) -> Metadata: + if pattern is not ...: + pattern = re.compile(pattern) + return MetadataImplem({PROPERTIES_METADATA: pattern}) + + +properties = PropertiesMetadata() + +required = simple_metadata(REQUIRED_METADATA) + + +@dataclass(frozen=True) +class SkipMetadata(MetadataMixin): + key = SKIP_METADATA + deserialization: bool = False + serialization: bool = False + serialization_default: bool = False + serialization_if: Optional[Callable[[Any], Any]] = None + + def __call__( + self, + deserialization: bool = False, + serialization: bool = False, + serialization_default: 
bool = False, + serialization_if: Optional[Callable[[Any], Any]] = None, + ) -> "SkipMetadata": + return SkipMetadata( + deserialization, serialization, serialization_default, serialization_if + ) + + +skip = SkipMetadata(deserialization=True, serialization=True) + + +@dataclass(frozen=True) +class ValidatorsMetadata(MetadataMixin): + key = VALIDATORS_METADATA + validators: Tuple["Validator", ...] + + +def validators(*validator: Callable) -> ValidatorsMetadata: + from apischema.validation.validators import Validator + + return ValidatorsMetadata(tuple(map(Validator, validator))) diff --git a/.venv/lib/python3.9/site-packages/apischema/metadata/keys.py b/.venv/lib/python3.9/site-packages/apischema/metadata/keys.py new file mode 100644 index 0000000..de3d924 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/metadata/keys.py @@ -0,0 +1,17 @@ +from apischema.utils import PREFIX + +ALIAS_METADATA = f"{PREFIX}alias" +ALIAS_NO_OVERRIDE_METADATA = f"{PREFIX}alias_no_override" +CONVERSION_METADATA = f"{PREFIX}conversion" +DEFAULT_AS_SET_METADATA = f"{PREFIX}default_as_set" +FALL_BACK_ON_DEFAULT_METADATA = f"{PREFIX}fall_back_on_default" +FLATTEN_METADATA = f"{PREFIX}flattened" +INIT_VAR_METADATA = f"{PREFIX}init_var" +NONE_AS_UNDEFINED_METADATA = f"{PREFIX}none_as_undefined" +ORDERING_METADATA = f"{PREFIX}ordering" +POST_INIT_METADATA = f"{PREFIX}post_init" +PROPERTIES_METADATA = f"{PREFIX}properties" +REQUIRED_METADATA = f"{PREFIX}required" +SCHEMA_METADATA = f"{PREFIX}schema" +SKIP_METADATA = f"{PREFIX}skip" +VALIDATORS_METADATA = f"{PREFIX}validators" diff --git a/.venv/lib/python3.9/site-packages/apischema/methods.py b/.venv/lib/python3.9/site-packages/apischema/methods.py new file mode 100644 index 0000000..d0dc03a --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/methods.py @@ -0,0 +1,137 @@ +import inspect +from functools import wraps +from inspect import signature +from types import FunctionType +from typing import Callable, Generic, 
Optional, Type, Union, cast + +from apischema.typing import get_type_hints +from apischema.utils import PREFIX, T, get_origin_or_type2 + +MethodOrProperty = Union[Callable, property] + + +def _method_location(method: MethodOrProperty) -> Optional[Type]: + if isinstance(method, property): + assert method.fget is not None + method = method.fget + while hasattr(method, "__wrapped__"): + method = method.__wrapped__ # type: ignore + assert isinstance(method, FunctionType) + global_name, *class_path = method.__qualname__.split(".")[:-1] + if global_name not in method.__globals__: + return None + location = method.__globals__[global_name] + for attr in class_path: + if hasattr(location, attr): + location = getattr(location, attr) + else: + break + return location + + +def is_method(method: MethodOrProperty) -> bool: + """Return if the function is method/property declared in a class""" + return ( + isinstance(method, property) + and method.fget is not None + and is_method(method.fget) + ) or ( + isinstance(method, FunctionType) + and method.__name__ != method.__qualname__ + and isinstance(_method_location(method), (type, type(None))) + and next(iter(inspect.signature(method).parameters), None) == "self" + ) + + +def method_class(method: MethodOrProperty) -> Optional[Type]: + cls = _method_location(method) + return cls if isinstance(cls, type) else None + + +METHOD_WRAPPER_ATTR = f"{PREFIX}method_wrapper" + + +def method_wrapper(method: MethodOrProperty, name: str = None) -> Callable: + if isinstance(method, property): + assert method.fget is not None + name = name or method.fget.__name__ + + @wraps(method.fget) + def wrapper(self): + return getattr(self, name) + + else: + if hasattr(method, METHOD_WRAPPER_ATTR): + return method + name = name or method.__name__ + + if list(signature(method).parameters) == ["self"]: + + @wraps(method) + def wrapper(self): + return getattr(self, name)() + + else: + + @wraps(method) + def wrapper(self, *args, **kwargs): + return getattr(self, 
name)(*args, **kwargs) + + setattr(wrapper, METHOD_WRAPPER_ATTR, True) + return wrapper + + +class MethodWrapper(Generic[T]): + def __init__(self, method: T): + self._method = method + + def getter(self, func): + self._method = self._method.getter(func) + return self + + def setter(self, func): + self._method = self._method.setter(func) + return self + + def deleter(self, func): + self._method = self._method.deleter(func) + return self + + def __set_name__(self, owner, name): + setattr(owner, name, self._method) + + def __call__(self, *args, **kwargs): + raise RuntimeError("Method __set_name__ has not been called") + + +def method_registerer( + arg: Optional[Callable], + owner: Optional[Type], + register: Callable[[Callable, Type, str], None], +): + def decorator(method: MethodOrProperty): + if owner is None and is_method(method) and method_class(method) is None: + + class Descriptor(MethodWrapper[MethodOrProperty]): + def __set_name__(self, owner, name): + super().__set_name__(owner, name) + register(method_wrapper(method), owner, name) + + return Descriptor(method) + else: + owner2 = owner + if is_method(method): + if owner2 is None: + owner2 = method_class(method) + method = method_wrapper(method) + if owner2 is None: + try: + hints = get_type_hints(method) + owner2 = get_origin_or_type2(hints[next(iter(hints))]) + except (KeyError, StopIteration): + raise TypeError("First parameter of method must be typed") from None + assert not isinstance(method, property) + register(cast(Callable, method), owner2, method.__name__) + return method + + return decorator if arg is None else decorator(arg) diff --git a/.venv/lib/python3.9/site-packages/apischema/objects/__init__.py b/.venv/lib/python3.9/site-packages/apischema/objects/__init__.py new file mode 100644 index 0000000..f26af87 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/objects/__init__.py @@ -0,0 +1,13 @@ +__all__ = [ + "AliasedStr", + "ObjectField", + "get_alias", + "get_field", + 
"object_deserialization", + "object_fields", + "object_serialization", + "set_object_fields", +] +from .conversions import object_deserialization, object_serialization +from .fields import ObjectField, set_object_fields +from .getters import AliasedStr, get_alias, get_field, object_fields diff --git a/.venv/lib/python3.9/site-packages/apischema/objects/conversions.py b/.venv/lib/python3.9/site-packages/apischema/objects/conversions.py new file mode 100644 index 0000000..d042d1f --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/objects/conversions.py @@ -0,0 +1,177 @@ +import inspect +from dataclasses import Field, replace +from types import new_class +from typing import ( + Any, + Callable, + Dict, + Generic, + Iterable, + Mapping, + Sequence, + Tuple, + Type, + TypeVar, + Union, +) + +from apischema.methods import is_method, method_wrapper +from apischema.objects.fields import MISSING_DEFAULT, ObjectField, set_object_fields +from apischema.objects.getters import object_fields, parameters_as_fields +from apischema.type_names import type_name +from apischema.types import OrderedDict +from apischema.typing import get_type_hints +from apischema.utils import ( + empty_dict, + substitute_type_vars, + subtyping_substitution, + to_pascal_case, + with_parameters, +) + +T = TypeVar("T") + + +def object_deserialization( + func: Callable[..., T], + *input_class_modifiers: Callable[[type], Any], + parameters_metadata: Mapping[str, Mapping] = None, +) -> Any: + fields = parameters_as_fields(func, parameters_metadata) + types = get_type_hints(func, include_extras=True) + if "return" not in types: + raise TypeError("Object deserialization must be typed") + return_type = types["return"] + bases = () + if getattr(return_type, "__parameters__", ()): + bases = (Generic[return_type.__parameters__],) # type: ignore + elif func.__name__ != "": + input_class_modifiers = ( + type_name(to_pascal_case(func.__name__)), + *input_class_modifiers, + ) + + def __init__(self, 
**kwargs): + self.kwargs = kwargs + + input_cls = new_class( + to_pascal_case(func.__name__), + bases, + exec_body=lambda ns: ns.update({"__init__": __init__}), + ) + for modifier in input_class_modifiers: + modifier(input_cls) + set_object_fields(input_cls, fields) + if any(f.additional_properties for f in fields): + kwargs_param = next(f.name for f in fields if f.additional_properties) + + def wrapper(input): + kwargs = input.kwargs.copy() + kwargs.update(kwargs.pop(kwargs_param)) + return func(**kwargs) + + else: + + def wrapper(input): + return func(**input.kwargs) + + wrapper.__annotations__["input"] = with_parameters(input_cls) + wrapper.__annotations__["return"] = return_type + return wrapper + + +def _fields_and_init( + cls: type, fields_and_methods: Union[Iterable[Any], Callable[[], Iterable[Any]]] +) -> Tuple[Sequence[ObjectField], Callable[[Any, Any], None]]: + fields = object_fields(cls, serialization=True) + output_fields: Dict[str, ObjectField] = OrderedDict() + methods = [] + if callable(fields_and_methods): + fields_and_methods = fields_and_methods() + for elt in fields_and_methods: + if elt is ...: + output_fields.update(fields) + continue + if isinstance(elt, tuple): + elt, metadata = elt + else: + metadata = empty_dict + if not isinstance(metadata, Mapping): + raise TypeError(f"Invalid metadata {metadata}") + if isinstance(elt, Field): + elt = elt.name + if isinstance(elt, str) and elt in fields: + elt = fields[elt] + if is_method(elt): + elt = method_wrapper(elt) + if isinstance(elt, ObjectField): + if metadata: + output_fields[elt.name] = replace( + elt, metadata={**elt.metadata, **metadata}, default=MISSING_DEFAULT + ) + else: + output_fields[elt.name] = elt + continue + elif callable(elt): + types = get_type_hints(elt) + first_param = next(iter(inspect.signature(elt).parameters)) + substitution, _ = subtyping_substitution(types.get(first_param, cls), cls) + ret = substitute_type_vars(types.get("return", Any), substitution) + 
output_fields[elt.__name__] = ObjectField( + elt.__name__, ret, metadata=metadata + ) + methods.append((elt, output_fields[elt.__name__])) + else: + raise TypeError(f"Invalid serialization member {elt} for class {cls}") + + serialized_methods = [m for m, f in methods if output_fields[f.name] is f] + serialized_fields = list( + output_fields.keys() - {m.__name__ for m in serialized_methods} + ) + + def __init__(self, obj): + for field in serialized_fields: + setattr(self, field, getattr(obj, field)) + for method in serialized_methods: + setattr(self, method.__name__, method(obj)) + + return tuple(output_fields.values()), __init__ + + +def object_serialization( + cls: Type[T], + fields_and_methods: Union[Iterable[Any], Callable[[], Iterable[Any]]], + *output_class_modifiers: Callable[[type], Any], +) -> Callable[[T], Any]: + + generic, bases = cls, () + if getattr(cls, "__parameters__", ()): + generic = cls[cls.__parameters__] # type: ignore + bases = Generic[cls.__parameters__] # type: ignore + elif ( + callable(fields_and_methods) + and fields_and_methods.__name__ != "" + and not getattr(cls, "__parameters__", ()) + ): + output_class_modifiers = ( + type_name(to_pascal_case(fields_and_methods.__name__)), + *output_class_modifiers, + ) + + def __init__(self, obj): + _, new_init = _fields_and_init(cls, fields_and_methods) + new_init.__annotations__ = {"obj": generic} + output_cls.__init__ = new_init + new_init(self, obj) + + __init__.__annotations__ = {"obj": generic} + output_cls = new_class( + f"{cls.__name__}Serialization", + bases, + exec_body=lambda ns: ns.update({"__init__": __init__}), + ) + for modifier in output_class_modifiers: + modifier(output_cls) + set_object_fields(output_cls, lambda: _fields_and_init(cls, fields_and_methods)[0]) + + return output_cls diff --git a/.venv/lib/python3.9/site-packages/apischema/objects/fields.py b/.venv/lib/python3.9/site-packages/apischema/objects/fields.py new file mode 100644 index 0000000..b9b5d9a --- /dev/null +++ 
b/.venv/lib/python3.9/site-packages/apischema/objects/fields.py @@ -0,0 +1,260 @@ +from dataclasses import Field, InitVar, MISSING, dataclass, field +from enum import Enum, auto +from types import FunctionType +from typing import ( + Any, + Callable, + Iterable, + Mapping, + MutableMapping, + NoReturn, + Optional, + Pattern, + Sequence, + TYPE_CHECKING, + Union, + cast, +) + +from apischema.cache import CacheAwareDict +from apischema.conversions.conversions import AnyConversion +from apischema.metadata.implem import ( + ConversionMetadata, + SkipMetadata, + ValidatorsMetadata, +) +from apischema.metadata.keys import ( + ALIAS_METADATA, + ALIAS_NO_OVERRIDE_METADATA, + CONVERSION_METADATA, + DEFAULT_AS_SET_METADATA, + FALL_BACK_ON_DEFAULT_METADATA, + FLATTEN_METADATA, + NONE_AS_UNDEFINED_METADATA, + ORDERING_METADATA, + POST_INIT_METADATA, + PROPERTIES_METADATA, + REQUIRED_METADATA, + SCHEMA_METADATA, + SKIP_METADATA, + VALIDATORS_METADATA, +) +from apischema.types import AnyType, ChainMap, NoneType, UndefinedType +from apischema.typing import get_args, is_annotated +from apischema.utils import ( + LazyValue, + empty_dict, + get_args2, + is_union_of, + keep_annotations, +) + +if TYPE_CHECKING: + from apischema.ordering import Ordering + from apischema.schemas import Schema + from apischema.validation.validators import Validator + + +class FieldKind(Enum): + NORMAL = auto() + READ_ONLY = auto() + WRITE_ONLY = auto() + + +# Cannot reuse MISSING for dataclass field because it would be interpreted as no default +MISSING_DEFAULT = object() + + +@dataclass(frozen=True) +class ObjectField: + name: str + type: AnyType + required: bool = True + metadata: Mapping[str, Any] = field(default_factory=lambda: empty_dict) + default: InitVar[Any] = MISSING_DEFAULT + default_factory: Optional[Callable[[], Any]] = None + kind: FieldKind = FieldKind.NORMAL + + def __post_init__(self, default: Any): + if REQUIRED_METADATA in self.full_metadata: + object.__setattr__(self, "required", 
True) + if self.default_factory is MISSING: + object.__setattr__(self, "default_factory", None) + if not self.required and self.default_factory is None: + if default is MISSING_DEFAULT: + raise ValueError("Missing default for non-required ObjectField") + object.__setattr__(self, "default_factory", LazyValue(default)) + if self.none_as_undefined and is_union_of(self.type, NoneType): + new_type = Union[tuple(a for a in get_args2(self.type) if a != NoneType)] # type: ignore + object.__setattr__(self, "type", keep_annotations(new_type, self.type)) + + @property + def full_metadata(self) -> Mapping[str, Any]: + if not is_annotated(self.type): + return self.metadata + return ChainMap( + self.metadata, + *( + arg + for arg in reversed(get_args(self.type)[1:]) + if isinstance(arg, Mapping) + ), + ) + + @property + def additional_properties(self) -> bool: + return self.full_metadata.get(PROPERTIES_METADATA, ...) is None + + @property + def alias(self) -> str: + return self.full_metadata.get(ALIAS_METADATA, self.name) + + @property + def override_alias(self) -> bool: + return ALIAS_NO_OVERRIDE_METADATA not in self.full_metadata + + @property + def _conversion(self) -> Optional[ConversionMetadata]: + return self.metadata.get(CONVERSION_METADATA) + + @property + def default_as_set(self) -> bool: + return DEFAULT_AS_SET_METADATA in self.full_metadata + + @property + def deserialization(self) -> Optional[AnyConversion]: + conversion = self._conversion + return conversion.deserialization if conversion is not None else None + + @property + def fall_back_on_default(self) -> bool: + return ( + FALL_BACK_ON_DEFAULT_METADATA in self.full_metadata + and self.default_factory is not None + ) + + @property + def flattened(self) -> bool: + return FLATTEN_METADATA in self.full_metadata + + def get_default(self) -> Any: + if self.required: + raise RuntimeError("Field is required") + assert self.default_factory is not None + return self.default_factory() # type: ignore + + @property + def 
is_aggregate(self) -> bool: + return ( + self.flattened + or self.additional_properties + or self.pattern_properties is not None + ) + + @property + def none_as_undefined(self): + return NONE_AS_UNDEFINED_METADATA in self.full_metadata + + @property + def ordering(self) -> Optional["Ordering"]: + return self.full_metadata.get(ORDERING_METADATA) + + @property + def post_init(self) -> bool: + return POST_INIT_METADATA in self.full_metadata + + @property + def pattern_properties(self) -> Union[Pattern, "ellipsis", None]: # noqa: F821 + return self.full_metadata.get(PROPERTIES_METADATA) + + @property + def schema(self) -> Optional["Schema"]: + return self.metadata.get(SCHEMA_METADATA) + + @property + def serialization(self) -> Optional[AnyConversion]: + conversion = self._conversion + return conversion.serialization if conversion is not None else None + + @property + def skip(self) -> SkipMetadata: + return self.metadata.get(SKIP_METADATA, SkipMetadata()) + + def skippable(self, default: bool, none: bool) -> bool: + return bool( + self.skip.serialization_if + or is_union_of(self.type, UndefinedType) + or ( + self.default_factory is not None + and (self.skip.serialization_default or default) + ) + or self.none_as_undefined + or (none and is_union_of(self.type, NoneType)) + ) + + @property + def undefined(self) -> bool: + return is_union_of(self.type, UndefinedType) + + @property + def validators(self) -> Sequence["Validator"]: + if VALIDATORS_METADATA in self.metadata: + return cast( + ValidatorsMetadata, self.metadata[VALIDATORS_METADATA] + ).validators + else: + return () + + +FieldOrName = Union[str, ObjectField, Field] + + +def _bad_field(obj: Any, methods: bool) -> NoReturn: + method_types = "property/types.FunctionType" if methods else "" + raise TypeError( + f"Expected dataclasses.Field/apischema.ObjectField/str{method_types}, found {obj}" + ) + + +def check_field_or_name(field_or_name: Any, *, methods: bool = False): + method_types = (property, FunctionType) if 
methods else () + if not isinstance(field_or_name, (str, ObjectField, Field, *method_types)): + _bad_field(field_or_name, methods) + + +def get_field_name(field_or_name: Any, *, methods: bool = False) -> str: + if isinstance(field_or_name, (Field, ObjectField)): + return field_or_name.name + elif isinstance(field_or_name, str): + return field_or_name + elif ( + methods + and isinstance(field_or_name, property) + and field_or_name.fget is not None + ): + return field_or_name.fget.__name__ + elif methods and isinstance(field_or_name, FunctionType): + return field_or_name.__name__ + else: + _bad_field(field_or_name, methods) + + +_class_fields: MutableMapping[ + type, Callable[[], Sequence[ObjectField]] +] = CacheAwareDict({}) + + +def set_object_fields( + cls: type, + fields: Union[Iterable[ObjectField], Callable[[], Sequence[ObjectField]], None], +): + if fields is None: + _class_fields.pop(cls, ...) + elif callable(fields): + _class_fields[cls] = fields + else: + _class_fields[cls] = lambda fields=tuple(fields): fields # type: ignore + + +def default_object_fields(cls: type) -> Optional[Sequence[ObjectField]]: + return _class_fields[cls]() if cls in _class_fields else None diff --git a/.venv/lib/python3.9/site-packages/apischema/objects/getters.py b/.venv/lib/python3.9/site-packages/apischema/objects/getters.py new file mode 100644 index 0000000..82ab8d0 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/objects/getters.py @@ -0,0 +1,151 @@ +import inspect +from typing import ( + Any, + Callable, + Mapping, + Optional, + Sequence, + Type, + TypeVar, + Union, + cast, + overload, +) + +from apischema.cache import cache +from apischema.metadata import properties +from apischema.objects.fields import ObjectField +from apischema.objects.visitor import ObjectVisitor +from apischema.types import AnyType, OrderedDict +from apischema.typing import _GenericAlias, get_type_hints +from apischema.utils import empty_dict +from apischema.visitor import Unsupported + 
+ +@cache +def object_fields( + tp: AnyType, + deserialization: bool = False, + serialization: bool = False, + default: Optional[ + Callable[[type], Optional[Sequence[ObjectField]]] + ] = ObjectVisitor._default_fields, +) -> Mapping[str, ObjectField]: + class GetFields(ObjectVisitor[Sequence[ObjectField]]): + def _skip_field(self, field: ObjectField) -> bool: + return (field.skip.deserialization and serialization) or ( + field.skip.serialization and deserialization + ) + + @staticmethod + def _default_fields(cls: type) -> Optional[Sequence[ObjectField]]: + return None if default is None else default(cls) + + def object( + self, cls: Type, fields: Sequence[ObjectField] + ) -> Sequence[ObjectField]: + return fields + + try: + return OrderedDict((f.name, f) for f in GetFields().visit(tp)) + except (Unsupported, NotImplementedError): + raise TypeError(f"{tp} doesn't have fields") + + +def object_fields2(obj: Any) -> Mapping[str, ObjectField]: + return object_fields( + obj if isinstance(obj, (type, _GenericAlias)) else obj.__class__ + ) + + +T = TypeVar("T") + + +class FieldGetter: + def __init__(self, obj: Any): + self.fields = object_fields2(obj) + + def __getattribute__(self, name: str) -> ObjectField: + try: + return object.__getattribute__(self, "fields")[name] + except KeyError: + raise AttributeError(name) + + +@overload +def get_field(obj: Type[T]) -> T: + ... + + +@overload +def get_field(obj: T) -> T: + ... + + +# Overload because of Mypy issue +# https://github.com/python/mypy/issues/9003#issuecomment-667418520 +def get_field(obj: Union[Type[T], T]) -> T: + return cast(T, FieldGetter(obj)) + + +class AliasedStr(str): + pass + + +class AliasGetter: + def __init__(self, obj: Any): + self.fields = object_fields2(obj) + + def __getattribute__(self, name: str) -> str: + try: + return AliasedStr(object.__getattribute__(self, "fields")[name].alias) + except KeyError: + raise AttributeError(name) + + +@overload +def get_alias(obj: Type[T]) -> T: + ... 
+ + +@overload +def get_alias(obj: T) -> T: + ... + + +def get_alias(obj: Union[Type[T], T]) -> T: + return cast(T, AliasGetter(obj)) + + +def parameters_as_fields( + func: Callable, parameters_metadata: Mapping[str, Mapping] = None +) -> Sequence[ObjectField]: + parameters_metadata = parameters_metadata or {} + types = get_type_hints(func, include_extras=True) + fields = [] + for param_name, param in inspect.signature(func).parameters.items(): + if param.kind is inspect.Parameter.POSITIONAL_ONLY: + raise TypeError("Positional only parameters are not supported") + param_type = types.get(param_name, Any) + if param.kind in { + inspect.Parameter.POSITIONAL_OR_KEYWORD, + inspect.Parameter.KEYWORD_ONLY, + }: + field = ObjectField( + param_name, + param_type, + param.default is inspect.Parameter.empty, + parameters_metadata.get(param_name, empty_dict), + default=param.default, + ) + fields.append(field) + elif param.kind == inspect.Parameter.VAR_KEYWORD: + field = ObjectField( + param_name, + Mapping[str, param_type], # type: ignore + False, + properties | parameters_metadata.get(param_name, empty_dict), + default_factory=dict, + ) + fields.append(field) + return fields diff --git a/.venv/lib/python3.9/site-packages/apischema/objects/visitor.py b/.venv/lib/python3.9/site-packages/apischema/objects/visitor.py new file mode 100644 index 0000000..9573564 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/objects/visitor.py @@ -0,0 +1,153 @@ +from dataclasses import Field, MISSING +from typing import Any, Collection, Mapping, Optional, Sequence + +from apischema.aliases import Aliaser, get_class_aliaser +from apischema.conversions.conversions import AnyConversion +from apischema.dataclasses import replace +from apischema.metadata.keys import ALIAS_METADATA +from apischema.objects.fields import FieldKind, MISSING_DEFAULT, ObjectField +from apischema.types import AnyType, Undefined +from apischema.typing import get_args +from apischema.utils import 
get_origin_or_type, get_parameters, substitute_type_vars +from apischema.visitor import Result, Visitor + + +def object_field_from_field( + field: Field, field_type: AnyType, init_var: bool +) -> ObjectField: + required = field.default is MISSING and field.default_factory is MISSING # type: ignore + if init_var: + kind = FieldKind.WRITE_ONLY + elif not field.init: + kind = FieldKind.READ_ONLY + else: + kind = FieldKind.NORMAL + return ObjectField( + field.name, + field_type, + required, + field.metadata, + default=field.default, + default_factory=field.default_factory, # type: ignore + kind=kind, + ) + + +def _override_alias(field: ObjectField, aliaser: Aliaser) -> ObjectField: + if field.override_alias: + return replace( + field, + metadata={**field.metadata, ALIAS_METADATA: aliaser(field.alias)}, + default=MISSING_DEFAULT, + ) + else: + return field + + +class ObjectVisitor(Visitor[Result]): + _field_kind_filtered: Optional[FieldKind] = None + + def _field_conversion(self, field: ObjectField) -> Optional[AnyConversion]: + raise NotImplementedError + + def _skip_field(self, field: ObjectField) -> bool: + raise NotImplementedError + + @staticmethod + def _default_fields(cls: type) -> Optional[Sequence[ObjectField]]: + from apischema import settings + + return settings.default_object_fields(cls) + + def _override_fields( + self, tp: AnyType, fields: Sequence[ObjectField] + ) -> Sequence[ObjectField]: + + origin = get_origin_or_type(tp) + if isinstance(origin, type): + default_fields = self._default_fields(origin) + if default_fields is not None: + if get_args(tp): + sub = dict(zip(get_parameters(origin), get_args(tp))) + default_fields = [ + replace(f, type=substitute_type_vars(f.type, sub)) + for f in default_fields + ] + return default_fields + return fields + + def _object(self, tp: AnyType, fields: Sequence[ObjectField]) -> Result: + fields = [f for f in fields if not self._skip_field(f)] + aliaser = get_class_aliaser(get_origin_or_type(tp)) + if aliaser is not 
None: + fields = [_override_alias(f, aliaser) for f in fields] + return self.object(tp, fields) + + def dataclass( + self, + tp: AnyType, + types: Mapping[str, AnyType], + fields: Sequence[Field], + init_vars: Sequence[Field], + ) -> Result: + by_name = { + f.name: object_field_from_field(f, types[f.name], init_var) + for field_group, init_var in [(fields, False), (init_vars, True)] + for f in field_group + } + object_fields = [ + by_name[name] + for name in types + if name in by_name and by_name[name].kind != self._field_kind_filtered + ] + return self._object(tp, self._override_fields(tp, object_fields)) + + def object(self, tp: AnyType, fields: Sequence[ObjectField]) -> Result: + raise NotImplementedError + + def named_tuple( + self, tp: AnyType, types: Mapping[str, AnyType], defaults: Mapping[str, Any] + ) -> Result: + fields = [ + ObjectField(name, type_, name not in defaults, default=defaults.get(name)) + for name, type_ in types.items() + ] + return self._object(tp, self._override_fields(tp, fields)) + + def typed_dict( + self, tp: AnyType, types: Mapping[str, AnyType], required_keys: Collection[str] + ) -> Result: + fields = [ + ObjectField(name, type_, name in required_keys, default=Undefined) + for name, type_ in types.items() + ] + return self._object(tp, self._override_fields(tp, fields)) + + def unsupported(self, tp: AnyType) -> Result: + dummy: list = [] + fields = self._override_fields(tp, dummy) + return super().unsupported(tp) if fields is dummy else self._object(tp, fields) + + +class DeserializationObjectVisitor(ObjectVisitor[Result]): + _field_kind_filtered = FieldKind.READ_ONLY + + @staticmethod + def _field_conversion(field: ObjectField) -> Optional[AnyConversion]: + return field.deserialization + + @staticmethod + def _skip_field(field: ObjectField) -> bool: + return field.skip.deserialization + + +class SerializationObjectVisitor(ObjectVisitor[Result]): + _field_kind_filtered = FieldKind.WRITE_ONLY + + @staticmethod + def 
_field_conversion(field: ObjectField) -> Optional[AnyConversion]: + return field.serialization + + @staticmethod + def _skip_field(field: ObjectField) -> bool: + return field.skip.serialization diff --git a/.venv/lib/python3.9/site-packages/apischema/ordering.py b/.venv/lib/python3.9/site-packages/apischema/ordering.py new file mode 100644 index 0000000..850bbd6 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/ordering.py @@ -0,0 +1,142 @@ +from collections import defaultdict +from dataclasses import dataclass +from typing import ( + Any, + Callable, + Collection, + Dict, + List, + Mapping, + MutableMapping, + Optional, + Sequence, + TypeVar, + overload, +) + +from apischema.cache import CacheAwareDict +from apischema.metadata.keys import ORDERING_METADATA +from apischema.types import MetadataMixin +from apischema.utils import stop_signature_abuse + +Cls = TypeVar("Cls", bound=type) + + +@dataclass(frozen=True) +class Ordering(MetadataMixin): + key = ORDERING_METADATA + order: Optional[int] = None + after: Optional[Any] = None + before: Optional[Any] = None + + def __post_init__(self): + from apischema.objects.fields import check_field_or_name + + if self.after is not None: + check_field_or_name(self.after, methods=True) + if self.before is not None: + check_field_or_name(self.before, methods=True) + + +_order_overriding: MutableMapping[type, Mapping[Any, Ordering]] = CacheAwareDict({}) + + +@overload +def order(__value: int) -> Ordering: + ... + + +@overload +def order(*, after: Any) -> Ordering: + ... + + +@overload +def order(*, before: Any) -> Ordering: + ... + + +@overload +def order(__fields: Sequence[Any]) -> Callable[[Cls], Cls]: + ... + + +@overload +def order(__override: Mapping[Any, Ordering]) -> Callable[[Cls], Cls]: + ... 
+ + +def order(__arg=None, *, before=None, after=None): + if len([arg for arg in (__arg, before, after) if arg is not None]) != 1: + stop_signature_abuse() + if isinstance(__arg, Sequence): + __arg = {field: order(after=prev) for field, prev in zip(__arg[1:], __arg)} + if isinstance(__arg, Mapping): + if not all(isinstance(val, Ordering) for val in __arg.values()): + stop_signature_abuse() + + def decorator(cls: Cls) -> Cls: + _order_overriding[cls] = __arg + return cls + + return decorator + elif __arg is not None and not isinstance(__arg, int): + stop_signature_abuse() + else: + return Ordering(__arg, after, before) + + +def get_order_overriding(cls: type) -> Mapping[str, Ordering]: + from apischema.objects.fields import get_field_name + + return { + get_field_name(field, methods=True): ordering + for sub_cls in reversed(cls.__mro__) + if sub_cls in _order_overriding + for field, ordering in _order_overriding[sub_cls].items() + } + + +T = TypeVar("T") + + +def sort_by_order( + cls: type, + elts: Collection[T], + name: Callable[[T], str], + order: Callable[[T], Optional[Ordering]], +) -> Sequence[T]: + from apischema.objects.fields import get_field_name + + order_overriding = get_order_overriding(cls) + groups: Dict[int, List[T]] = defaultdict(list) + after: Dict[str, List[T]] = defaultdict(list) + before: Dict[str, List[T]] = defaultdict(list) + for elt in elts: + ordering = order_overriding.get(name(elt), order(elt)) + if ordering is None: + groups[0].append(elt) + elif ordering.order is not None: + groups[ordering.order].append(elt) + elif ordering.after is not None: + after[get_field_name(ordering.after, methods=True)].append(elt) + elif ordering.before is not None: + before[get_field_name(ordering.before, methods=True)].append(elt) + else: + raise NotImplementedError + if not after and not before and len(groups) == 1: + return next(iter(groups.values())) + result = [] + + def add_to_result(elt: T): + elt_name = name(elt) + for before_elt in before[elt_name]: 
+ add_to_result(before_elt) + result.append(elt) + for after_elt in after[elt_name]: + add_to_result(after_elt) + + for value in sorted(groups): + for elt in groups[value]: + add_to_result(elt) + return result diff --git a/.venv/lib/python3.9/site-packages/apischema/py.typed b/.venv/lib/python3.9/site-packages/apischema/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.9/site-packages/apischema/recursion.py b/.venv/lib/python3.9/site-packages/apischema/recursion.py new file mode 100644 index 0000000..d05b6b1 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/recursion.py @@ -0,0 +1,178 @@ +from enum import Enum +from typing import ( + Any, + Collection, + Dict, + List, + Mapping, + Optional, + Sequence, + Set, + Tuple, + Type, +) + +from apischema.cache import cache +from apischema.conversions import AnyConversion +from apischema.conversions.conversions import DefaultConversion +from apischema.conversions.visitor import ( + Conv, + ConversionsVisitor, + Deserialization, + DeserializationVisitor, + Serialization, + SerializationVisitor, +) +from apischema.objects import ObjectField +from apischema.objects.visitor import ( + DeserializationObjectVisitor, + ObjectVisitor, + SerializationObjectVisitor, +) +from apischema.types import AnyType +from apischema.utils import Lazy +from apischema.visitor import Result + +RecursionKey = Tuple[AnyType, Optional[AnyConversion]] + + +class RecursiveChecker(ConversionsVisitor[Conv, Any], ObjectVisitor[Any]): + def __init__(self, default_conversion: DefaultConversion): + super().__init__(default_conversion) + self._cache = recursion_cache(self.__class__) + self._recursive: Dict[RecursionKey, Set[RecursionKey]] = {} + self._all_recursive: Set[RecursionKey] = set() + self._guard: List[RecursionKey] = [] + self._guard_indices: Dict[RecursionKey, int] = {} + + def any(self): + pass + + def collection(self, cls: Type[Collection], value_type: AnyType): + return self.visit(value_type) + + 
def enum(self, cls: Type[Enum]): + pass + + def literal(self, values: Sequence[Any]): + pass + + def mapping(self, cls: Type[Mapping], key_type: AnyType, value_type: AnyType): + self.visit(key_type) + self.visit(value_type) + + def object(self, tp: AnyType, fields: Sequence[ObjectField]): + for field in fields: + self.visit_with_conv(field.type, self._field_conversion(field)) + + def primitive(self, cls: Type): + pass + + def tuple(self, types: Sequence[AnyType]): + for tp in types: + self.visit(tp) + + def _visited_union(self, results: Sequence): + pass + + def unsupported(self, tp: AnyType): + pass + + def visit(self, tp: AnyType): + rec_key = (tp, self._conversion) + if rec_key in self._cache: + pass + elif rec_key in self._guard_indices: + recursive = self._guard[self._guard_indices[rec_key] :] + self._recursive.setdefault(rec_key, set()).update(recursive) + self._all_recursive.update(recursive) + else: + self._guard_indices[rec_key] = len(self._guard) + self._guard.append(rec_key) + try: + super().visit(tp) + finally: + self._guard.pop() + self._guard_indices.pop(rec_key) + if rec_key in self._recursive: + for key in self._recursive[rec_key]: + self._cache[key] = True + assert self._cache[rec_key] + elif rec_key not in self._all_recursive: + self._cache[rec_key] = False + + +class DeserializationRecursiveChecker( + DeserializationVisitor, + DeserializationObjectVisitor, + RecursiveChecker[Deserialization], +): + pass + + +class SerializationRecursiveChecker( + SerializationVisitor, SerializationObjectVisitor, RecursiveChecker[Serialization] +): + pass + + +@cache # use @cache for reset +def recursion_cache(checker_cls: Type[RecursiveChecker]) -> Dict[RecursionKey, bool]: + return {} + + +@cache +def is_recursive( + tp: AnyType, + conversion: Optional[AnyConversion], + default_conversion: DefaultConversion, + checker_cls: Type[RecursiveChecker], +) -> bool: + cache, rec_key = recursion_cache(checker_cls), (tp, conversion) + if rec_key not in cache: + 
checker_cls(default_conversion).visit_with_conv(tp, conversion) + return cache[rec_key] + + +class RecursiveConversionsVisitor(ConversionsVisitor[Conv, Result]): + def __init__(self, default_conversion: DefaultConversion): + super().__init__(default_conversion) + self._cache: Dict[Tuple[AnyType, Optional[AnyConversion]], Result] = {} + self._first_visit = True + + def _recursive_result(self, lazy: Lazy[Result]) -> Result: + raise NotImplementedError + + def visit_not_recursive(self, tp: AnyType) -> Result: + return super().visit(tp) + + def visit(self, tp: AnyType) -> Result: + if is_recursive( + tp, + self._conversion, + self.default_conversion, + DeserializationRecursiveChecker # type: ignore + if isinstance(self, DeserializationVisitor) + else SerializationRecursiveChecker, + # None, + ): + cache_key = tp, self._conversion + if cache_key in self._cache: + return self._cache[cache_key] + result = None + + def lazy_result(): + assert result is not None + return result + + self._cache[cache_key] = self._recursive_result(lazy_result) + try: + result = super().visit(tp) + finally: + del self._cache[cache_key] + return result + elif self._first_visit: + self._first_visit = False + return super().visit(tp) + else: + return self.visit_not_recursive(tp) diff --git a/.venv/lib/python3.9/site-packages/apischema/schemas/__init__.py b/.venv/lib/python3.9/site-packages/apischema/schemas/__init__.py new file mode 100644 index 0000000..1914d32 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/schemas/__init__.py @@ -0,0 +1,142 @@ +import re +import warnings +from dataclasses import dataclass, replace +from typing import ( + Any, + Callable, + Dict, + Mapping, + Optional, + Pattern, + Sequence, + TypeVar, + Union, +) + +from apischema.metadata.keys import SCHEMA_METADATA +from apischema.schemas.annotations import Annotations, ContentEncoding, Deprecated +from apischema.schemas.constraints import Constraints +from apischema.types import AnyType, MetadataMixin, 
Number, Undefined +from apischema.typing import is_annotated +from apischema.utils import merge_opts, replace_builtins + +T = TypeVar("T") +Extra = Union[Mapping[str, Any], Callable[[Dict[str, Any]], None]] + + +@dataclass(frozen=True) +class Schema(MetadataMixin): + key = SCHEMA_METADATA + annotations: Optional[Annotations] = None + constraints: Optional[Constraints] = None + extra: Optional[Callable[[Dict[str, Any]], None]] = None + override: bool = False + child: Optional["Schema"] = None + + def __call__(self, tp: T) -> T: + if is_annotated(tp): + raise TypeError("Cannot register schema on Annotated type") + _schemas[replace_builtins(tp)] = self + return tp + + def __set_name__(self, owner, name): + self.__call__(owner) + + def merge_into(self, base_schema: Dict[str, Any]): + if self.override: + base_schema.clear() + elif self.child is not None: + self.child.merge_into(base_schema) + if self.constraints is not None: + self.constraints.merge_into(base_schema) + if self.annotations is not None: + self.annotations.merge_into(base_schema) + if self.extra is not None: + self.extra(base_schema) # type: ignore + + +def schema( + *, + # annotations + title: Optional[str] = None, + description: Optional[str] = None, + default: Any = Undefined, + examples: Optional[Sequence[Any]] = None, + deprecated: Optional[Deprecated] = None, + # number + min: Optional[Number] = None, + max: Optional[Number] = None, + exc_min: Optional[Number] = None, + exc_max: Optional[Number] = None, + mult_of: Optional[Number] = None, + # string + format: Optional[str] = None, + media_type: Optional[str] = None, + encoding: Optional[ContentEncoding] = None, + min_len: Optional[int] = None, + max_len: Optional[int] = None, + pattern: Optional[Union[str, Pattern]] = None, + # array + min_items: Optional[int] = None, + max_items: Optional[int] = None, + unique: Optional[bool] = None, + # objects + min_props: Optional[int] = None, + max_props: Optional[int] = None, + # extra + extra: Optional[Extra] 
= None, + override: bool = False, +) -> Schema: + if default is ...: + warnings.warn( + "default=... is deprecated as default value is now" + " automatically added to the schema", + DeprecationWarning, + ) + default = Undefined + default = None if default is Undefined else (lambda d=default: d) + if pattern is not None: + pattern = re.compile(pattern) + if isinstance(extra, Mapping): + extra = lambda js, to_update=extra: js.update(to_update) # type: ignore + annotations = Annotations( + title, description, default, examples, format, deprecated, media_type, encoding + ) + constraints = Constraints( + min, + max, + exc_min, + exc_max, + mult_of, + min_len, + max_len, + pattern, + min_items, + max_items, + unique, + min_props, + max_props, + ) + return Schema(annotations, constraints, extra, override) + + +def default_schema(tp: AnyType) -> Optional[Schema]: + return None + + +_schemas: Dict[Any, Schema] = {} + + +def get_schema(tp: AnyType) -> Optional[Schema]: + tp = replace_builtins(tp) + try: + return _schemas.get(tp) + except TypeError: + return None + + +@merge_opts +def merge_schema(default: Schema, override: Schema) -> Schema: + if override.override: + return override + return replace(override, child=merge_schema(default, override.child)) diff --git a/.venv/lib/python3.9/site-packages/apischema/schemas/annotations.py b/.venv/lib/python3.9/site-packages/apischema/schemas/annotations.py new file mode 100644 index 0000000..1818bbe --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/schemas/annotations.py @@ -0,0 +1,36 @@ +from dataclasses import dataclass +from typing import Any, Callable, Dict, Optional, Sequence, Union + +from apischema.utils import to_camel_case + +Deprecated = Union[bool, str] +try: + from apischema.typing import Literal + + ContentEncoding = Literal["7bit", "8bit", "binary", "quoted-printable", "base64"] +except ImportError: + ContentEncoding = str # type: ignore + + +@dataclass(frozen=True) +class Annotations: + title: 
Optional[str] = None + description: Optional[str] = None + default: Optional[Callable[[], Any]] = None + examples: Optional[Sequence[Any]] = None + format: Optional[str] = None + deprecated: Optional[Deprecated] = None + media_type: Optional[str] = None + encoding: Optional[ContentEncoding] = None + + def merge_into(self, base_schema: Dict[str, Any]): + if self.deprecated: + base_schema["deprecated"] = bool(self.deprecated) + for k in ("title", "description", "examples", "format"): + if getattr(self, k) is not None: + base_schema[k] = getattr(self, k) + for k in ("media_type", "encoding"): + if getattr(self, k) is not None: + base_schema[to_camel_case("content_" + k)] = getattr(self, k) + if self.default is not None: + base_schema["default"] = self.default() diff --git a/.venv/lib/python3.9/site-packages/apischema/schemas/constraints.py b/.venv/lib/python3.9/site-packages/apischema/schemas/constraints.py new file mode 100644 index 0000000..c6a48d0 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/schemas/constraints.py @@ -0,0 +1,155 @@ +import operator as op +from collections import defaultdict +from dataclasses import dataclass, field, fields +from math import gcd +from typing import ( + Any, + Callable, + Collection, + Dict, + Mapping, + Optional, + Pattern, + Tuple, + TypeVar, +) + +from apischema.types import Number +from apischema.utils import merge_opts, to_hashable + +T = TypeVar("T") +U = TypeVar("U") + +COMPARISON_MERGE_AND_ERRORS: Dict[Callable, Tuple[Callable, str]] = { + op.lt: (max, "less than %s"), + op.le: (max, "less than or equal to %s"), + op.gt: (min, "greater than %s"), + op.ge: (min, "greater than or equal to %s"), +} +PREFIX_DICT: Mapping[type, str] = { + str: "string length", + list: "item count", + dict: "property count", +} +Check = Callable[[Any, Any], Any] +CONSTRAINT_METADATA_KEY = "constraint" + + +@dataclass +class ConstraintMetadata: + alias: str + cls: type + check: Check + error: Callable[[Any], str] + merge: 
Callable[[T, T], T] + + @property + def field(self) -> Any: + return field(default=None, metadata={CONSTRAINT_METADATA_KEY: self}) + + +def comparison(alias: str, cls: type, check: Check) -> Any: + merge, error = COMPARISON_MERGE_AND_ERRORS[check] + prefix = PREFIX_DICT.get(cls) # type: ignore + if prefix: + error = prefix + " " + error.replace("less", "lower") + if cls in (str, list, dict): + wrapped = check + + def check(data: Any, value: Any) -> bool: + return wrapped(len(data), value) + + return ConstraintMetadata(alias, cls, check, lambda v: error % v, merge).field + + +def merge_mult_of(m1: Number, m2: Number) -> Number: + if not isinstance(m1, int) and not isinstance(m2, int): + raise TypeError("multipleOf merging is only supported with integers") + return m1 * m2 / gcd(m1, m2) # type: ignore + + +def not_match_pattern(data: str, pattern: Pattern) -> bool: + return not pattern.match(data) + + +def merge_pattern(p1: Pattern, p2: Pattern) -> Pattern: + raise TypeError("Cannot merge patterns") + + +def not_unique(data: list, unique: bool) -> bool: + return (op.ne if unique else op.eq)(len(set(map(to_hashable, data))), len(data)) + + +@dataclass(frozen=True) +class Constraints: + # number + min: Optional[Number] = comparison("minimum", float, op.lt) + max: Optional[Number] = comparison("maximum", float, op.gt) + exc_min: Optional[Number] = comparison("exclusiveMinimum", float, op.le) + exc_max: Optional[Number] = comparison("exclusiveMaximum", float, op.ge) + mult_of: Optional[Number] = ConstraintMetadata( + "multipleOf", float, op.mod, lambda n: f"not a multiple of {n}", merge_mult_of # type: ignore + ).field + # string + min_len: Optional[int] = comparison("minLength", str, op.lt) + max_len: Optional[int] = comparison("maxLength", str, op.gt) + pattern: Optional[Pattern] = ConstraintMetadata( + "pattern", + str, + not_match_pattern, + lambda p: f"not matching '{p.pattern}'", + merge_pattern, # type: ignore + ).field + # array + min_items: Optional[int] = 
comparison("minItems", list, op.lt) + max_items: Optional[int] = comparison("maxItems", list, op.gt) + unique: Optional[bool] = ConstraintMetadata( + "uniqueItems", list, not_unique, lambda _: "duplicate items", op.or_ + ).field + # object + min_props: Optional[int] = comparison("minProperties", dict, op.lt) + max_props: Optional[int] = comparison("maxProperties", dict, op.gt) + + @property + def attr_and_metata( + self, + ) -> Collection[Tuple[str, Optional[Any], ConstraintMetadata]]: + return [ + (f.name, getattr(self, f.name), f.metadata[CONSTRAINT_METADATA_KEY]) + for f in fields(self) + if CONSTRAINT_METADATA_KEY in f.metadata + ] + + @property + def checks_by_type(self) -> Mapping[type, Collection[Tuple[Check, Any, str]]]: + result = defaultdict(list) + for _, attr, metadata in self.attr_and_metata: + if attr is None: + continue + error = f"{metadata.error(attr)} ({metadata.alias})" + result[metadata.cls].append((metadata.check, attr, error)) + result[int] = result[float] + return result + + def merge_into(self, base_schema: Dict[str, Any]): + for name, attr, metadata in self.attr_and_metata: + if attr is not None: + alias = metadata.alias + if alias in base_schema: + base_schema[alias] = metadata.merge(attr, base_schema[alias]) # type: ignore + else: + base_schema[alias] = attr + + +@merge_opts +def merge_constraints(c1: Constraints, c2: Constraints) -> Constraints: + constraints: Dict[str, Any] = {} + for name, attr1, metadata in c1.attr_and_metata: + attr2 = getattr(c2, name) + if attr1 is None: + constraints[name] = attr2 + elif attr2 is None: + constraints[name] = attr1 + else: + constraints[name] = metadata.merge(attr1, attr2) # type: ignore + return Constraints(**constraints) # type: ignore diff --git a/.venv/lib/python3.9/site-packages/apischema/serialization/__init__.py b/.venv/lib/python3.9/site-packages/apischema/serialization/__init__.py new file mode 100644 index 0000000..4fd0b2f --- /dev/null +++ 
b/.venv/lib/python3.9/site-packages/apischema/serialization/__init__.py @@ -0,0 +1,621 @@ +import collections.abc +import operator +from contextlib import suppress +from dataclasses import dataclass +from enum import Enum +from functools import lru_cache +from typing import ( + Any, + Callable, + Collection, + Mapping, + Optional, + Sequence, + Tuple, + Type, + TypeVar, + Union, + cast, + overload, +) + +from apischema.aliases import Aliaser +from apischema.cache import cache +from apischema.conversions.conversions import AnyConversion, DefaultConversion +from apischema.conversions.utils import Converter +from apischema.conversions.visitor import ( + Serialization, + SerializationVisitor, + sub_conversion, +) +from apischema.fields import FIELDS_SET_ATTR, support_fields_set +from apischema.objects import AliasedStr, ObjectField +from apischema.objects.visitor import SerializationObjectVisitor +from apischema.ordering import sort_by_order +from apischema.recursion import RecursiveConversionsVisitor +from apischema.serialization.serialized_methods import get_serialized_methods +from apischema.types import AnyType, NoneType, Undefined, UndefinedType +from apischema.typing import is_new_type, is_type, is_type_var, is_typed_dict, is_union +from apischema.utils import ( + Lazy, + as_predicate, + deprecate_kwargs, + get_args2, + get_origin_or_type, + get_origin_or_type2, + identity, + is_union_of, + opt_or, +) +from apischema.visitor import Unsupported + +SerializationMethod = Callable[[Any], Any] +SerializationMethodFactory = Callable[[AnyType], SerializationMethod] + + +T = TypeVar("T") + + +def instance_checker(tp: AnyType) -> Tuple[Callable[[Any, Any], bool], Any]: + origin = get_origin_or_type2(tp) + if origin is NoneType: + return operator.is_, None + elif is_typed_dict(origin): + return isinstance, collections.abc.Mapping + elif is_type(origin): + return isinstance, origin + elif is_new_type(origin): + return instance_checker(origin.__supertype__) + elif 
is_type_var(origin) or origin is Any: + return (lambda data, _: True), ... + elif is_union(origin): + checks = list(map(instance_checker, get_args2(tp))) + return (lambda data, _: any(check(data, arg) for check, arg in checks)), ... + else: + raise TypeError(f"{tp} is not supported in union serialization") + + +def identity_as_none(method: SerializationMethod) -> Optional[SerializationMethod]: + return method if method is not identity else None + + +@dataclass(frozen=True) +class PassThroughOptions: + any: bool = False + collections: bool = False + enums: bool = False + tuple: bool = False + types: Union[Collection[AnyType], Callable[[AnyType], bool]] = () + + def __post_init__(self): + object.__setattr__(self, "types", as_predicate(self.types)) + if self.collections and not self.tuple: + object.__setattr__(self, "tuple", True) + + +class SerializationMethodVisitor( + RecursiveConversionsVisitor[Serialization, SerializationMethod], + SerializationVisitor[SerializationMethod], + SerializationObjectVisitor[SerializationMethod], +): + use_cache: bool = True + + def __init__( + self, + additional_properties: bool, + aliaser: Aliaser, + check_type: bool, + default_conversion: DefaultConversion, + exclude_defaults: bool, + exclude_none: bool, + exclude_unset: bool, + fall_back_on_any: bool, + pass_through_options: PassThroughOptions, + ): + super().__init__(default_conversion) + self.additional_properties = additional_properties + self.aliaser = aliaser + self.check_type = check_type + self.exclude_defaults = exclude_defaults + self.exclude_none = exclude_none + self.exclude_unset = exclude_unset + self.fall_back_on_any = fall_back_on_any + self.pass_through_options = pass_through_options + self.pass_through_type = as_predicate(self.pass_through_options.types) + + @property + def _factory(self) -> SerializationMethodFactory: + return serialization_method_factory( + self.additional_properties, + self.aliaser, + self.check_type, + self._conversion, + 
self.default_conversion, + self.exclude_defaults, + self.exclude_none, + self.exclude_unset, + self.fall_back_on_any, + self.pass_through_options, + ) + + def visit_not_recursive(self, tp: AnyType): + return self._factory(tp) if self.use_cache else super().visit_not_recursive(tp) + + def _recursive_result(self, lazy: Lazy[SerializationMethod]) -> SerializationMethod: + rec_method = None + + def method(obj: Any) -> Any: + nonlocal rec_method + if rec_method is None: + rec_method = lazy() + return rec_method(obj) + + return method + + def any(self) -> SerializationMethod: + if self.pass_through_options.any: + return identity + factory = self._factory + + def method(obj: Any) -> Any: + return factory(obj.__class__)(obj) + + return method + + def _any_fallback(self, tp: AnyType) -> SerializationMethod: + fallback, serialize_any = self.fall_back_on_any, self.any() + + def method(obj: Any) -> Any: + if fallback: + return serialize_any(obj) + else: + raise TypeError(f"Expected {tp}, found {obj.__class__}") + + return method + + def _wrap(self, cls: type, method: SerializationMethod) -> SerializationMethod: + if not self.check_type: + return method + fallback = self._any_fallback(cls) + cls_to_check = Mapping if is_typed_dict(cls) else cls + + def wrapper(obj: Any) -> Any: + return method(obj) if isinstance(obj, cls_to_check) else fallback(obj) + + return wrapper + + def collection( + self, cls: Type[Collection], value_type: AnyType + ) -> SerializationMethod: + serialize_value = self.visit(value_type) + + method: SerializationMethod + if serialize_value is not identity: + + def method(obj: Any) -> Any: + # using map is faster than comprehension + return list(map(serialize_value, obj)) + + elif ( + issubclass(cls, list) + or (issubclass(cls, tuple) and self.pass_through_options.tuple) + or ( + self.pass_through_options.collections + and not issubclass(cls, collections.abc.Set) + ) + ): + method = identity + else: + method = list + + return self._wrap(cls, method) + + def 
enum(self, cls: Type[Enum]) -> SerializationMethod: + if self.pass_through_options.enums or issubclass(cls, (int, str)): + return identity + elif all( + method is identity + for method in map(self.visit, {elt.value.__class__ for elt in cls}) + ): + method: SerializationMethod = operator.attrgetter("value") + else: + any_method = self.any() + + def method(obj: Any) -> Any: + return any_method(obj.value) + + return self._wrap(cls, method) + + def literal(self, values: Sequence[Any]) -> SerializationMethod: + if self.pass_through_options.enums or all( + isinstance(v, (int, str)) for v in values + ): + return identity + else: + return self.any() + + def mapping( + self, cls: Type[Mapping], key_type: AnyType, value_type: AnyType + ) -> SerializationMethod: + serialize_key, serialize_value = self.visit(key_type), self.visit(value_type) + method: SerializationMethod + if serialize_key is not identity or serialize_value is not identity: + + def method(obj: Any) -> Any: + return { + serialize_key(key): serialize_value(value) + for key, value in obj.items() + } + + elif self.pass_through_options.collections or issubclass(cls, dict): + method = identity + else: + method = dict + + return self._wrap(cls, method) + + def object(self, tp: AnyType, fields: Sequence[ObjectField]) -> SerializationMethod: + cls = get_origin_or_type(tp) + typed_dict = is_typed_dict(cls) + getter: Callable[[str], Callable[[Any], Any]] = ( + operator.itemgetter if typed_dict else operator.attrgetter + ) + serialization_fields = [ + ( + field.name, + self.aliaser(field.alias) if not field.is_aggregate else None, + getter(field.name), + field.required, + field.skip.serialization_if, + is_union_of(field.type, UndefinedType) or default is Undefined, + (is_union_of(field.type, NoneType) and self.exclude_none) + or field.none_as_undefined + or (default is None and self.exclude_defaults), + (field.skip.serialization_default or self.exclude_defaults) + and default not in (None, Undefined), + default, + 
identity_as_none(self.visit_with_conv(field.type, field.serialization)), + field.ordering, + ) + for field in fields + for default in [... if field.required else field.get_default()] + ] + [ + ( + serialized.func.__name__, + self.aliaser(serialized.alias), + serialized.func, + True, + None, + is_union_of(ret_type, UndefinedType), + is_union_of(ret_type, NoneType) and self.exclude_none, + False, + ..., + self.visit_with_conv(ret_type, serialized.conversion), + serialized.ordering, + ) + for serialized, types in get_serialized_methods(tp) + for ret_type in [types["return"]] + ] + serialization_fields = sort_by_order( # type: ignore + cls, serialization_fields, lambda f: f[0], lambda f: f[-1] + ) + field_names = {f.name for f in fields} + any_method = self.any() + exclude_unset = self.exclude_unset and support_fields_set(cls) + additional_properties = self.additional_properties and typed_dict + + def method(obj: Any) -> Any: + result = {} + for ( + name, + alias, + get_field, + required, + skip_if, + undefined, + skip_none, + skip_default, + default, + serialize_field, + _, + ) in serialization_fields: + if (not exclude_unset or name in getattr(obj, FIELDS_SET_ATTR)) and ( + not typed_dict or required or name in obj + ): + field_value = get_field(obj) + if not ( + (skip_if and skip_if(field_value)) + or (undefined and field_value is Undefined) + or (skip_none and field_value is None) + or (skip_default and field_value == default) + ): + if serialize_field: + field_value = serialize_field(field_value) + if alias: + result[alias] = field_value + else: + result.update(field_value) + if additional_properties: + assert isinstance(obj, Mapping) + for key, value in obj.items(): + if key not in field_names and isinstance(key, str): + result[key] = any_method(value) + return result + + return self._wrap(cls, method) + + def primitive(self, cls: Type) -> SerializationMethod: + return self._wrap(cls, identity) + + def subprimitive(self, cls: Type, superclass: Type) -> 
SerializationMethod: + if cls is AliasedStr: + return self.aliaser + else: + return super().subprimitive(cls, superclass) + + def tuple(self, types: Sequence[AnyType]) -> SerializationMethod: + elt_serializers = list(enumerate(map(self.visit, types))) + if all(method is identity for _, method in elt_serializers): + return identity if self.pass_through_options.tuple else list # type: ignore + + def method(obj: Any) -> Any: + return [serialize_elt(obj[i]) for i, serialize_elt in elt_serializers] + + if self.check_type: + nb_elts = len(elt_serializers) + wrapped = method + fall_back_on_any, as_list = self.fall_back_on_any, self._factory(list) + + def method(obj: Any) -> Any: + if len(obj) == nb_elts: + return wrapped(obj) + elif fall_back_on_any: + return as_list(obj) + else: + raise TypeError(f"Expected {nb_elts}-tuple, found {len(obj)}-tuple") + + return self._wrap(tuple, method) + + def union(self, alternatives: Sequence[AnyType]) -> SerializationMethod: + methods = [] + for tp in alternatives: + with suppress(Unsupported): + methods.append((self.visit(tp), *instance_checker(tp))) + # No need to catch the case with all methods being identity, + # because passthrough + if not methods: + raise Unsupported(Union[tuple(alternatives)]) # type: ignore + elif len(methods) == 1: + return methods[0][0] + elif all(method is identity for method, _, _ in methods): + return identity + elif len(methods) == 2 and NoneType in alternatives: + serialize_alt = next(meth for meth, _, arg in methods if arg is not None) + + def method(obj: Any) -> Any: + return serialize_alt(obj) if obj is not None else None + + else: + fallback = self._any_fallback(Union[alternatives]) + + def method(obj: Any) -> Any: + for serialize_alt, check, arg in methods: + if check(obj, arg): + try: + return serialize_alt(obj) + except Exception: + pass + return fallback(obj) + + return method + + def unsupported(self, tp: AnyType) -> SerializationMethod: + try: + return super().unsupported(tp) + except 
Unsupported: + if self.fall_back_on_any and is_type(tp): + if issubclass(tp, Mapping): + return self.visit(Mapping[Any, Any]) + elif issubclass(tp, Collection): + return self.visit(Collection[Any]) + raise + + def _visit_conversion( + self, + tp: AnyType, + conversion: Serialization, + dynamic: bool, + next_conversion: Optional[AnyConversion], + ) -> SerializationMethod: + serialize_conv = self.visit_with_conv( + conversion.target, sub_conversion(conversion, next_conversion) + ) + converter = cast(Converter, conversion.converter) + if converter is identity: + method = serialize_conv + elif serialize_conv is identity: + method = converter + else: + + def method(obj: Any) -> Any: + return serialize_conv(converter(obj)) + + return self._wrap(get_origin_or_type(tp), method) + + def visit_conversion( + self, + tp: AnyType, + conversion: Optional[Serialization], + dynamic: bool, + next_conversion: Optional[AnyConversion] = None, + ) -> SerializationMethod: + if not dynamic and self.pass_through_type(tp): + return identity + else: + return super().visit_conversion(tp, conversion, dynamic, next_conversion) + + +@cache +def serialization_method_factory( + additional_properties: bool, + aliaser: Aliaser, + check_type: bool, + conversion: Optional[AnyConversion], + default_conversion: DefaultConversion, + exclude_defaults: bool, + exclude_none: bool, + exclude_unset: bool, + fall_back_on_any: bool, + pass_through: PassThroughOptions, +) -> SerializationMethodFactory: + @lru_cache() + def factory(tp: AnyType) -> SerializationMethod: + return SerializationMethodVisitor( + additional_properties, + aliaser, + check_type, + default_conversion, + exclude_defaults, + exclude_none, + exclude_unset, + fall_back_on_any, + pass_through, + ).visit_with_conv(tp, conversion) + + return factory + + +def serialization_method( + type: AnyType, + *, + additional_properties: bool = None, + aliaser: Aliaser = None, + check_type: bool = None, + conversion: AnyConversion = None, + 
default_conversion: DefaultConversion = None, + exclude_defaults: bool = None, + exclude_none: bool = None, + exclude_unset: bool = None, + fall_back_on_any: bool = None, + pass_through: PassThroughOptions = None, +) -> SerializationMethod: + from apischema import settings + + return serialization_method_factory( + opt_or(additional_properties, settings.additional_properties), + opt_or(aliaser, settings.aliaser), + opt_or(check_type, settings.serialization.check_type), + conversion, + opt_or(default_conversion, settings.serialization.default_conversion), + opt_or(exclude_defaults, settings.serialization.exclude_defaults), + opt_or(exclude_none, settings.serialization.exclude_none), + opt_or(exclude_unset, settings.serialization.exclude_unset), + opt_or(fall_back_on_any, settings.serialization.fall_back_on_any), + opt_or(pass_through, settings.serialization.pass_through), + )(type) + + +NO_OBJ = object() + + +@overload +def serialize( + type: AnyType, + obj: Any, + *, + additional_properties: bool = None, + aliaser: Aliaser = None, + check_type: bool = None, + conversion: AnyConversion = None, + default_conversion: DefaultConversion = None, + exclude_defaults: bool = None, + exclude_none: bool = None, + exclude_unset: bool = None, + fall_back_on_any: bool = None, + pass_through: PassThroughOptions = None, +) -> Any: + ... + + +@overload +def serialize( + obj: Any, + *, + additional_properties: bool = None, + aliaser: Aliaser = None, + check_type: bool = None, + conversion: AnyConversion = None, + default_conversion: DefaultConversion = None, + exclude_defaults: bool = None, + exclude_none: bool = None, + exclude_unset: bool = None, + fall_back_on_any: bool = True, + pass_through: PassThroughOptions = None, +) -> Any: + ... 
+ + +@deprecate_kwargs({"conversions": "conversion"}) # type: ignore +def serialize( + type: AnyType = Any, + obj: Any = NO_OBJ, + *, + additional_properties: bool = None, + aliaser: Aliaser = None, + check_type: bool = None, + conversion: AnyConversion = None, + default_conversion: DefaultConversion = None, + exclude_defaults: bool = None, + exclude_none: bool = None, + exclude_unset: bool = None, + fall_back_on_any: bool = None, + pass_through: PassThroughOptions = None, +) -> Any: + # Handle overloaded signature without type + if obj is NO_OBJ: + type, obj = Any, type + if fall_back_on_any is None: + fall_back_on_any = True + return serialization_method( + type, + additional_properties=additional_properties, + aliaser=aliaser, + check_type=check_type, + conversion=conversion, + default_conversion=default_conversion, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + exclude_unset=exclude_unset, + fall_back_on_any=fall_back_on_any, + pass_through=pass_through, + )(obj) + + +def serialization_default( + *, + additional_properties: bool = None, + aliaser: Aliaser = None, + default_conversion: DefaultConversion = None, + exclude_defaults: bool = None, + exclude_none: bool = None, + exclude_unset: bool = None, +) -> SerializationMethod: + from apischema import settings + + factory = serialization_method_factory( + opt_or(additional_properties, settings.additional_properties), + opt_or(aliaser, settings.aliaser), + False, + None, + opt_or(default_conversion, settings.serialization.default_conversion), + opt_or(exclude_defaults, settings.serialization.exclude_defaults), + opt_or(exclude_none, settings.serialization.exclude_none), + opt_or(exclude_unset, settings.serialization.exclude_unset), + False, + PassThroughOptions(any=True), + ) + + def method(obj: Any) -> Any: + return factory(obj.__class__)(obj) + + return method diff --git a/.venv/lib/python3.9/site-packages/apischema/serialization/serialized_methods.py 
b/.venv/lib/python3.9/site-packages/apischema/serialization/serialized_methods.py new file mode 100644 index 0000000..80e5726 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/serialization/serialized_methods.py @@ -0,0 +1,175 @@ +from collections import defaultdict +from dataclasses import dataclass +from functools import wraps +from inspect import Parameter, signature +from typing import ( + Any, + Callable, + Collection, + Dict, + Mapping, + MutableMapping, + NoReturn, + Optional, + Tuple, + Type, + TypeVar, + Union, + overload, +) + +from apischema.cache import CacheAwareDict +from apischema.conversions.conversions import AnyConversion +from apischema.methods import method_registerer +from apischema.ordering import Ordering +from apischema.schemas import Schema +from apischema.types import AnyType, Undefined, UndefinedType +from apischema.typing import generic_mro, get_type_hints, is_type +from apischema.utils import ( + deprecate_kwargs, + get_args2, + get_origin_or_type, + get_origin_or_type2, + substitute_type_vars, + subtyping_substitution, +) + + +@dataclass(frozen=True) +class SerializedMethod: + func: Callable + alias: str + conversion: Optional[AnyConversion] + error_handler: Optional[Callable] + ordering: Optional[Ordering] + schema: Optional[Schema] + + def error_type(self) -> AnyType: + assert self.error_handler is not None + types = get_type_hints(self.error_handler, include_extras=True) + if "return" not in types: + raise TypeError("Error handler must be typed") + return types["return"] + + def return_type(self, return_type: AnyType) -> AnyType: + if self.error_handler is not None: + error_type = self.error_type() + if error_type is not NoReturn: + return Union[return_type, error_type] + return return_type + + def types(self, owner: AnyType = None) -> Mapping[str, AnyType]: + types = get_type_hints(self.func, include_extras=True) + if "return" not in types: + if is_type(self.func): + types["return"] = self.func + else: + raise 
TypeError("Function must be typed") + types["return"] = self.return_type(types["return"]) + if get_args2(owner): + first_param = next(iter(signature(self.func).parameters)) + substitution, _ = subtyping_substitution( + types.get(first_param, get_origin_or_type2(owner)), owner + ) + types = { + name: substitute_type_vars(tp, substitution) + for name, tp in types.items() + } + return types + + +_serialized_methods: MutableMapping[Type, Dict[str, SerializedMethod]] = CacheAwareDict( + defaultdict(dict) +) + +S = TypeVar("S", bound=SerializedMethod) + + +def _get_methods( + tp: AnyType, all_methods: Mapping[Type, Mapping[str, S]] +) -> Collection[Tuple[S, Mapping[str, AnyType]]]: + result = {} + for base in reversed(generic_mro(tp)): + for name, method in all_methods[get_origin_or_type(base)].items(): + result[name] = (method, method.types(base)) + return result.values() + + +def get_serialized_methods( + tp: AnyType, +) -> Collection[Tuple[SerializedMethod, Mapping[str, AnyType]]]: + return _get_methods(tp, _serialized_methods) + + +ErrorHandler = Union[Callable, None, UndefinedType] + + +def none_error_handler(error: Exception, obj: Any, alias: str) -> None: + return None + + +MethodOrProp = TypeVar("MethodOrProp", Callable, property) + + +@overload +def serialized(__method_or_property: MethodOrProp) -> MethodOrProp: + ... + + +@overload +def serialized( + alias: str = None, + *, + conversion: AnyConversion = None, + error_handler: ErrorHandler = Undefined, + order: Optional[Ordering] = None, + schema: Schema = None, + owner: Type = None, +) -> Callable[[MethodOrProp], MethodOrProp]: + ... 
+ + +@deprecate_kwargs({"conversions": "conversion"}) +def serialized( + __arg=None, + *, + alias: str = None, + conversion: AnyConversion = None, + error_handler: ErrorHandler = Undefined, + order: Optional[Ordering] = None, + schema: Schema = None, + owner: Type = None, +): + def register(func: Callable, owner: Type, alias2: str): + alias2 = alias or alias2 + parameters = list(signature(func).parameters.values()) + for param in parameters[1:]: + if ( + param.kind not in {Parameter.VAR_POSITIONAL, Parameter.VAR_KEYWORD} + and param.default is Parameter.empty + ): + raise TypeError("Serialized method cannot have required parameter") + error_handler2 = error_handler + if error_handler is None: + error_handler2 = none_error_handler + if error_handler2 is Undefined: + error_handler2 = None + else: + wrapped = func + + @wraps(wrapped) + def func(self): + try: + return wrapped(self) + except Exception as error: + return error_handler(error, self, alias2) + + assert not isinstance(error_handler2, UndefinedType) + _serialized_methods[owner][alias2] = SerializedMethod( + func, alias2, conversion, error_handler2, order, schema + ) + + if isinstance(__arg, str): + alias = __arg + __arg = None + return method_registerer(__arg, owner, register) diff --git a/.venv/lib/python3.9/site-packages/apischema/settings.py b/.venv/lib/python3.9/site-packages/apischema/settings.py new file mode 100644 index 0000000..c4a26df --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/settings.py @@ -0,0 +1,82 @@ +import warnings +from inspect import Parameter +from typing import Callable, Optional, Sequence + +from apischema import cache +from apischema.aliases import Aliaser +from apischema.conversions.conversions import DefaultConversion +from apischema.conversions.converters import ( + default_deserialization, + default_serialization, +) +from apischema.deserialization.coercion import Coercer, coerce as coerce_ +from apischema.json_schema import JsonSchemaVersion +from 
apischema.objects import ObjectField +from apischema.objects.fields import default_object_fields as default_object_fields_ +from apischema.schemas import Schema +from apischema.serialization import PassThroughOptions +from apischema.type_names import TypeName, default_type_name as default_type_name_ +from apischema.types import AnyType +from apischema.utils import to_camel_case + + +class ResetCache(type): + def __setattr__(self, name, value): + super().__setattr__(name, value) + cache.reset() + + +class MetaSettings(ResetCache): + @property + def camel_case(self) -> bool: + raise NotImplementedError + + @camel_case.setter + def camel_case(self, value: bool): + settings.aliaser = to_camel_case if value else lambda s: s + + def __setattr__(self, name, value): + if name == "default_schema" and not isinstance(value, ResetCache): + warnings.warn( + "settings.default_schema is deprecated," + " use settings.base_schema.type instead", + DeprecationWarning, + ) + assert self is settings + self.base_schema.type = value # type: ignore + else: + super().__setattr__(name, value) + + +class settings(metaclass=MetaSettings): + additional_properties: bool = False + aliaser: Aliaser = lambda s: s + default_object_fields: Callable[ + [type], Optional[Sequence[ObjectField]] + ] = default_object_fields_ + default_schema: Callable[[AnyType], Optional[Schema]] = lambda *_: None + default_type_name: Callable[[AnyType], Optional[TypeName]] = default_type_name_ + json_schema_version: JsonSchemaVersion = JsonSchemaVersion.DRAFT_2020_12 + + class base_schema: + field: Callable[[AnyType, str, str], Optional[Schema]] = lambda *_: None + method: Callable[[AnyType, Callable, str], Optional[Schema]] = lambda *_: None + parameter: Callable[ + [Callable, Parameter, str], Optional[Schema] + ] = lambda *_: None + type: Callable[[AnyType], Optional[Schema]] = lambda *_: None + + class deserialization(metaclass=ResetCache): + coerce: bool = False + coercer: Coercer = coerce_ + default_conversion: 
DefaultConversion = default_deserialization + fall_back_on_default: bool = False + + class serialization(metaclass=ResetCache): + check_type: bool = False + fall_back_on_any: bool = False + default_conversion: DefaultConversion = default_serialization + exclude_defaults: bool = False + exclude_none: bool = False + exclude_unset: bool = True + pass_through: PassThroughOptions = PassThroughOptions() diff --git a/.venv/lib/python3.9/site-packages/apischema/skip.py b/.venv/lib/python3.9/site-packages/apischema/skip.py new file mode 100644 index 0000000..21b6f5b --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/skip.py @@ -0,0 +1,19 @@ +__all__ = ["NotNull", "Skip"] +from typing import TypeVar, Union + +from apischema.visitor import Unsupported + +Skip = Unsupported + +T = TypeVar("T") +try: + from apischema.typing import Annotated + + NotNull = Union[T, Annotated[None, Skip]] +except ImportError: + + class _NotNull: + def __getitem__(self, item): + raise TypeError("NotNull requires Annotated (PEP 593)") + + NotNull = _NotNull() # type: ignore diff --git a/.venv/lib/python3.9/site-packages/apischema/std_types.py b/.venv/lib/python3.9/site-packages/apischema/std_types.py new file mode 100644 index 0000000..2477cee --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/std_types.py @@ -0,0 +1,99 @@ +import operator +import re +import sys +from base64 import b64decode, b64encode +from collections import deque +from datetime import date, datetime, time +from decimal import Decimal +from ipaddress import ( + IPv4Address, + IPv4Interface, + IPv4Network, + IPv6Address, + IPv6Interface, + IPv6Network, +) +from pathlib import ( + Path, + PosixPath, + PurePath, + PurePosixPath, + PureWindowsPath, + WindowsPath, +) +from typing import Deque, List, TypeVar +from uuid import UUID + +from apischema import deserializer, schema, serializer, type_name +from apischema.conversions import Conversion, as_str + +T = TypeVar("T") + + +# =================== bytes 
===================== + +deserializer(Conversion(b64decode, source=str, target=bytes)) + + +@serializer +def to_base64(b: bytes) -> str: + return b64encode(b).decode() + + +type_name(graphql="Bytes")(bytes) +schema(encoding="base64")(bytes) + + +# ================ collections ================== + +deserializer(Conversion(deque, source=List[T], target=Deque[T])) +serializer(Conversion(list, source=Deque[T], target=List[T])) +if sys.version_info < (3, 7): + deserializer(Conversion(deque, source=List, target=deque)) + serializer(Conversion(list, source=deque, target=List)) + + +# ================== datetime =================== + +if sys.version_info >= (3, 7): # pragma: no cover + for cls, format in [(date, "date"), (datetime, "date-time"), (time, "time")]: + deserializer(Conversion(cls.fromisoformat, source=str, target=cls)) # type: ignore + serializer(Conversion(cls.isoformat, source=cls, target=str)) # type: ignore + type_name(graphql=cls.__name__.capitalize())(cls) + schema(format=format)(cls) + +# ================== decimal ==================== + +deserializer(Conversion(Decimal, source=float, target=Decimal)) +serializer(Conversion(float, source=Decimal, target=float)) +type_name(None)(Decimal) + +# ================= ipaddress =================== + +for classes, format in [ + ((IPv4Address, IPv4Interface, IPv4Network), "ipv4"), + ((IPv6Address, IPv6Interface, IPv6Network), "ipv6"), +]: + for cls in classes: + as_str(cls) + type_name(graphql=cls.__name__)(cls) + schema(format=format)(cls) + +# ==================== path ===================== + +for cls in (PurePath, PurePosixPath, PureWindowsPath, Path, PosixPath, WindowsPath): + as_str(cls) + type_name(None)(cls) + +# =================== pattern =================== + +Pattern = type(re.compile(r"")) +deserializer(Conversion(re.compile, source=str, target=Pattern)) +serializer(Conversion(operator.attrgetter("pattern"), source=Pattern, target=str)) +type_name(None)(Pattern) + +# ==================== uuid 
===================== + +as_str(UUID) +type_name(graphql="UUID") +schema(format="uuid")(UUID) diff --git a/.venv/lib/python3.9/site-packages/apischema/tagged_unions.py b/.venv/lib/python3.9/site-packages/apischema/tagged_unions.py new file mode 100644 index 0000000..0ba97e9 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/tagged_unions.py @@ -0,0 +1,161 @@ +__all__ = ["Tagged", "TaggedUnion", "get_tagged"] + +import warnings +from dataclasses import InitVar, dataclass, field +from typing import ( + Any, + ClassVar, + Generic, + Optional, + Tuple, + Type, + TypeVar, + Union, + overload, +) + +from apischema.aliases import alias as alias_metadata +from apischema.conversions.conversions import ConvOrFunc +from apischema.metadata import conversion +from apischema.metadata.keys import ( + DEFAULT_AS_SET_METADATA, + FALL_BACK_ON_DEFAULT_METADATA, + FLATTEN_METADATA, + INIT_VAR_METADATA, + POST_INIT_METADATA, + PROPERTIES_METADATA, + REQUIRED_METADATA, + SKIP_METADATA, +) +from apischema.schemas import Schema, schema +from apischema.types import Metadata, MetadataImplem, Undefined, UndefinedType +from apischema.typing import get_type_hints +from apischema.utils import PREFIX, get_args2, get_origin2, wrap_generic_init_subclass + +TAGS_ATTR = f"{PREFIX}tags" + +T = TypeVar("T", bound="TaggedUnion") +V = TypeVar("V") + + +class Tag(str, Generic[T, V]): + def __new__(cls, tag: str, type: Type[T]): + return super().__new__(cls, tag) + + def __init__(self, tag: str, type: Type[T]): + super().__init__() + self.type = type + + def __call__(self, value: V) -> T: + return self.type(**{self: value}) # type: ignore + + +INVALID_METADATA = { + DEFAULT_AS_SET_METADATA, + FALL_BACK_ON_DEFAULT_METADATA, + INIT_VAR_METADATA, + FLATTEN_METADATA, + POST_INIT_METADATA, + PROPERTIES_METADATA, + REQUIRED_METADATA, + SKIP_METADATA, +} + + +@dataclass(frozen=True) +class Tagged(Generic[V]): + metadata: Metadata = field(default_factory=MetadataImplem) + alias: 
InitVar[Optional[str]] = None + schema: InitVar[Optional[Schema]] = None + deserialization: InitVar[Optional[ConvOrFunc]] = None + serialization: InitVar[Optional[ConvOrFunc]] = None + + def __post_init__( + self, + alias: Optional[str], + schema: Optional[Schema], + deserialization: Optional[ConvOrFunc], + serialization: Optional[ConvOrFunc], + ): + if self.metadata is None or isinstance(self.metadata, str): + raise TypeError( + "Tagged alias parameter is deprecated, use metadata instead" + ) + if any(m is not None for m in (alias, schema, deserialization, serialization)): + metadata = self.metadata + warnings.warn( + "Tagged keyword parameters are deprecated," + " use metadata parameter instead", + DeprecationWarning, + ) + if alias is not None: + metadata |= alias_metadata(alias) + if schema is not None: + metadata |= schema + if deserialization is not None or serialization is not None: + metadata |= conversion(deserialization, serialization) + object.__setattr__(self, "metadata", metadata) + if self.metadata.keys() & INVALID_METADATA: + raise TypeError("Invalid metadata in a TaggedUnion field") + + @overload + def __get__(self, instance: None, owner: Type[T]) -> Tag[T, V]: + ... + + @overload + def __get__(self, instance: Any, owner) -> Union[V, UndefinedType]: + ... 
+ + def __get__(self, instance, owner): + raise NotImplementedError + + +class TaggedUnion: + def __init__(self, **kwargs): + if len(kwargs) != 1: + raise ValueError("TaggedUnion constructor expects only one field") + tags = getattr(self, TAGS_ATTR) + for tag in tags: + setattr(self, tag, Undefined) + for tag, value in kwargs.items(): + if tag not in tags: + raise TypeError(f"{type(self)} has no tag {tag}") + setattr(self, tag, value) + + def __repr__(self): + tag, value = get_tagged(self) + return f"{type(self).__name__}({tag}={value!r})" + + @wrap_generic_init_subclass + def __init_subclass__(cls, **kwargs): + super().__init_subclass__(**kwargs) + tags = set(getattr(cls, TAGS_ATTR, ())) + types = get_type_hints(cls, include_extras=True) + for tag, tp in types.items(): + if get_origin2(tp) == Tagged: + tagged = cls.__dict__.get(tag, Tagged()) + setattr(cls, tag, field(default=Undefined, metadata=tagged.metadata)) + cls.__annotations__[tag] = Union[ + get_args2(types[tag])[0], UndefinedType + ] + tags.add(tag) + elif tag not in tags: + if get_origin2(tp) != ClassVar: + cls.__annotations__[tag] = ClassVar[tp] + else: + raise TypeError( + "Only Tagged or ClassVar fields are allowed in TaggedUnion" + ) + setattr(cls, TAGS_ATTR, tags) + schema(min_props=1, max_props=1)(dataclass(init=False, repr=False)(cls)) + for tag in tags: + setattr(cls, tag, Tag(tag, cls)) + + +def get_tagged(tagged_union: TaggedUnion) -> Tuple[str, Any]: + defined = { + tag: getattr(tagged_union, tag) + for tag in getattr(tagged_union, TAGS_ATTR) + if getattr(tagged_union, tag) is not Undefined + } + return next(iter(defined.items())) diff --git a/.venv/lib/python3.9/site-packages/apischema/type_names.py b/.venv/lib/python3.9/site-packages/apischema/type_names.py new file mode 100644 index 0000000..efd443c --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/type_names.py @@ -0,0 +1,110 @@ +import collections.abc +import warnings +from contextlib import suppress +from dataclasses 
import dataclass +from typing import Any, Callable, MutableMapping, NamedTuple, Optional, TypeVar, Union + +from apischema.cache import CacheAwareDict +from apischema.types import AnyType, PRIMITIVE_TYPES +from apischema.typing import get_args, get_origin, is_named_tuple, is_type_var +from apischema.utils import has_type_vars, merge_opts, replace_builtins + + +class TypeName(NamedTuple): + json_schema: Optional[str] = None + graphql: Optional[str] = None + + +NameOrFactory = Union[str, None, Callable[..., Optional[str]]] + + +def _apply_args(name_or_factory: NameOrFactory, *args) -> Optional[str]: + return name_or_factory(*args) if callable(name_or_factory) else name_or_factory + + +_type_names: MutableMapping[AnyType, "TypeNameFactory"] = CacheAwareDict({}) + +T = TypeVar("T") + + +@dataclass(frozen=True) +class TypeNameFactory: + json_schema: NameOrFactory + graphql: NameOrFactory + + def __call__(self, tp: T) -> T: + self.check_type(tp) + _type_names[replace_builtins(tp)] = self + return tp + + def check_type(self, tp: AnyType): + if is_type_var(tp): + raise TypeError("TypeVar cannot have a type_name") + if has_type_vars(tp): + if get_args(tp): + raise TypeError("Generic alias cannot have a type_name") + elif isinstance(self.json_schema, str) or isinstance(self.graphql, str): + raise TypeError( + "Unspecialized generic type must used factory type_name" + ) + + def to_type_name(self, tp: AnyType, *args) -> TypeName: + self.check_type(tp) + return TypeName( + _apply_args(self.json_schema, tp, *args), + _apply_args(self.graphql, tp, *args), + ) + + +def type_name( + ref: NameOrFactory = None, + *, + json_schema: NameOrFactory = None, + graphql: NameOrFactory = None, +) -> TypeNameFactory: + return TypeNameFactory(json_schema or ref, graphql or ref) + + +no_type_name = {*PRIMITIVE_TYPES, Any} + + +def default_type_name(tp: AnyType) -> Optional[TypeName]: + if ( + hasattr(tp, "__name__") + and not get_args(tp) + and not has_type_vars(tp) + and tp not in no_type_name 
+ and ( + not isinstance(tp, type) + or not issubclass(tp, collections.abc.Collection) + or is_named_tuple(tp) + ) + ): + return TypeName(tp.__name__, tp.__name__) + else: + return None + + +def get_type_name(tp: AnyType) -> TypeName: + from apischema import settings + + tp = replace_builtins(tp) + with suppress(KeyError, TypeError): + return _type_names[tp].to_type_name(tp) + origin, args = get_origin(tp), get_args(tp) + if args and not has_type_vars(tp): + with suppress(KeyError, TypeError): + return _type_names[origin].to_type_name(origin, *args) + return settings.default_type_name(tp) or TypeName() + + +@merge_opts +def merge_type_name(default: TypeName, override: TypeName) -> TypeName: + return TypeName( + override.json_schema or default.json_schema, override.graphql or default.graphql + ) + + +def schema_ref(ref: Optional[str]) -> Callable[[T], T]: + warnings.warn("schema_ref is deprecated, use type_name instead", DeprecationWarning) + return type_name(ref) diff --git a/.venv/lib/python3.9/site-packages/apischema/types.py b/.venv/lib/python3.9/site-packages/apischema/types.py new file mode 100644 index 0000000..0064792 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/types.py @@ -0,0 +1,122 @@ +import collections.abc +import sys +from enum import Enum, auto +from itertools import chain +from types import MappingProxyType +from typing import ( + AbstractSet, + Any, + Collection, + Dict, + FrozenSet, + List, + Mapping, + MutableMapping, + MutableSequence, + MutableSet, + Sequence, + Set, + TYPE_CHECKING, + Tuple, + Type, + Union, +) + +AnyType = Any +NoneType: Type[None] = type(None) +Number = Union[int, float] + +PRIMITIVE_TYPES = (str, int, bool, float, NoneType) +COLLECTION_TYPES = ( + Collection, + collections.abc.Collection, + Sequence, + collections.abc.Sequence, + Tuple, + tuple, + MutableSequence, + collections.abc.MutableSequence, + List, + list, + AbstractSet, + collections.abc.Set, + FrozenSet, + frozenset, + MutableSet, + 
collections.abc.MutableSet, + Set, + set, +) +MAPPING_TYPES = ( + Mapping, + collections.abc.Mapping, + MutableMapping, + collections.abc.MutableMapping, + Dict, + dict, + MappingProxyType, +) + + +if sys.version_info >= (3, 7): # pragma: no cover + OrderedDict = dict + ChainMap = collections.ChainMap +else: # pragma: no cover + OrderedDict = collections.OrderedDict + + class ChainMap(collections.ChainMap): + def __iter__(self): + return iter({k: None for k in chain.from_iterable(reversed(self.maps))}) + + +class Metadata(Mapping[str, Any]): + def __or__(self, other: Mapping[str, Any]) -> "Metadata": + return MetadataImplem({**self, **other}) + + def __ror__(self, other: Mapping[str, Any]) -> "Metadata": + return MetadataImplem({**other, **self}) + + +class MetadataMixin(Metadata): + key: str + + def __getitem__(self, key): + if key != self.key: + raise KeyError(key) + return self + + def __iter__(self): + return iter((self.key,)) + + def __len__(self): + return 1 + + +class MetadataImplem(dict, Metadata): # type: ignore + def __hash__(self): + return hash(tuple(sorted(self.items()))) + + +# Singleton type, see https://www.python.org/dev/peps/pep-0484/#id30 +if TYPE_CHECKING: + + class UndefinedType(Enum): + Undefined = auto() + + Undefined = UndefinedType.Undefined +else: + + class UndefinedType: + def __new__(cls): + return Undefined + + def __repr__(self): + return "Undefined" + + def __str__(self): + return "Undefined" + + def __bool__(self): + return False + + Undefined = object.__new__(UndefinedType) diff --git a/.venv/lib/python3.9/site-packages/apischema/typing.py b/.venv/lib/python3.9/site-packages/apischema/typing.py new file mode 100644 index 0000000..6e9c763 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/typing.py @@ -0,0 +1,303 @@ +"""Kind of typing_extensions for this package""" +__all__ = ["get_args", "get_origin", "get_type_hints"] + +import sys +from types import ModuleType, new_class +from typing import ( # type: ignore + Any, + 
Callable, + Collection, + Dict, + Generic, + Set, + Tuple, + Type, + TypeVar, + Union, +) + + +class _FakeType: + pass + + +if sys.version_info >= (3, 9): # pragma: no cover + from typing import Annotated, TypedDict, get_type_hints, get_origin, get_args +else: # pragma: no cover + try: + from typing_extensions import Annotated, TypedDict + except ImportError: + if sys.version_info >= (3, 8): + from typing import TypedDict + try: + from typing_extensions import get_type_hints as gth + except ImportError: + from typing import get_type_hints as _gth + + def gth(obj, globalns=None, localns=None, include_extras=False): # type: ignore + return _gth(obj, globalns, localns) + + def get_type_hints( # type: ignore + obj, globalns=None, localns=None, include_extras=False + ): + # TODO This has been fixed in recent 3.7 and 3.8 + # fix https://bugs.python.org/issue37838 + if not isinstance(obj, (type, ModuleType)) and globalns is None: + nsobj = obj + while hasattr(nsobj, "__wrapped__"): + nsobj = nsobj.__wrapped__ + globalns = getattr(nsobj, "__globals__", None) + localns = {"unicode": str, **(localns or {})} + return gth(obj, globalns, localns, include_extras) + + try: + from typing_extensions import get_origin, get_args + except ImportError: + + def _assemble_tree(tree: Tuple[Any]) -> Any: + if not isinstance(tree, tuple): + return tree + else: + origin, *args = tree # type: ignore + if origin is Annotated: + return Annotated[(_assemble_tree(args[0]), *args[1])] + else: + return origin[tuple(map(_assemble_tree, args))] + + def get_origin(tp): # type: ignore + # In Python 3.6: List[Collection[T]][int].__args__ == int != Collection[int] + if hasattr(tp, "_subs_tree"): + tp = _assemble_tree(tp._subs_tree()) + if isinstance(tp, _AnnotatedAlias): + return None if tp.__args__ is None else Annotated + if tp is Generic: + return Generic + return getattr(tp, "__origin__", None) + + def get_args(tp): # type: ignore + # In Python 3.6: List[Collection[T]][int].__args__ == int != 
Collection[int] + if hasattr(tp, "_subs_tree"): + tp = _assemble_tree(tp._subs_tree()) + if isinstance(tp, _AnnotatedAlias): + return () if tp.__args__ is None else (tp.__args__[0], *tp.__metadata__) + # __args__ can be None in 3.6 inside __set_name__ + res = getattr(tp, "__args__", ()) or () + if get_origin(tp) is Callable and res[0] is not Ellipsis: + res = (list(res[:-1]), res[-1]) + return res + + +if sys.version_info >= (3, 8): # pragma: no cover + from typing import Literal, Protocol # noqa: F401 +else: # pragma: no cover + try: + from typing_extensions import Literal, Protocol # noqa: F401 + except ImportError: + pass + +if sys.version_info >= (3, 7): + from typing import _collect_type_vars, ForwardRef # type: ignore +else: + from typing import _type_vars, _ForwardRef + + _collect_type_vars = _type_vars + + def ForwardRef(arg, is_argument): + return _ForwardRef(arg) + + +try: + from typing import _strip_annotations # type: ignore +except ImportError: + try: + from typing_extensions import _strip_annotations # type: ignore + except ImportError: + + def _strip_annotations(t): + return t + + +def _generic_mro(result, tp): + origin = get_origin(tp) + if origin is None: + origin = tp + result[origin] = tp + if hasattr(origin, "__orig_bases__"): + parameters = _collect_type_vars(origin.__orig_bases__) + substitution = dict(zip(parameters, get_args(tp))) + for base in origin.__orig_bases__: + if get_origin(base) in result: + continue + base_parameters = getattr(base, "__parameters__", ()) + if base_parameters: + base = base[tuple(substitution.get(p, p) for p in base_parameters)] + _generic_mro(result, base) + + +# sentinel value to avoid to subscript Generic and Protocol +try: + BASE_GENERIC_MRO = {Generic: Generic, Protocol: Protocol} +except NameError: + BASE_GENERIC_MRO = {Generic: Generic} + + +def generic_mro(tp): + origin = get_origin(tp) + if origin is None and not hasattr(tp, "__orig_bases__"): + if not isinstance(tp, type): + raise TypeError(f"{tp!r} is 
not a type or a generic alias") + return tp.__mro__ + result = BASE_GENERIC_MRO.copy() + _generic_mro(result, tp) + cls = origin if origin is not None else tp + return tuple(result.get(sub_cls, sub_cls) for sub_cls in cls.__mro__) + + +def resolve_type_hints(obj: Any) -> Dict[str, Any]: + """Wrap get_type_hints to resolve type vars in case of generic inheritance. + + `obj` can also be a parametrized generic class.""" + origin_or_obj = get_origin(obj) or obj + if isinstance(origin_or_obj, type): + hints = {} + for base in reversed(generic_mro(obj)): + base_origin = get_origin(base) or base + base_annotations = getattr(base_origin, "__dict__", {}).get( + "__annotations__", {} + ) + substitution = dict( + zip(getattr(base_origin, "__parameters__", ()), get_args(base)) + ) + for name, hint in get_type_hints(base_origin, include_extras=True).items(): + if name not in base_annotations: + continue + if isinstance(hint, TypeVar): + hints[name] = substitution.get(hint, hint) + elif getattr(hint, "__parameters__", ()): + hints[name] = (Union if is_union(hint) else hint)[ + tuple(substitution.get(p, p) for p in hint.__parameters__) + ] + else: + hints[name] = hint + return hints + else: + return get_type_hints(obj, include_extras=True) + + +_T = TypeVar("_T") +_GenericAlias: Any = type(Generic[_T]) +try: + _AnnotatedAlias: Any = type(Annotated[_T, ...]) +except NameError: + _AnnotatedAlias = _FakeType +try: + + class _TypedDictImplem(TypedDict): + pass + + _LiteralMeta: Any = type(Literal) + _TypedDictMeta: Any = type(_TypedDictImplem) +except NameError: + _LiteralMeta, _TypedDictMeta = _FakeType, _FakeType # type: ignore + + +def is_new_type(tp: Any) -> bool: + return hasattr(tp, "__supertype__") + + +def is_annotated(tp: Any) -> bool: + try: + from typing import Annotated # type: ignore + + return get_origin(tp) == Annotated + except ImportError: + try: + from typing_extensions import Annotated # type: ignore + + return get_origin(tp) == Annotated + except ImportError: + 
return False + + +def is_literal(tp: Any) -> bool: + try: + from typing import Literal + + return get_origin(tp) == Literal or isinstance(tp, type(Literal)) # py36 + except ImportError: + try: + from typing_extensions import Literal # type: ignore + + return get_origin(tp) == Literal or isinstance(tp, type(Literal)) # py36 + except ImportError: + return False + + +def is_named_tuple(tp: Any) -> bool: + return issubclass(tp, tuple) and hasattr(tp, "_fields") + + +def is_typed_dict(tp: Any) -> bool: + try: + from typing import TypedDict + + return isinstance(tp, type(new_class("_TypedDictImplem", (TypedDict,)))) + except ImportError: + try: + from typing_extensions import TypedDict # type: ignore + + return isinstance(tp, type(new_class("_TypedDictImplem", (TypedDict,)))) + except ImportError: + return False + + +def is_type_var(tp: Any) -> bool: + return isinstance(tp, TypeVar) # type: ignore + + +# Don't use sys.version_info because it can also depend of typing_extensions version +def required_keys(typed_dict: Type) -> Collection[str]: + assert is_typed_dict(typed_dict) + if hasattr(typed_dict, "__required_keys__"): + return typed_dict.__required_keys__ + else: + required: Set[str] = set() + bases_annotations: Set = set() + for base in typed_dict.__bases__: + if not isinstance(base, _TypedDictMeta): + continue + bases_annotations.update(base.__annotations__) + required.update(required_keys(base)) + if typed_dict.__total__: # type: ignore + required.update(typed_dict.__annotations__.keys() - bases_annotations) + return required + + +# py37/py38 get_origin of builtin wrapped generics return the unsubscriptable builtin +# type. 
+if (3, 7) <= sys.version_info < (3, 9): + import typing + + TYPING_ALIASES = { + getattr(elt, "__origin__", None): elt for elt in typing.__dict__.values() + } + + def typing_origin(origin: Any) -> Any: + return TYPING_ALIASES.get(origin, origin) + +else: + typing_origin = lambda tp: tp + + +def is_type(tp: Any) -> bool: + """isinstance is not enough because in py39: isinstance(list[int], type) == True""" + return isinstance(tp, type) and not get_args(tp) + + +def is_union(tp: Any) -> bool: + try: + from types import UnionType # type: ignore + + return tp in (UnionType, Union) + except ImportError: + return tp is Union diff --git a/.venv/lib/python3.9/site-packages/apischema/utils.py b/.venv/lib/python3.9/site-packages/apischema/utils.py new file mode 100644 index 0000000..33b187c --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/utils.py @@ -0,0 +1,452 @@ +import collections.abc +import inspect +import re +import sys +import warnings +from contextlib import contextmanager, suppress +from dataclasses import dataclass, is_dataclass +from enum import Enum +from functools import wraps +from types import MappingProxyType +from typing import ( + AbstractSet, + Any, + Awaitable, + Callable, + Collection, + Container, + Dict, + Generic, + Hashable, + Iterable, + Iterator, + List, + Mapping, + MutableMapping, + NoReturn, + Optional, + Sequence, + Set, + Tuple, + Type, + TypeVar, + Union, + cast, +) + +from apischema.types import ( + AnyType, + COLLECTION_TYPES, + MAPPING_TYPES, + OrderedDict, + PRIMITIVE_TYPES, +) +from apischema.typing import ( + _collect_type_vars, + generic_mro, + get_args, + get_origin, + get_type_hints, + is_annotated, + is_type_var, + is_union, + typing_origin, +) + +try: + from apischema.typing import Annotated +except ImportError: + Annotated = ... 
# type: ignore + +PREFIX = "_apischema_" + +T = TypeVar("T") +U = TypeVar("U") + + +def identity(x: T) -> T: + return x + + +Lazy = Callable[[], T] + + +@dataclass(frozen=True) # dataclass enable equality check +class LazyValue(Generic[T]): + default: T + + def __call__(self) -> T: + return self.default + + +if sys.version_info <= (3, 7): # pragma: no cover + is_dataclass_ = is_dataclass + + def is_dataclass(obj) -> bool: + return is_dataclass_(obj) and getattr(obj, "__origin__", None) is None + + +def is_hashable(obj: Any) -> bool: + return isinstance(obj, collections.abc.Hashable) + + +def opt_or(opt: Optional[T], default: U) -> Union[T, U]: + return opt if opt is not None else default + + +def to_hashable(data: Union[None, int, float, str, bool, list, dict]) -> Hashable: + if isinstance(data, list): + return tuple(map(to_hashable, data)) + if isinstance(data, dict): + return tuple(sorted((to_hashable(k), to_hashable(v)) for k, v in data.items())) + return data # type: ignore + + +SNAKE_CASE_REGEX = re.compile(r"_([a-z\d])") +CAMEL_CASE_REGEX = re.compile(r"[a-z\d]([A-Z])") + + +def to_camel_case(s: str) -> str: + return SNAKE_CASE_REGEX.sub(lambda m: m.group(1).upper(), s) + + +def to_snake_case(s: str) -> str: + return CAMEL_CASE_REGEX.sub(lambda m: "_" + m.group(1).lower(), s) + + +def to_pascal_case(s: str) -> str: + camel = to_camel_case(s) + return camel[0].upper() + camel[1:] if camel else camel + + +MakeDataclassField = Union[Tuple[str, AnyType], Tuple[str, AnyType, Any]] + + +def merge_opts( + func: Callable[[T, T], T] +) -> Callable[[Optional[T], Optional[T]], Optional[T]]: + def wrapper(opt1, opt2): + if opt1 is None: + return opt2 + if opt2 is None: + return opt1 + return func(opt1, opt2) + + return wrapper + + +K = TypeVar("K") +V = TypeVar("V") + + +@merge_opts +def merge_opts_mapping(m1: Mapping[K, V], m2: Mapping[K, V]) -> Mapping[K, V]: + return {**m1, **m2} + + +def has_type_vars(tp: AnyType) -> bool: + return is_type_var(tp) or bool(getattr(tp, 
"__parameters__", ())) + + +TV = AnyType # TypeVar is not supported as a type +# 10 should be enough for all builtin types +_type_vars = [TypeVar(f"T{i}") for i in range(10)] + + +def get_parameters(tp: AnyType) -> Iterable[TV]: + if hasattr(tp, "__parameters__"): + return tp.__parameters__ + elif hasattr(tp, "__orig_bases__"): + return _collect_type_vars(tp.__orig_bases__) + elif is_type_var(tp): + return (tp,) + else: + return _type_vars + + +def substitute_type_vars(tp: AnyType, substitution: Mapping[TV, AnyType]) -> AnyType: + if is_type_var(tp): + try: + return substitution[tp] + except KeyError: + return Union[tp.__constraints__] if tp.__constraints__ else Any + elif getattr(tp, "__parameters__", ()): + return (Union if is_union(tp) else tp)[ + tuple(substitution.get(p, p) for p in tp.__parameters__) + ] + else: + return tp + + +Func = TypeVar("Func", bound=Callable) + + +def typed_wraps(wrapped: Func) -> Callable[[Callable], Func]: + return cast(Func, wraps(wrapped)) + + +def is_subclass(tp: AnyType, base: AnyType) -> bool: + tp, base = get_origin_or_type(tp), get_origin_or_type(base) + return tp == base or ( + isinstance(tp, type) and isinstance(base, type) and issubclass(tp, base) + ) + + +def _annotated(tp: AnyType) -> AnyType: + return get_args(tp)[0] if is_annotated(tp) else tp + + +def get_origin_or_type(tp: AnyType) -> AnyType: + origin = get_origin(tp) + return origin if origin is not None else tp + + +def get_origin2(tp: AnyType) -> Optional[Type]: + return get_origin(_annotated(tp)) + + +def get_args2(tp: AnyType) -> Tuple[AnyType, ...]: + return get_args(_annotated(tp)) + + +def get_origin_or_type2(tp: AnyType) -> AnyType: + tp2 = _annotated(tp) + origin = get_origin(tp2) + return origin if origin is not None else tp2 + + +def keep_annotations(tp: AnyType, annotated: AnyType) -> AnyType: + return Annotated[(tp, *get_args(annotated)[1:])] if is_annotated(annotated) else tp + + +def with_parameters(tp: AnyType) -> AnyType: + return 
tp[tp.__parameters__] if getattr(tp, "__parameters__", ()) else tp + + +def is_union_of(tp: AnyType, of: AnyType) -> bool: + return tp == of or (is_union(get_origin_or_type2(tp)) and of in get_args2(tp)) + + +if sys.version_info < (3, 7): + LIST_ORIGIN = List + SET_ORIGIN = Set + TUPLE_ORIGIN = Tuple + DICT_ORIGIN = Dict +else: + LIST_ORIGIN = typing_origin(list) + SET_ORIGIN = typing_origin(set) + TUPLE_ORIGIN = typing_origin(tuple) + DICT_ORIGIN = typing_origin(dict) + + +def replace_builtins(tp: AnyType) -> AnyType: + origin = get_origin2(tp) + if origin is None: + return tp + args = tuple(map(replace_builtins, get_args2(tp))) + replacement: Any + if origin in COLLECTION_TYPES: + if issubclass(origin, collections.abc.Set): + replacement = SET_ORIGIN + elif issubclass(origin, tuple) and (len(args) < 2 or args[1] is not ...): + replacement = TUPLE_ORIGIN + else: + replacement = LIST_ORIGIN + elif origin in MAPPING_TYPES: + replacement = DICT_ORIGIN + elif is_union(origin): + replacement = Union + else: + replacement = typing_origin(origin) + res = replacement[args] if args else replacement + return keep_annotations(res, tp) + + +def sort_by_annotations_position( + cls: Type, elts: Collection[T], key: Callable[[T], str] +) -> List[T]: + annotations: Dict[str, Any] = OrderedDict() + for base in reversed(cls.__mro__): + annotations.update(getattr(base, "__annotations__", ())) + positions = {key: i for i, key in enumerate(annotations)} + return sorted(elts, key=lambda elt: positions.get(key(elt), len(positions))) + + +def stop_signature_abuse() -> NoReturn: + raise TypeError("Stop signature abuse") + + +empty_dict: Mapping[str, Any] = MappingProxyType({}) + +ITERABLE_TYPES = { + *COLLECTION_TYPES, + *MAPPING_TYPES, + Iterable, + collections.abc.Iterable, + Container, + collections.abc.Container, +} + + +def subtyping_substitution( + supertype: AnyType, subtype: AnyType +) -> Tuple[Mapping[AnyType, AnyType], Mapping[AnyType, AnyType]]: + if not get_args(subtype) and 
not isinstance(subtype, type): + return {}, {} + supertype, subtype = with_parameters(supertype), with_parameters(subtype) + supertype_to_subtype, subtype_to_supertype = {}, {} + super_origin = get_origin_or_type2(supertype) + for base in generic_mro(subtype): + base_origin = get_origin_or_type2(base) + if base_origin == super_origin or ( + base_origin in ITERABLE_TYPES and super_origin in ITERABLE_TYPES + ): + for base_arg, super_arg in zip(get_args2(base), get_args2(supertype)): + if is_type_var(super_arg): + supertype_to_subtype[super_arg] = base_arg + if is_type_var(base_arg): + subtype_to_supertype[base_arg] = super_arg + break + return supertype_to_subtype, subtype_to_supertype + + +def literal_values(values: Sequence[Any]) -> Sequence[Any]: + primitive_values = [v.value if isinstance(v, Enum) else v for v in values] + if any(not isinstance(v, PRIMITIVE_TYPES) for v in primitive_values): + raise TypeError("Only primitive types are supported for Literal/Enum") + return primitive_values + + +awaitable_origin = get_origin(Awaitable[Any]) + + +def is_async(func: Callable, types: Mapping[str, AnyType] = None) -> bool: + wrapped_func = func + while hasattr(wrapped_func, "__wrapped__"): + wrapped_func = wrapped_func.__wrapped__ # type: ignore + if inspect.iscoroutinefunction(wrapped_func): + return True + if types is None: + try: + types = get_type_hints(func) + except Exception: + types = {} + return get_origin_or_type2(types.get("return")) == awaitable_origin + + +@contextmanager +def context_setter(obj: Any): + dict_copy = obj.__dict__.copy() + try: + yield + finally: + obj.__dict__.clear() + obj.__dict__.update(dict_copy) + + +def wrap_generic_init_subclass(init_subclass: Func) -> Func: + if sys.version_info >= (3, 7): + return init_subclass + + @wraps(init_subclass) + def wrapper(cls, **kwargs): + if getattr(cls, "__origin__", None) is not None: + super(cls).__init_subclass__(**kwargs) + return + init_subclass(cls, **kwargs) + + return wrapper + + +# # Because 
hash of generic classes is changed by metaclass after __init_subclass__ +# # classes registered in global dictionaries are no more accessible. Here is a dictionary +# # wrapper to fix this issue +if sys.version_info < (3, 7): + K = TypeVar("K") + V = TypeVar("V") + + class KeyWrapper: + def __init__(self, key): + self.key = key + + def __eq__(self, other): + return self.key == self.key + + def __hash__(self): + return hash( + id(self.key) + if getattr(self.key, "__origin__", ...) is None + else self.key + ) + + class type_dict_wrapper(MutableMapping[K, V]): + def __init__(self, wrapped: Dict[K, V]): + self.wrapped = cast(Dict[KeyWrapper, V], wrapped) + + def __delitem__(self, key: K) -> None: + del self.wrapped[KeyWrapper(key)] + + def __getitem__(self, key: K) -> V: + return self.wrapped[KeyWrapper(key)] + + def __iter__(self) -> Iterator[K]: + return iter(wrapper.key for wrapper in list(self.wrapped)) + + def __len__(self) -> int: + return len(self.wrapped) + + def __setitem__(self, key: K, value: V): + self.wrapped[KeyWrapper(key)] = value + +else: + M = TypeVar("M", bound=MutableMapping) + + def type_dict_wrapper(wrapped: M) -> M: + return wrapped + + +def deprecate_kwargs( + parameters_map: Mapping[str, Optional[str]] +) -> Callable[[Func], Func]: + def decorator(func: Func) -> Func: + wrapped = func.__init__ if isinstance(func, type) else func # type: ignore + + def wrapper(*args, **kwargs): + for param, replacement in parameters_map.items(): + if param in kwargs: + instead = f", use '{replacement}' instead" if replacement else "" + warnings.warn( + f"{func.__name__} parameter '{param}' is deprecated{instead}", + DeprecationWarning, + ) + arg = kwargs.pop(param) + if replacement: + kwargs[replacement] = kwargs.get(replacement, arg) + return wrapped(*args, **kwargs) + + if isinstance(func, type): + func.__init__ = wraps(func.__init__)(wrapper) # type: ignore + return cast(Func, func) + else: + return cast(Func, wraps(func)(wrapper)) + + return decorator + + 
+def as_predicate( + collection_or_predicate: Union[Collection[T], Callable[[T], bool]] +) -> Callable[[T], bool]: + if not isinstance(collection_or_predicate, Collection): + return collection_or_predicate + collection = collection_or_predicate + if not isinstance(collection, AbstractSet): + with suppress(Exception): + collection = set(collection) + + def wrapper(elt: T) -> bool: + try: + return elt in collection + except Exception: + return False + + return wrapper diff --git a/.venv/lib/python3.9/site-packages/apischema/validation/__init__.py b/.venv/lib/python3.9/site-packages/apischema/validation/__init__.py new file mode 100644 index 0000000..92f3fa9 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/validation/__init__.py @@ -0,0 +1,12 @@ +__all__ = [ + "Discard", + "LocalizedError", + "ValidationError", + "ValidatorResult", + "get_validators", + "validate", + "validator", +] + +from .errors import LocalizedError, ValidationError, ValidatorResult +from .validators import Discard, get_validators, validate, validator diff --git a/.venv/lib/python3.9/site-packages/apischema/validation/dependencies.py b/.venv/lib/python3.9/site-packages/apischema/validation/dependencies.py new file mode 100644 index 0000000..7fd005b --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/validation/dependencies.py @@ -0,0 +1,63 @@ +import ast +import inspect +import textwrap +from typing import AbstractSet, Callable, Collection, Dict, Set + + +Dependencies = AbstractSet[str] + + +class DependencyFinder(ast.NodeVisitor): + def __init__(self, param: str): + self.param = param + self.dependencies: Set[str] = set() + + def visit_Attribute(self, node): + self.generic_visit(node) + if isinstance(node.value, ast.Name) and node.value.id == self.param: + self.dependencies.add(node.attr) + + # TODO Add warning in case of function call with self in parameter + # or better, follow the call, but it would be too hard (local import, etc.) 
+ + +def first_parameter(func: Callable) -> str: + try: + return next(iter(inspect.signature(func).parameters)) + except StopIteration: + raise TypeError("Cannot compute dependencies if no parameter") + + +def find_dependencies(func: Callable) -> Dependencies: + try: + finder = DependencyFinder(first_parameter(func)) + finder.visit(ast.parse(textwrap.dedent(inspect.getsource(func)))) + except ValueError: + return set() + return finder.dependencies + + +cache: Dict[Callable, Dependencies] = {} + + +def find_all_dependencies( + cls: type, func: Callable, rec_guard: Collection[str] = () +) -> Dependencies: + """Dependencies contains class variables (because they can be "fake" ones as in + dataclasses)""" + if func not in cache: + dependencies = set(find_dependencies(func)) + for attr in list(dependencies): + if not hasattr(cls, attr): + continue + member = getattr(cls, attr) + if isinstance(member, property): + member = member.fget + if callable(member): + dependencies.remove(attr) + if member in rec_guard: + continue + rec_deps = find_all_dependencies(cls, member, {*rec_guard, member}) + dependencies.update(rec_deps) + cache[func] = dependencies + return cache[func] diff --git a/.venv/lib/python3.9/site-packages/apischema/validation/errors.py b/.venv/lib/python3.9/site-packages/apischema/validation/errors.py new file mode 100644 index 0000000..a26560a --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/validation/errors.py @@ -0,0 +1,150 @@ +from dataclasses import dataclass, field, replace +from functools import reduce +from typing import ( + Any, + Collection, + Dict, + Generator, + Iterable, + Iterator, + List, + Mapping, + Optional, + Sequence, + Tuple, + TypeVar, + Union, + overload, +) + +from apischema.aliases import Aliaser +from apischema.objects import AliasedStr +from apischema.utils import merge_opts + +try: + from apischema.typing import Annotated +except ImportError: + Annotated = ... 
# type: ignore + +ErrorMsg = str +Error = Union[ErrorMsg, Tuple[Any, ErrorMsg]] +# where Any = Union[Field, int, str, Iterable[Union[Field, int, str,]]] +# but Field being kind of magic not understood by type checkers, it's hidden behind Any +ErrorKey = Union[str, int] +T = TypeVar("T") +ValidatorResult = Generator[Error, None, T] + +try: + from apischema.typing import TypedDict + + class LocalizedError(TypedDict): + loc: Sequence[ErrorKey] + msg: ErrorMsg + +except ImportError: + LocalizedError = Mapping[str, Any] # type: ignore + + +@dataclass +class ValidationError(Exception): + messages: Sequence[ErrorMsg] = field(default_factory=list) + children: Mapping[ErrorKey, "ValidationError"] = field(default_factory=dict) + + def __str__(self): + return repr(self) + + def _errors(self) -> Iterator[Tuple[List[ErrorKey], ErrorMsg]]: + for msg in self.messages: + yield [], msg + for child_key in sorted(self.children): + for path, error in self.children[child_key]._errors(): + yield [child_key, *path], error + + @property + def errors(self) -> List[LocalizedError]: + return [{"loc": path, "msg": error} for path, error in self._errors()] + + @staticmethod + def from_errors(errors: Sequence[LocalizedError]) -> "ValidationError": + return reduce( + merge_errors, + [_rec_build_error(err["loc"], err["msg"]) for err in errors], + ValidationError(), + ) + + +@overload +def merge_errors( + err1: Optional[ValidationError], err2: ValidationError +) -> ValidationError: + ... + + +@overload +def merge_errors( + err1: ValidationError, err2: Optional[ValidationError] +) -> ValidationError: + ... + + +@overload +def merge_errors( + err1: Optional[ValidationError], err2: Optional[ValidationError] +) -> Optional[ValidationError]: + ... 
+ + +@merge_opts # type: ignore +def merge_errors(err1: ValidationError, err2: ValidationError) -> ValidationError: + if err1 is None: + return err2 + if err2 is None: + return err1 + return ValidationError( + [*err1.messages, *err2.messages], + { + key: merge_errors( # type: ignore + err1.children.get(key), err2.children.get(key) + ) + for key in err1.children.keys() | err2.children.keys() + }, + ) + + +def apply_aliaser(error: ValidationError, aliaser: Aliaser) -> ValidationError: + aliased, aliased_children = False, {} + for key, child in error.children.items(): + if isinstance(key, AliasedStr): + key = str(aliaser(key)) # str because it could be a str subclass + aliased = True + child2 = apply_aliaser(child, aliaser) + aliased |= child2 is not child + aliased_children[key] = child2 + return replace(error, children=aliased_children) if aliased else error + + +def _rec_build_error(path: Sequence[ErrorKey], msg: ErrorMsg) -> ValidationError: + if not path: + return ValidationError([msg]) + else: + return ValidationError(children={path[0]: _rec_build_error(path[1:], msg)}) + + +def build_validation_error(errors: Iterable[Error]) -> ValidationError: + messages: List[ErrorMsg] = [] + children: Dict[ErrorKey, ValidationError] = {} + for error in errors: + if isinstance(error, ErrorMsg): + messages.append(error) + continue + path, msg = error + if not path: + messages.append(msg) + else: + if isinstance(path, str) or not isinstance(path, Collection): + path = (path,) + key, *remain = path + children[key] = merge_errors( + children.get(key), _rec_build_error(remain, msg) + ) + return ValidationError(messages, children) diff --git a/.venv/lib/python3.9/site-packages/apischema/validation/mock.py b/.venv/lib/python3.9/site-packages/apischema/validation/mock.py new file mode 100644 index 0000000..c4da4a7 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/validation/mock.py @@ -0,0 +1,57 @@ +from dataclasses import dataclass +from functools import partial +from 
types import FunctionType, MethodType +from typing import Any, Mapping, Optional, TYPE_CHECKING, Type, TypeVar + +from apischema.fields import FIELDS_SET_ATTR +from apischema.objects import object_fields + +if TYPE_CHECKING: + from apischema.validation.validators import Validator + +MOCK_FIELDS_FIELD = "__mock_fields__" +MOCK_CLS_FIELD = "__mock_cls__" + + +class NonTrivialDependency(Exception): + def __init__(self, attr: str): + self.attr = attr + self.validator: Optional["Validator"] = None + + +@dataclass(init=False) +class ValidatorMock: + def __init__(self, cls: Type, values: Mapping[str, Any]): + self.cls = cls + self.values = values + + def __getattribute__(self, name: str) -> Any: + values = super().__getattribute__("values") + if name in values: + return values[name] + cls = super().__getattribute__("cls") + fields = object_fields(cls, deserialization=True) + if name in fields: + if fields[name].required: + raise NonTrivialDependency(name) + return fields[name].get_default() + if name == "__class__": + return cls + if name == "__dict__": + return {**values, FIELDS_SET_ATTR: set(values)} + if name == FIELDS_SET_ATTR: + return set(values) + if hasattr(cls, name): + member = getattr(cls, name) + # for classmethod (staticmethod are not handled) + if isinstance(member, MethodType): + return member + if isinstance(member, FunctionType): + return partial(member, self) + if isinstance(member, property): + return member.fget(self) # type: ignore + return member + raise NonTrivialDependency(name) + + +T = TypeVar("T") diff --git a/.venv/lib/python3.9/site-packages/apischema/validation/validators.py b/.venv/lib/python3.9/site-packages/apischema/validation/validators.py new file mode 100644 index 0000000..e418e9f --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/validation/validators.py @@ -0,0 +1,207 @@ +from collections import defaultdict +from functools import wraps +from inspect import Parameter, isgeneratorfunction, signature +from itertools import 
chain +from types import MethodType +from typing import ( + AbstractSet, + Any, + Callable, + Collection, + Iterable, + List, + Mapping, + MutableMapping, + Optional, + Sequence, + Type, + TypeVar, + overload, +) + +from apischema.aliases import Aliaser +from apischema.cache import CacheAwareDict +from apischema.methods import is_method, method_class +from apischema.objects import get_alias +from apischema.objects.fields import FieldOrName, check_field_or_name, get_field_name +from apischema.types import AnyType +from apischema.typing import get_type_hints +from apischema.utils import get_origin_or_type2 +from apischema.validation.dependencies import find_all_dependencies +from apischema.validation.errors import ( + ValidationError, + apply_aliaser, + build_validation_error, + merge_errors, +) +from apischema.validation.mock import NonTrivialDependency + +_validators: MutableMapping[Type, List["Validator"]] = CacheAwareDict(defaultdict(list)) + + +def get_validators(tp: AnyType) -> Sequence["Validator"]: + return list( + chain.from_iterable(_validators[cls] for cls in getattr(tp, "__mro__", [tp])) + ) + + +class Discard(Exception): + def __init__(self, fields: Optional[AbstractSet[str]], error: ValidationError): + self.fields = fields + self.error = error + + +class Validator: + def __init__( + self, + func: Callable, + field: FieldOrName = None, + discard: Collection[FieldOrName] = None, + ): + wraps(func)(self) + self.func = func + self.field = field + # Cannot use field.name because fields are not yet initialized with __set_name__ + if field is not None and discard is None: + self.discard: Optional[Collection[FieldOrName]] = (field,) + else: + self.discard = discard + self.dependencies: AbstractSet[str] = set() + try: + parameters = signature(func).parameters + except ValueError: + self.params: AbstractSet[str] = set() + else: + if not parameters: + raise TypeError("Validator must have at least one parameter") + if any(p.kind == Parameter.VAR_KEYWORD for p in 
parameters.values()): + raise TypeError("Validator cannot have variadic keyword parameter") + if any(p.kind == Parameter.VAR_POSITIONAL for p in parameters.values()): + raise TypeError("Validator cannot have variadic positional parameter") + self.params = set(list(parameters)[1:]) + if isgeneratorfunction(func): + + def validate(*args, **kwargs): + errors = list(func(*args, **kwargs)) + if errors: + raise build_validation_error(errors) + + self.validate = validate + + else: + self.validate = func + + def __get__(self, instance, owner): + return self if instance is None else MethodType(self.func, instance) + + def __call__(self, *args, **kwargs): + raise RuntimeError("Method __set_name__ has not been called") + + def _register(self, owner: Type): + self.owner = owner + self.dependencies = find_all_dependencies(owner, self.func) | self.params + _validators[owner].append(self) + + def __set_name__(self, owner, name): + self._register(owner) + setattr(owner, name, self.func) + + +T = TypeVar("T") + + +def validate( + obj: T, + validators: Iterable[Validator] = None, + kwargs: Optional[Mapping[str, Any]] = None, + *, + aliaser: Aliaser = lambda s: s, +) -> T: + if validators is None: + validators = get_validators(obj.__class__) + else: + validators = list(validators) + error: Optional[ValidationError] = None + for i, validator in enumerate(validators): + try: + if not kwargs: + validator.validate(obj) + elif validator.params == kwargs.keys(): + validator.validate(obj, **kwargs) + else: + validator.validate(obj, **{k: kwargs[k] for k in validator.params}) + except ValidationError as e: + err = apply_aliaser(e, aliaser) + except NonTrivialDependency as exc: + exc.validator = validator + raise + except AssertionError: + raise + except Exception as e: + err = ValidationError([str(e)]) + else: + continue + if validator.field is not None: + alias = getattr(get_alias(validator.owner), get_field_name(validator.field)) + err = ValidationError(children={aliaser(alias): err}) + 
error = merge_errors(error, err) + if validator.discard: + try: + discarded = set(map(get_field_name, validator.discard)) + next_validators = ( + v for v in validators[i:] if v.dependencies.isdisjoint(discarded) + ) + validate(obj, next_validators, kwargs, aliaser=aliaser) + except ValidationError as err: + raise merge_errors(error, err) + else: + raise error + if error is not None: + raise error + return obj + + +V = TypeVar("V", bound=Callable) + + +@overload +def validator(func: V) -> V: + ... + + +@overload +def validator( + field: Any = None, *, discard: Any = None, owner: Type = None +) -> Callable[[V], V]: + ... + + +def validator(arg=None, *, field=None, discard=None, owner=None): + if callable(arg): + validator_ = Validator(arg, field, discard) + if is_method(arg): + cls = method_class(arg) + if cls is None: + if owner is not None: + raise TypeError("Validator owner cannot be set for class validator") + return validator_ + elif owner is None: + owner = cls + if owner is None: + try: + first_param = next(iter(signature(arg).parameters)) + owner = get_origin_or_type2(get_type_hints(arg)[first_param]) + except Exception: + raise ValueError("Validator first parameter must be typed") + validator_._register(owner) + return arg + else: + field = field or arg + if field is not None: + check_field_or_name(field) + if discard is not None: + if not isinstance(discard, Collection) or isinstance(discard, str): + discard = [discard] + for discarded in discard: + check_field_or_name(discarded) + return lambda func: validator(func, field=field, discard=discard, owner=owner) diff --git a/.venv/lib/python3.9/site-packages/apischema/visitor.py b/.venv/lib/python3.9/site-packages/apischema/visitor.py new file mode 100644 index 0000000..d94339e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/apischema/visitor.py @@ -0,0 +1,218 @@ +import warnings +from dataclasses import ( # type: ignore + Field, + InitVar, + _FIELDS, + _FIELD_CLASSVAR, + make_dataclass, +) +from enum 
import Enum +from types import MappingProxyType +from typing import ( + Any, + Collection, + Generic, + Mapping, + Sequence, + Tuple, + Type, + TypeVar, + Union, +) + +from apischema.types import ( + AnyType, + COLLECTION_TYPES, + MAPPING_TYPES, + OrderedDict, + PRIMITIVE_TYPES, +) +from apischema.typing import ( + get_args, + get_origin, + get_type_hints, + is_annotated, + is_literal, + is_named_tuple, + is_type_var, + is_typed_dict, + is_union, + required_keys, + resolve_type_hints, +) +from apischema.utils import PREFIX, get_origin_or_type, has_type_vars, is_dataclass + +try: + from apischema.typing import Annotated +except ImportError: + Annotated = ... # type: ignore + +TUPLE_TYPE = get_origin(Tuple[Any]) + + +def dataclass_types_and_fields( + tp: AnyType, +) -> Tuple[Mapping[str, AnyType], Sequence[Field], Sequence[Field]]: + from apischema.metadata.keys import INIT_VAR_METADATA + + cls = get_origin_or_type(tp) + assert is_dataclass(cls) + types = resolve_type_hints(tp) + fields, init_fields = [], [] + for field in getattr(cls, _FIELDS).values(): + assert isinstance(field, Field) + if field._field_type == _FIELD_CLASSVAR: # type: ignore + continue + field_type = types[field.name] + if isinstance(field_type, InitVar): + types[field.name] = field_type.type # type: ignore + init_fields.append(field) + elif field_type is InitVar: + metadata = getattr(cls, _FIELDS)[field.name].metadata + if INIT_VAR_METADATA not in metadata: + raise TypeError("Before 3.8, InitVar requires init_var metadata") + init_field = (PREFIX, metadata[INIT_VAR_METADATA], ...) 
+ tmp_cls = make_dataclass("Tmp", [init_field], bases=(cls,)) # type: ignore + types[field.name] = get_type_hints(tmp_cls, include_extras=True)[PREFIX] + if has_type_vars(types[field.name]): + raise TypeError("Generic InitVar are not supported before 3.8") + init_fields.append(field) + else: + fields.append(field) + # Use immutable return because of cache + return MappingProxyType(types), tuple(fields), tuple(init_fields) + + +class Unsupported(TypeError): + def __init__(self, tp: AnyType): + self.type = tp + + @property + def cls(self) -> AnyType: + warnings.warn( + "Unsupported.cls is deprecated, use Unsupported.type instead", + DeprecationWarning, + ) + return self.type + + +Result = TypeVar("Result", covariant=True) + + +class Visitor(Generic[Result]): + def annotated(self, tp: AnyType, annotations: Sequence[Any]) -> Result: + if Unsupported in annotations: + raise Unsupported(Annotated[(tp, *annotations)]) # type: ignore + return self.visit(tp) + + def any(self) -> Result: + raise NotImplementedError + + def collection(self, cls: Type[Collection], value_type: AnyType) -> Result: + raise NotImplementedError + + def dataclass( + self, + tp: AnyType, + types: Mapping[str, AnyType], + fields: Sequence[Field], + init_vars: Sequence[Field], + ) -> Result: + raise NotImplementedError + + def enum(self, cls: Type[Enum]) -> Result: + raise NotImplementedError + + def literal(self, values: Sequence[Any]) -> Result: + raise NotImplementedError + + def mapping( + self, cls: Type[Mapping], key_type: AnyType, value_type: AnyType + ) -> Result: + raise NotImplementedError + + def named_tuple( + self, tp: AnyType, types: Mapping[str, AnyType], defaults: Mapping[str, Any] + ) -> Result: + raise NotImplementedError + + def new_type(self, tp: AnyType, super_type: AnyType) -> Result: + return self.visit(super_type) + + def primitive(self, cls: Type) -> Result: + raise NotImplementedError + + def subprimitive(self, cls: Type, superclass: Type) -> Result: + return 
self.primitive(superclass) + + def tuple(self, types: Sequence[AnyType]) -> Result: + raise NotImplementedError + + def typed_dict( + self, tp: AnyType, types: Mapping[str, AnyType], required_keys: Collection[str] + ) -> Result: + raise NotImplementedError + + def union(self, alternatives: Sequence[AnyType]) -> Result: + raise NotImplementedError + + def unsupported(self, tp: AnyType) -> Result: + raise Unsupported(tp) + + def visit(self, tp: AnyType) -> Result: + origin, args = get_origin_or_type(tp), get_args(tp) + if args: + if is_annotated(tp): + return self.annotated(args[0], args[1:]) + if is_union(origin): + return self.union(args[0]) if len(args) == 1 else self.union(args) + if origin is TUPLE_TYPE: + if len(args) < 2 or args[1] is not ...: + return self.tuple(args) + if origin in COLLECTION_TYPES: + return self.collection(origin, args[0]) + if origin in MAPPING_TYPES: + return self.mapping(origin, args[0], args[1]) + if is_literal(tp): # pragma: no cover py37+ + return self.literal(args) + if origin in PRIMITIVE_TYPES: + return self.primitive(origin) + if is_dataclass(origin): + return self.dataclass(tp, *dataclass_types_and_fields(tp)) # type: ignore + if hasattr(origin, "__supertype__"): + return self.new_type(origin, origin.__supertype__) + if origin is Any: + return self.any() + if origin in COLLECTION_TYPES: + return self.collection(origin, Any) + if origin in MAPPING_TYPES: + return self.mapping(origin, Any, Any) + if isinstance(origin, type): + if issubclass(origin, Enum): + return self.enum(origin) + for primitive in PRIMITIVE_TYPES: + if issubclass(origin, primitive): + return self.subprimitive(origin, primitive) + # NamedTuple + if is_named_tuple(origin): + if hasattr(origin, "__annotations__"): + types = resolve_type_hints(origin) + elif hasattr(origin, "__field_types"): # pragma: no cover + types = origin.__field_types # type: ignore + else: # pragma: no cover + types = OrderedDict((f, Any) for f in origin._fields) # type: ignore # noqa: E501 + 
return self.named_tuple( + origin, types, origin._field_defaults # type: ignore + ) + if is_literal(origin): # pragma: no cover py36 + return self.literal(origin.__values__) # type: ignore + if is_typed_dict(origin): + return self.typed_dict( + origin, resolve_type_hints(origin), required_keys(origin) + ) + if is_type_var(origin): + if origin.__constraints__: + return self.visit(Union[origin.__constraints__]) + else: + return self.any() + return self.unsupported(tp) diff --git a/.venv/lib/python3.9/site-packages/attr/__init__.py b/.venv/lib/python3.9/site-packages/attr/__init__.py new file mode 100644 index 0000000..f95c96d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/attr/__init__.py @@ -0,0 +1,80 @@ +# SPDX-License-Identifier: MIT + +from __future__ import absolute_import, division, print_function + +import sys + +from functools import partial + +from . import converters, exceptions, filters, setters, validators +from ._cmp import cmp_using +from ._config import get_run_validators, set_run_validators +from ._funcs import asdict, assoc, astuple, evolve, has, resolve_types +from ._make import ( + NOTHING, + Attribute, + Factory, + attrib, + attrs, + fields, + fields_dict, + make_class, + validate, +) +from ._version_info import VersionInfo + + +__version__ = "21.4.0" +__version_info__ = VersionInfo._from_version_string(__version__) + +__title__ = "attrs" +__description__ = "Classes Without Boilerplate" +__url__ = "https://www.attrs.org/" +__uri__ = __url__ +__doc__ = __description__ + " <" + __uri__ + ">" + +__author__ = "Hynek Schlawack" +__email__ = "hs@ox.cx" + +__license__ = "MIT" +__copyright__ = "Copyright (c) 2015 Hynek Schlawack" + + +s = attributes = attrs +ib = attr = attrib +dataclass = partial(attrs, auto_attribs=True) # happy Easter ;) + +__all__ = [ + "Attribute", + "Factory", + "NOTHING", + "asdict", + "assoc", + "astuple", + "attr", + "attrib", + "attributes", + "attrs", + "cmp_using", + "converters", + "evolve", + "exceptions", + "fields", 
+ "fields_dict", + "filters", + "get_run_validators", + "has", + "ib", + "make_class", + "resolve_types", + "s", + "set_run_validators", + "setters", + "validate", + "validators", +] + +if sys.version_info[:2] >= (3, 6): + from ._next_gen import define, field, frozen, mutable # noqa: F401 + + __all__.extend(("define", "field", "frozen", "mutable")) diff --git a/.venv/lib/python3.9/site-packages/attr/__init__.pyi b/.venv/lib/python3.9/site-packages/attr/__init__.pyi new file mode 100644 index 0000000..c0a2126 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/attr/__init__.pyi @@ -0,0 +1,484 @@ +import sys + +from typing import ( + Any, + Callable, + Dict, + Generic, + List, + Mapping, + Optional, + Sequence, + Tuple, + Type, + TypeVar, + Union, + overload, +) + +# `import X as X` is required to make these public +from . import converters as converters +from . import exceptions as exceptions +from . import filters as filters +from . import setters as setters +from . import validators as validators +from ._version_info import VersionInfo + +__version__: str +__version_info__: VersionInfo +__title__: str +__description__: str +__url__: str +__uri__: str +__author__: str +__email__: str +__license__: str +__copyright__: str + +_T = TypeVar("_T") +_C = TypeVar("_C", bound=type) + +_EqOrderType = Union[bool, Callable[[Any], Any]] +_ValidatorType = Callable[[Any, Attribute[_T], _T], Any] +_ConverterType = Callable[[Any], Any] +_FilterType = Callable[[Attribute[_T], _T], bool] +_ReprType = Callable[[Any], str] +_ReprArgType = Union[bool, _ReprType] +_OnSetAttrType = Callable[[Any, Attribute[Any], Any], Any] +_OnSetAttrArgType = Union[ + _OnSetAttrType, List[_OnSetAttrType], setters._NoOpType +] +_FieldTransformer = Callable[ + [type, List[Attribute[Any]]], List[Attribute[Any]] +] +_CompareWithType = Callable[[Any, Any], bool] +# FIXME: in reality, if multiple validators are passed they must be in a list +# or tuple, but those are invariant and so would prevent subtypes 
of +# _ValidatorType from working when passed in a list or tuple. +_ValidatorArgType = Union[_ValidatorType[_T], Sequence[_ValidatorType[_T]]] + +# _make -- + +NOTHING: object + +# NOTE: Factory lies about its return type to make this possible: +# `x: List[int] # = Factory(list)` +# Work around mypy issue #4554 in the common case by using an overload. +if sys.version_info >= (3, 8): + from typing import Literal + @overload + def Factory(factory: Callable[[], _T]) -> _T: ... + @overload + def Factory( + factory: Callable[[Any], _T], + takes_self: Literal[True], + ) -> _T: ... + @overload + def Factory( + factory: Callable[[], _T], + takes_self: Literal[False], + ) -> _T: ... + +else: + @overload + def Factory(factory: Callable[[], _T]) -> _T: ... + @overload + def Factory( + factory: Union[Callable[[Any], _T], Callable[[], _T]], + takes_self: bool = ..., + ) -> _T: ... + +# Static type inference support via __dataclass_transform__ implemented as per: +# https://github.com/microsoft/pyright/blob/1.1.135/specs/dataclass_transforms.md +# This annotation must be applied to all overloads of "define" and "attrs" +# +# NOTE: This is a typing construct and does not exist at runtime. Extensions +# wrapping attrs decorators should declare a separate __dataclass_transform__ +# signature in the extension module using the specification linked above to +# provide pyright support. +def __dataclass_transform__( + *, + eq_default: bool = True, + order_default: bool = False, + kw_only_default: bool = False, + field_descriptors: Tuple[Union[type, Callable[..., Any]], ...] = (()), +) -> Callable[[_T], _T]: ... 
+ +class Attribute(Generic[_T]): + name: str + default: Optional[_T] + validator: Optional[_ValidatorType[_T]] + repr: _ReprArgType + cmp: _EqOrderType + eq: _EqOrderType + order: _EqOrderType + hash: Optional[bool] + init: bool + converter: Optional[_ConverterType] + metadata: Dict[Any, Any] + type: Optional[Type[_T]] + kw_only: bool + on_setattr: _OnSetAttrType + def evolve(self, **changes: Any) -> "Attribute[Any]": ... + +# NOTE: We had several choices for the annotation to use for type arg: +# 1) Type[_T] +# - Pros: Handles simple cases correctly +# - Cons: Might produce less informative errors in the case of conflicting +# TypeVars e.g. `attr.ib(default='bad', type=int)` +# 2) Callable[..., _T] +# - Pros: Better error messages than #1 for conflicting TypeVars +# - Cons: Terrible error messages for validator checks. +# e.g. attr.ib(type=int, validator=validate_str) +# -> error: Cannot infer function type argument +# 3) type (and do all of the work in the mypy plugin) +# - Pros: Simple here, and we could customize the plugin with our own errors. +# - Cons: Would need to write mypy plugin code to handle all the cases. +# We chose option #1. + +# `attr` lies about its return type to make the following possible: +# attr() -> Any +# attr(8) -> int +# attr(validator=) -> Whatever the callable expects. +# This makes this type of assignments possible: +# x: int = attr(8) +# +# This form catches explicit None or no default but with no other arguments +# returns Any. +@overload +def attrib( + default: None = ..., + validator: None = ..., + repr: _ReprArgType = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: None = ..., + converter: None = ..., + factory: None = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> Any: ... 
+ +# This form catches an explicit None or no default and infers the type from the +# other arguments. +@overload +def attrib( + default: None = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: Optional[Type[_T]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> _T: ... + +# This form catches an explicit default argument. +@overload +def attrib( + default: _T, + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: Optional[Type[_T]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> _T: ... + +# This form covers type=non-Type: e.g. forward references (str), Any +@overload +def attrib( + default: Optional[_T] = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: object = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> Any: ... 
+@overload +def field( + *, + default: None = ..., + validator: None = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: None = ..., + factory: None = ..., + kw_only: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> Any: ... + +# This form catches an explicit None or no default and infers the type from the +# other arguments. +@overload +def field( + *, + default: None = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> _T: ... + +# This form catches an explicit default argument. +@overload +def field( + *, + default: _T, + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> _T: ... + +# This form covers type=non-Type: e.g. 
forward references (str), Any +@overload +def field( + *, + default: Optional[_T] = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> Any: ... +@overload +@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field)) +def attrs( + maybe_cls: _C, + these: Optional[Dict[str, Any]] = ..., + repr_ns: Optional[str] = ..., + repr: bool = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + auto_detect: bool = ..., + collect_by_mro: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., + match_args: bool = ..., +) -> _C: ... 
+@overload +@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field)) +def attrs( + maybe_cls: None = ..., + these: Optional[Dict[str, Any]] = ..., + repr_ns: Optional[str] = ..., + repr: bool = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + auto_detect: bool = ..., + collect_by_mro: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., + match_args: bool = ..., +) -> Callable[[_C], _C]: ... +@overload +@__dataclass_transform__(field_descriptors=(attrib, field)) +def define( + maybe_cls: _C, + *, + these: Optional[Dict[str, Any]] = ..., + repr: bool = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + auto_detect: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., + match_args: bool = ..., +) -> _C: ... 
+@overload +@__dataclass_transform__(field_descriptors=(attrib, field)) +def define( + maybe_cls: None = ..., + *, + these: Optional[Dict[str, Any]] = ..., + repr: bool = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + auto_detect: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., + match_args: bool = ..., +) -> Callable[[_C], _C]: ... + +mutable = define +frozen = define # they differ only in their defaults + +# TODO: add support for returning NamedTuple from the mypy plugin +class _Fields(Tuple[Attribute[Any], ...]): + def __getattr__(self, name: str) -> Attribute[Any]: ... + +def fields(cls: type) -> _Fields: ... +def fields_dict(cls: type) -> Dict[str, Attribute[Any]]: ... +def validate(inst: Any) -> None: ... +def resolve_types( + cls: _C, + globalns: Optional[Dict[str, Any]] = ..., + localns: Optional[Dict[str, Any]] = ..., + attribs: Optional[List[Attribute[Any]]] = ..., +) -> _C: ... + +# TODO: add support for returning a proper attrs class from the mypy plugin +# we use Any instead of _CountingAttr so that e.g. `make_class('Foo', +# [attr.ib()])` is valid +def make_class( + name: str, + attrs: Union[List[str], Tuple[str, ...], Dict[str, Any]], + bases: Tuple[type, ...] 
= ..., + repr_ns: Optional[str] = ..., + repr: bool = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + collect_by_mro: bool = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., +) -> type: ... + +# _funcs -- + +# TODO: add support for returning TypedDict from the mypy plugin +# FIXME: asdict/astuple do not honor their factory args. Waiting on one of +# these: +# https://github.com/python/mypy/issues/4236 +# https://github.com/python/typing/issues/253 +# XXX: remember to fix attrs.asdict/astuple too! +def asdict( + inst: Any, + recurse: bool = ..., + filter: Optional[_FilterType[Any]] = ..., + dict_factory: Type[Mapping[Any, Any]] = ..., + retain_collection_types: bool = ..., + value_serializer: Optional[ + Callable[[type, Attribute[Any], Any], Any] + ] = ..., + tuple_keys: Optional[bool] = ..., +) -> Dict[str, Any]: ... + +# TODO: add support for returning NamedTuple from the mypy plugin +def astuple( + inst: Any, + recurse: bool = ..., + filter: Optional[_FilterType[Any]] = ..., + tuple_factory: Type[Sequence[Any]] = ..., + retain_collection_types: bool = ..., +) -> Tuple[Any, ...]: ... +def has(cls: type) -> bool: ... +def assoc(inst: _T, **changes: Any) -> _T: ... +def evolve(inst: _T, **changes: Any) -> _T: ... + +# _config -- + +def set_run_validators(run: bool) -> None: ... +def get_run_validators() -> bool: ... 
+ +# aliases -- + +s = attributes = attrs +ib = attr = attrib +dataclass = attrs # Technically, partial(attrs, auto_attribs=True) ;) diff --git a/.venv/lib/python3.9/site-packages/attr/_cmp.py b/.venv/lib/python3.9/site-packages/attr/_cmp.py new file mode 100644 index 0000000..6cffa4d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/attr/_cmp.py @@ -0,0 +1,154 @@ +# SPDX-License-Identifier: MIT + +from __future__ import absolute_import, division, print_function + +import functools + +from ._compat import new_class +from ._make import _make_ne + + +_operation_names = {"eq": "==", "lt": "<", "le": "<=", "gt": ">", "ge": ">="} + + +def cmp_using( + eq=None, + lt=None, + le=None, + gt=None, + ge=None, + require_same_type=True, + class_name="Comparable", +): + """ + Create a class that can be passed into `attr.ib`'s ``eq``, ``order``, and + ``cmp`` arguments to customize field comparison. + + The resulting class will have a full set of ordering methods if + at least one of ``{lt, le, gt, ge}`` and ``eq`` are provided. + + :param Optional[callable] eq: `callable` used to evaluate equality + of two objects. + :param Optional[callable] lt: `callable` used to evaluate whether + one object is less than another object. + :param Optional[callable] le: `callable` used to evaluate whether + one object is less than or equal to another object. + :param Optional[callable] gt: `callable` used to evaluate whether + one object is greater than another object. + :param Optional[callable] ge: `callable` used to evaluate whether + one object is greater than or equal to another object. + + :param bool require_same_type: When `True`, equality and ordering methods + will return `NotImplemented` if objects are not of the same type. + + :param Optional[str] class_name: Name of class. Defaults to 'Comparable'. + + See `comparison` for more details. + + .. 
versionadded:: 21.1.0 + """ + + body = { + "__slots__": ["value"], + "__init__": _make_init(), + "_requirements": [], + "_is_comparable_to": _is_comparable_to, + } + + # Add operations. + num_order_functions = 0 + has_eq_function = False + + if eq is not None: + has_eq_function = True + body["__eq__"] = _make_operator("eq", eq) + body["__ne__"] = _make_ne() + + if lt is not None: + num_order_functions += 1 + body["__lt__"] = _make_operator("lt", lt) + + if le is not None: + num_order_functions += 1 + body["__le__"] = _make_operator("le", le) + + if gt is not None: + num_order_functions += 1 + body["__gt__"] = _make_operator("gt", gt) + + if ge is not None: + num_order_functions += 1 + body["__ge__"] = _make_operator("ge", ge) + + type_ = new_class(class_name, (object,), {}, lambda ns: ns.update(body)) + + # Add same type requirement. + if require_same_type: + type_._requirements.append(_check_same_type) + + # Add total ordering if at least one operation was defined. + if 0 < num_order_functions < 4: + if not has_eq_function: + # functools.total_ordering requires __eq__ to be defined, + # so raise early error here to keep a nice stack. + raise ValueError( + "eq must be define is order to complete ordering from " + "lt, le, gt, ge." + ) + type_ = functools.total_ordering(type_) + + return type_ + + +def _make_init(): + """ + Create __init__ method. + """ + + def __init__(self, value): + """ + Initialize object with *value*. + """ + self.value = value + + return __init__ + + +def _make_operator(name, func): + """ + Create operator method. + """ + + def method(self, other): + if not self._is_comparable_to(other): + return NotImplemented + + result = func(self.value, other.value) + if result is NotImplemented: + return NotImplemented + + return result + + method.__name__ = "__%s__" % (name,) + method.__doc__ = "Return a %s b. Computed by attrs." 
% ( + _operation_names[name], + ) + + return method + + +def _is_comparable_to(self, other): + """ + Check whether `other` is comparable to `self`. + """ + for func in self._requirements: + if not func(self, other): + return False + return True + + +def _check_same_type(self, other): + """ + Return True if *self* and *other* are of the same type, False otherwise. + """ + return other.value.__class__ is self.value.__class__ diff --git a/.venv/lib/python3.9/site-packages/attr/_cmp.pyi b/.venv/lib/python3.9/site-packages/attr/_cmp.pyi new file mode 100644 index 0000000..e71aaff --- /dev/null +++ b/.venv/lib/python3.9/site-packages/attr/_cmp.pyi @@ -0,0 +1,13 @@ +from typing import Type + +from . import _CompareWithType + +def cmp_using( + eq: Optional[_CompareWithType], + lt: Optional[_CompareWithType], + le: Optional[_CompareWithType], + gt: Optional[_CompareWithType], + ge: Optional[_CompareWithType], + require_same_type: bool, + class_name: str, +) -> Type: ... diff --git a/.venv/lib/python3.9/site-packages/attr/_compat.py b/.venv/lib/python3.9/site-packages/attr/_compat.py new file mode 100644 index 0000000..dc0cb02 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/attr/_compat.py @@ -0,0 +1,261 @@ +# SPDX-License-Identifier: MIT + +from __future__ import absolute_import, division, print_function + +import platform +import sys +import threading +import types +import warnings + + +PY2 = sys.version_info[0] == 2 +PYPY = platform.python_implementation() == "PyPy" +PY36 = sys.version_info[:2] >= (3, 6) +HAS_F_STRINGS = PY36 +PY310 = sys.version_info[:2] >= (3, 10) + + +if PYPY or PY36: + ordered_dict = dict +else: + from collections import OrderedDict + + ordered_dict = OrderedDict + + +if PY2: + from collections import Mapping, Sequence + + from UserDict import IterableUserDict + + # We 'bundle' isclass instead of using inspect as importing inspect is + # fairly expensive (order of 10-15 ms for a modern machine in 2016) + def isclass(klass): + return 
isinstance(klass, (type, types.ClassType)) + + def new_class(name, bases, kwds, exec_body): + """ + A minimal stub of types.new_class that we need for make_class. + """ + ns = {} + exec_body(ns) + + return type(name, bases, ns) + + # TYPE is used in exceptions, repr(int) is different on Python 2 and 3. + TYPE = "type" + + def iteritems(d): + return d.iteritems() + + # Python 2 is bereft of a read-only dict proxy, so we make one! + class ReadOnlyDict(IterableUserDict): + """ + Best-effort read-only dict wrapper. + """ + + def __setitem__(self, key, val): + # We gently pretend we're a Python 3 mappingproxy. + raise TypeError( + "'mappingproxy' object does not support item assignment" + ) + + def update(self, _): + # We gently pretend we're a Python 3 mappingproxy. + raise AttributeError( + "'mappingproxy' object has no attribute 'update'" + ) + + def __delitem__(self, _): + # We gently pretend we're a Python 3 mappingproxy. + raise TypeError( + "'mappingproxy' object does not support item deletion" + ) + + def clear(self): + # We gently pretend we're a Python 3 mappingproxy. + raise AttributeError( + "'mappingproxy' object has no attribute 'clear'" + ) + + def pop(self, key, default=None): + # We gently pretend we're a Python 3 mappingproxy. + raise AttributeError( + "'mappingproxy' object has no attribute 'pop'" + ) + + def popitem(self): + # We gently pretend we're a Python 3 mappingproxy. + raise AttributeError( + "'mappingproxy' object has no attribute 'popitem'" + ) + + def setdefault(self, key, default=None): + # We gently pretend we're a Python 3 mappingproxy. + raise AttributeError( + "'mappingproxy' object has no attribute 'setdefault'" + ) + + def __repr__(self): + # Override to be identical to the Python 3 version. + return "mappingproxy(" + repr(self.data) + ")" + + def metadata_proxy(d): + res = ReadOnlyDict() + res.data.update(d) # We blocked update, so we have to do it like this. 
+ return res + + def just_warn(*args, **kw): # pragma: no cover + """ + We only warn on Python 3 because we are not aware of any concrete + consequences of not setting the cell on Python 2. + """ + +else: # Python 3 and later. + from collections.abc import Mapping, Sequence # noqa + + def just_warn(*args, **kw): + """ + We only warn on Python 3 because we are not aware of any concrete + consequences of not setting the cell on Python 2. + """ + warnings.warn( + "Running interpreter doesn't sufficiently support code object " + "introspection. Some features like bare super() or accessing " + "__class__ will not work with slotted classes.", + RuntimeWarning, + stacklevel=2, + ) + + def isclass(klass): + return isinstance(klass, type) + + TYPE = "class" + + def iteritems(d): + return d.items() + + new_class = types.new_class + + def metadata_proxy(d): + return types.MappingProxyType(dict(d)) + + +def make_set_closure_cell(): + """Return a function of two arguments (cell, value) which sets + the value stored in the closure cell `cell` to `value`. + """ + # pypy makes this easy. (It also supports the logic below, but + # why not do the easy/fast thing?) + if PYPY: + + def set_closure_cell(cell, value): + cell.__setstate__((value,)) + + return set_closure_cell + + # Otherwise gotta do it the hard way. + + # Create a function that will set its first cellvar to `value`. + def set_first_cellvar_to(value): + x = value + return + + # This function will be eliminated as dead code, but + # not before its reference to `x` forces `x` to be + # represented as a closure cell rather than a local. + def force_x_to_be_a_cell(): # pragma: no cover + return x + + try: + # Extract the code object and make sure our assumptions about + # the closure behavior are correct. 
+ if PY2: + co = set_first_cellvar_to.func_code + else: + co = set_first_cellvar_to.__code__ + if co.co_cellvars != ("x",) or co.co_freevars != (): + raise AssertionError # pragma: no cover + + # Convert this code object to a code object that sets the + # function's first _freevar_ (not cellvar) to the argument. + if sys.version_info >= (3, 8): + # CPython 3.8+ has an incompatible CodeType signature + # (added a posonlyargcount argument) but also added + # CodeType.replace() to do this without counting parameters. + set_first_freevar_code = co.replace( + co_cellvars=co.co_freevars, co_freevars=co.co_cellvars + ) + else: + args = [co.co_argcount] + if not PY2: + args.append(co.co_kwonlyargcount) + args.extend( + [ + co.co_nlocals, + co.co_stacksize, + co.co_flags, + co.co_code, + co.co_consts, + co.co_names, + co.co_varnames, + co.co_filename, + co.co_name, + co.co_firstlineno, + co.co_lnotab, + # These two arguments are reversed: + co.co_cellvars, + co.co_freevars, + ] + ) + set_first_freevar_code = types.CodeType(*args) + + def set_closure_cell(cell, value): + # Create a function using the set_first_freevar_code, + # whose first closure cell is `cell`. Calling it will + # change the value of that cell. + setter = types.FunctionType( + set_first_freevar_code, {}, "setter", (), (cell,) + ) + # And call it to set the cell. + setter(value) + + # Make sure it works on this interpreter: + def make_func_with_cell(): + x = None + + def func(): + return x # pragma: no cover + + return func + + if PY2: + cell = make_func_with_cell().func_closure[0] + else: + cell = make_func_with_cell().__closure__[0] + set_closure_cell(cell, 100) + if cell.cell_contents != 100: + raise AssertionError # pragma: no cover + + except Exception: + return just_warn + else: + return set_closure_cell + + +set_closure_cell = make_set_closure_cell() + +# Thread-local global to track attrs instances which are already being repr'd. 
+# This is needed because there is no other (thread-safe) way to pass info +# about the instances that are already being repr'd through the call stack +# in order to ensure we don't perform infinite recursion. +# +# For instance, if an instance contains a dict which contains that instance, +# we need to know that we're already repr'ing the outside instance from within +# the dict's repr() call. +# +# This lives here rather than in _make.py so that the functions in _make.py +# don't have a direct reference to the thread-local in their globals dict. +# If they have such a reference, it breaks cloudpickle. +repr_context = threading.local() diff --git a/.venv/lib/python3.9/site-packages/attr/_config.py b/.venv/lib/python3.9/site-packages/attr/_config.py new file mode 100644 index 0000000..fc9be29 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/attr/_config.py @@ -0,0 +1,33 @@ +# SPDX-License-Identifier: MIT + +from __future__ import absolute_import, division, print_function + + +__all__ = ["set_run_validators", "get_run_validators"] + +_run_validators = True + + +def set_run_validators(run): + """ + Set whether or not validators are run. By default, they are run. + + .. deprecated:: 21.3.0 It will not be removed, but it also will not be + moved to new ``attrs`` namespace. Use `attrs.validators.set_disabled()` + instead. + """ + if not isinstance(run, bool): + raise TypeError("'run' must be bool.") + global _run_validators + _run_validators = run + + +def get_run_validators(): + """ + Return whether or not validators are run. + + .. deprecated:: 21.3.0 It will not be removed, but it also will not be + moved to new ``attrs`` namespace. Use `attrs.validators.get_disabled()` + instead. 
+ """ + return _run_validators diff --git a/.venv/lib/python3.9/site-packages/attr/_funcs.py b/.venv/lib/python3.9/site-packages/attr/_funcs.py new file mode 100644 index 0000000..4c90085 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/attr/_funcs.py @@ -0,0 +1,422 @@ +# SPDX-License-Identifier: MIT + +from __future__ import absolute_import, division, print_function + +import copy + +from ._compat import iteritems +from ._make import NOTHING, _obj_setattr, fields +from .exceptions import AttrsAttributeNotFoundError + + +def asdict( + inst, + recurse=True, + filter=None, + dict_factory=dict, + retain_collection_types=False, + value_serializer=None, +): + """ + Return the ``attrs`` attribute values of *inst* as a dict. + + Optionally recurse into other ``attrs``-decorated classes. + + :param inst: Instance of an ``attrs``-decorated class. + :param bool recurse: Recurse into classes that are also + ``attrs``-decorated. + :param callable filter: A callable whose return code determines whether an + attribute or element is included (``True``) or dropped (``False``). Is + called with the `attrs.Attribute` as the first argument and the + value as the second argument. + :param callable dict_factory: A callable to produce dictionaries from. For + example, to produce ordered dictionaries instead of normal Python + dictionaries, pass in ``collections.OrderedDict``. + :param bool retain_collection_types: Do not convert to ``list`` when + encountering an attribute whose type is ``tuple`` or ``set``. Only + meaningful if ``recurse`` is ``True``. + :param Optional[callable] value_serializer: A hook that is called for every + attribute or dict key/value. It receives the current instance, field + and value and must return the (updated) value. The hook is run *after* + the optional *filter* has been applied. + + :rtype: return type of *dict_factory* + + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + .. 
versionadded:: 16.0.0 *dict_factory* + .. versionadded:: 16.1.0 *retain_collection_types* + .. versionadded:: 20.3.0 *value_serializer* + .. versionadded:: 21.3.0 If a dict has a collection for a key, it is + serialized as a tuple. + """ + attrs = fields(inst.__class__) + rv = dict_factory() + for a in attrs: + v = getattr(inst, a.name) + if filter is not None and not filter(a, v): + continue + + if value_serializer is not None: + v = value_serializer(inst, a, v) + + if recurse is True: + if has(v.__class__): + rv[a.name] = asdict( + v, + recurse=True, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + elif isinstance(v, (tuple, list, set, frozenset)): + cf = v.__class__ if retain_collection_types is True else list + rv[a.name] = cf( + [ + _asdict_anything( + i, + is_key=False, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + for i in v + ] + ) + elif isinstance(v, dict): + df = dict_factory + rv[a.name] = df( + ( + _asdict_anything( + kk, + is_key=True, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + _asdict_anything( + vv, + is_key=False, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + ) + for kk, vv in iteritems(v) + ) + else: + rv[a.name] = v + else: + rv[a.name] = v + return rv + + +def _asdict_anything( + val, + is_key, + filter, + dict_factory, + retain_collection_types, + value_serializer, +): + """ + ``asdict`` only works on attrs instances, this works on anything. + """ + if getattr(val.__class__, "__attrs_attrs__", None) is not None: + # Attrs class. 
+ rv = asdict( + val, + recurse=True, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + elif isinstance(val, (tuple, list, set, frozenset)): + if retain_collection_types is True: + cf = val.__class__ + elif is_key: + cf = tuple + else: + cf = list + + rv = cf( + [ + _asdict_anything( + i, + is_key=False, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + for i in val + ] + ) + elif isinstance(val, dict): + df = dict_factory + rv = df( + ( + _asdict_anything( + kk, + is_key=True, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + _asdict_anything( + vv, + is_key=False, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + ) + for kk, vv in iteritems(val) + ) + else: + rv = val + if value_serializer is not None: + rv = value_serializer(None, None, rv) + + return rv + + +def astuple( + inst, + recurse=True, + filter=None, + tuple_factory=tuple, + retain_collection_types=False, +): + """ + Return the ``attrs`` attribute values of *inst* as a tuple. + + Optionally recurse into other ``attrs``-decorated classes. + + :param inst: Instance of an ``attrs``-decorated class. + :param bool recurse: Recurse into classes that are also + ``attrs``-decorated. + :param callable filter: A callable whose return code determines whether an + attribute or element is included (``True``) or dropped (``False``). Is + called with the `attrs.Attribute` as the first argument and the + value as the second argument. + :param callable tuple_factory: A callable to produce tuples from. For + example, to produce lists instead of tuples. 
+ :param bool retain_collection_types: Do not convert to ``list`` + or ``dict`` when encountering an attribute which type is + ``tuple``, ``dict`` or ``set``. Only meaningful if ``recurse`` is + ``True``. + + :rtype: return type of *tuple_factory* + + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + .. versionadded:: 16.2.0 + """ + attrs = fields(inst.__class__) + rv = [] + retain = retain_collection_types # Very long. :/ + for a in attrs: + v = getattr(inst, a.name) + if filter is not None and not filter(a, v): + continue + if recurse is True: + if has(v.__class__): + rv.append( + astuple( + v, + recurse=True, + filter=filter, + tuple_factory=tuple_factory, + retain_collection_types=retain, + ) + ) + elif isinstance(v, (tuple, list, set, frozenset)): + cf = v.__class__ if retain is True else list + rv.append( + cf( + [ + astuple( + j, + recurse=True, + filter=filter, + tuple_factory=tuple_factory, + retain_collection_types=retain, + ) + if has(j.__class__) + else j + for j in v + ] + ) + ) + elif isinstance(v, dict): + df = v.__class__ if retain is True else dict + rv.append( + df( + ( + astuple( + kk, + tuple_factory=tuple_factory, + retain_collection_types=retain, + ) + if has(kk.__class__) + else kk, + astuple( + vv, + tuple_factory=tuple_factory, + retain_collection_types=retain, + ) + if has(vv.__class__) + else vv, + ) + for kk, vv in iteritems(v) + ) + ) + else: + rv.append(v) + else: + rv.append(v) + + return rv if tuple_factory is list else tuple_factory(rv) + + +def has(cls): + """ + Check whether *cls* is a class with ``attrs`` attributes. + + :param type cls: Class to introspect. + :raise TypeError: If *cls* is not a class. + + :rtype: bool + """ + return getattr(cls, "__attrs_attrs__", None) is not None + + +def assoc(inst, **changes): + """ + Copy *inst* and apply *changes*. + + :param inst: Instance of a class with ``attrs`` attributes. + :param changes: Keyword changes in the new copy. 
+ + :return: A copy of inst with *changes* incorporated. + + :raise attr.exceptions.AttrsAttributeNotFoundError: If *attr_name* couldn't + be found on *cls*. + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + .. deprecated:: 17.1.0 + Use `attrs.evolve` instead if you can. + This function will not be removed du to the slightly different approach + compared to `attrs.evolve`. + """ + import warnings + + warnings.warn( + "assoc is deprecated and will be removed after 2018/01.", + DeprecationWarning, + stacklevel=2, + ) + new = copy.copy(inst) + attrs = fields(inst.__class__) + for k, v in iteritems(changes): + a = getattr(attrs, k, NOTHING) + if a is NOTHING: + raise AttrsAttributeNotFoundError( + "{k} is not an attrs attribute on {cl}.".format( + k=k, cl=new.__class__ + ) + ) + _obj_setattr(new, k, v) + return new + + +def evolve(inst, **changes): + """ + Create a new instance, based on *inst* with *changes* applied. + + :param inst: Instance of a class with ``attrs`` attributes. + :param changes: Keyword changes in the new copy. + + :return: A copy of inst with *changes* incorporated. + + :raise TypeError: If *attr_name* couldn't be found in the class + ``__init__``. + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + .. versionadded:: 17.1.0 + """ + cls = inst.__class__ + attrs = fields(cls) + for a in attrs: + if not a.init: + continue + attr_name = a.name # To deal with private attributes. + init_name = attr_name if attr_name[0] != "_" else attr_name[1:] + if init_name not in changes: + changes[init_name] = getattr(inst, attr_name) + + return cls(**changes) + + +def resolve_types(cls, globalns=None, localns=None, attribs=None): + """ + Resolve any strings and forward annotations in type annotations. + + This is only required if you need concrete types in `Attribute`'s *type* + field. In other words, you don't need to resolve your types if you only + use them for static type checking. 
+ + With no arguments, names will be looked up in the module in which the class + was created. If this is not what you want, e.g. if the name only exists + inside a method, you may pass *globalns* or *localns* to specify other + dictionaries in which to look up these names. See the docs of + `typing.get_type_hints` for more details. + + :param type cls: Class to resolve. + :param Optional[dict] globalns: Dictionary containing global variables. + :param Optional[dict] localns: Dictionary containing local variables. + :param Optional[list] attribs: List of attribs for the given class. + This is necessary when calling from inside a ``field_transformer`` + since *cls* is not an ``attrs`` class yet. + + :raise TypeError: If *cls* is not a class. + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class and you didn't pass any attribs. + :raise NameError: If types cannot be resolved because of missing variables. + + :returns: *cls* so you can use this function also as a class decorator. + Please note that you have to apply it **after** `attrs.define`. That + means the decorator has to come in the line **before** `attrs.define`. + + .. versionadded:: 20.1.0 + .. versionadded:: 21.1.0 *attribs* + + """ + # Since calling get_type_hints is expensive we cache whether we've + # done it already. + if getattr(cls, "__attrs_types_resolved__", None) != cls: + import typing + + hints = typing.get_type_hints(cls, globalns=globalns, localns=localns) + for field in fields(cls) if attribs is None else attribs: + if field.name in hints: + # Since fields have been frozen we must work around it. + _obj_setattr(field, "type", hints[field.name]) + # We store the class we resolved so that subclasses know they haven't + # been resolved. + cls.__attrs_types_resolved__ = cls + + # Return the class so you can use it as a decorator too. 
+ return cls diff --git a/.venv/lib/python3.9/site-packages/attr/_make.py b/.venv/lib/python3.9/site-packages/attr/_make.py new file mode 100644 index 0000000..d46f8a3 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/attr/_make.py @@ -0,0 +1,3173 @@ +# SPDX-License-Identifier: MIT + +from __future__ import absolute_import, division, print_function + +import copy +import inspect +import linecache +import sys +import warnings + +from operator import itemgetter + +# We need to import _compat itself in addition to the _compat members to avoid +# having the thread-local in the globals here. +from . import _compat, _config, setters +from ._compat import ( + HAS_F_STRINGS, + PY2, + PY310, + PYPY, + isclass, + iteritems, + metadata_proxy, + new_class, + ordered_dict, + set_closure_cell, +) +from .exceptions import ( + DefaultAlreadySetError, + FrozenInstanceError, + NotAnAttrsClassError, + PythonTooOldError, + UnannotatedAttributeError, +) + + +if not PY2: + import typing + + +# This is used at least twice, so cache it here. +_obj_setattr = object.__setattr__ +_init_converter_pat = "__attr_converter_%s" +_init_factory_pat = "__attr_factory_{}" +_tuple_property_pat = ( + " {attr_name} = _attrs_property(_attrs_itemgetter({index}))" +) +_classvar_prefixes = ( + "typing.ClassVar", + "t.ClassVar", + "ClassVar", + "typing_extensions.ClassVar", +) +# we don't use a double-underscore prefix because that triggers +# name mangling when trying to create a slot for the field +# (when slots=True) +_hash_cache_field = "_attrs_cached_hash" + +_empty_metadata_singleton = metadata_proxy({}) + +# Unique object for unequivocal getattr() defaults. +_sentinel = object() + +_ng_default_on_setattr = setters.pipe(setters.convert, setters.validate) + + +class _Nothing(object): + """ + Sentinel class to indicate the lack of a value when ``None`` is ambiguous. + + ``_Nothing`` is a singleton. There is only ever one of it. + + .. versionchanged:: 21.1.0 ``bool(NOTHING)`` is now False. 
+ """ + + _singleton = None + + def __new__(cls): + if _Nothing._singleton is None: + _Nothing._singleton = super(_Nothing, cls).__new__(cls) + return _Nothing._singleton + + def __repr__(self): + return "NOTHING" + + def __bool__(self): + return False + + def __len__(self): + return 0 # __bool__ for Python 2 + + +NOTHING = _Nothing() +""" +Sentinel to indicate the lack of a value when ``None`` is ambiguous. +""" + + +class _CacheHashWrapper(int): + """ + An integer subclass that pickles / copies as None + + This is used for non-slots classes with ``cache_hash=True``, to avoid + serializing a potentially (even likely) invalid hash value. Since ``None`` + is the default value for uncalculated hashes, whenever this is copied, + the copy's value for the hash should automatically reset. + + See GH #613 for more details. + """ + + if PY2: + # For some reason `type(None)` isn't callable in Python 2, but we don't + # actually need a constructor for None objects, we just need any + # available function that returns None. + def __reduce__(self, _none_constructor=getattr, _args=(0, "", None)): + return _none_constructor, _args + + else: + + def __reduce__(self, _none_constructor=type(None), _args=()): + return _none_constructor, _args + + +def attrib( + default=NOTHING, + validator=None, + repr=True, + cmp=None, + hash=None, + init=True, + metadata=None, + type=None, + converter=None, + factory=None, + kw_only=False, + eq=None, + order=None, + on_setattr=None, +): + """ + Create a new attribute on a class. + + .. warning:: + + Does *not* do anything unless the class is also decorated with + `attr.s`! + + :param default: A value that is used if an ``attrs``-generated ``__init__`` + is used and no value is passed while instantiating or the attribute is + excluded using ``init=False``. + + If the value is an instance of `attrs.Factory`, its callable will be + used to construct a new value (useful for mutable data types like lists + or dicts). 
+ + If a default is not set (or set manually to `attrs.NOTHING`), a value + *must* be supplied when instantiating; otherwise a `TypeError` + will be raised. + + The default can also be set using decorator notation as shown below. + + :type default: Any value + + :param callable factory: Syntactic sugar for + ``default=attr.Factory(factory)``. + + :param validator: `callable` that is called by ``attrs``-generated + ``__init__`` methods after the instance has been initialized. They + receive the initialized instance, the :func:`~attrs.Attribute`, and the + passed value. + + The return value is *not* inspected so the validator has to throw an + exception itself. + + If a `list` is passed, its items are treated as validators and must + all pass. + + Validators can be globally disabled and re-enabled using + `get_run_validators`. + + The validator can also be set using decorator notation as shown below. + + :type validator: `callable` or a `list` of `callable`\\ s. + + :param repr: Include this attribute in the generated ``__repr__`` + method. If ``True``, include the attribute; if ``False``, omit it. By + default, the built-in ``repr()`` function is used. To override how the + attribute value is formatted, pass a ``callable`` that takes a single + value and returns a string. Note that the resulting string is used + as-is, i.e. it will be used directly *instead* of calling ``repr()`` + (the default). + :type repr: a `bool` or a `callable` to use a custom function. + + :param eq: If ``True`` (default), include this attribute in the + generated ``__eq__`` and ``__ne__`` methods that check two instances + for equality. To override how the attribute value is compared, + pass a ``callable`` that takes a single value and returns the value + to be compared. + :type eq: a `bool` or a `callable`. + + :param order: If ``True`` (default), include this attributes in the + generated ``__lt__``, ``__le__``, ``__gt__`` and ``__ge__`` methods. 
+ To override how the attribute value is ordered, + pass a ``callable`` that takes a single value and returns the value + to be ordered. + :type order: a `bool` or a `callable`. + + :param cmp: Setting *cmp* is equivalent to setting *eq* and *order* to the + same value. Must not be mixed with *eq* or *order*. + :type cmp: a `bool` or a `callable`. + + :param Optional[bool] hash: Include this attribute in the generated + ``__hash__`` method. If ``None`` (default), mirror *eq*'s value. This + is the correct behavior according the Python spec. Setting this value + to anything else than ``None`` is *discouraged*. + :param bool init: Include this attribute in the generated ``__init__`` + method. It is possible to set this to ``False`` and set a default + value. In that case this attributed is unconditionally initialized + with the specified default value or factory. + :param callable converter: `callable` that is called by + ``attrs``-generated ``__init__`` methods to convert attribute's value + to the desired format. It is given the passed-in value, and the + returned value will be used as the new value of the attribute. The + value is converted before being passed to the validator, if any. + :param metadata: An arbitrary mapping, to be used by third-party + components. See `extending_metadata`. + :param type: The type of the attribute. In Python 3.6 or greater, the + preferred method to specify the type is using a variable annotation + (see `PEP 526 `_). + This argument is provided for backward compatibility. + Regardless of the approach used, the type will be stored on + ``Attribute.type``. + + Please note that ``attrs`` doesn't do anything with this metadata by + itself. You can use it as part of your own code or for + `static type checking `. + :param kw_only: Make this attribute keyword-only (Python 3+) + in the generated ``__init__`` (if ``init`` is ``False``, this + parameter is ignored). 
+ :param on_setattr: Allows to overwrite the *on_setattr* setting from + `attr.s`. If left `None`, the *on_setattr* value from `attr.s` is used. + Set to `attrs.setters.NO_OP` to run **no** `setattr` hooks for this + attribute -- regardless of the setting in `attr.s`. + :type on_setattr: `callable`, or a list of callables, or `None`, or + `attrs.setters.NO_OP` + + .. versionadded:: 15.2.0 *convert* + .. versionadded:: 16.3.0 *metadata* + .. versionchanged:: 17.1.0 *validator* can be a ``list`` now. + .. versionchanged:: 17.1.0 + *hash* is ``None`` and therefore mirrors *eq* by default. + .. versionadded:: 17.3.0 *type* + .. deprecated:: 17.4.0 *convert* + .. versionadded:: 17.4.0 *converter* as a replacement for the deprecated + *convert* to achieve consistency with other noun-based arguments. + .. versionadded:: 18.1.0 + ``factory=f`` is syntactic sugar for ``default=attr.Factory(f)``. + .. versionadded:: 18.2.0 *kw_only* + .. versionchanged:: 19.2.0 *convert* keyword argument removed. + .. versionchanged:: 19.2.0 *repr* also accepts a custom callable. + .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01. + .. versionadded:: 19.2.0 *eq* and *order* + .. versionadded:: 20.1.0 *on_setattr* + .. versionchanged:: 20.3.0 *kw_only* backported to Python 2 + .. versionchanged:: 21.1.0 + *eq*, *order*, and *cmp* also accept a custom callable + .. versionchanged:: 21.1.0 *cmp* undeprecated + """ + eq, eq_key, order, order_key = _determine_attrib_eq_order( + cmp, eq, order, True + ) + + if hash is not None and hash is not True and hash is not False: + raise TypeError( + "Invalid value for hash. Must be True, False, or None." + ) + + if factory is not None: + if default is not NOTHING: + raise ValueError( + "The `default` and `factory` arguments are mutually " + "exclusive." 
+ ) + if not callable(factory): + raise ValueError("The `factory` argument must be a callable.") + default = Factory(factory) + + if metadata is None: + metadata = {} + + # Apply syntactic sugar by auto-wrapping. + if isinstance(on_setattr, (list, tuple)): + on_setattr = setters.pipe(*on_setattr) + + if validator and isinstance(validator, (list, tuple)): + validator = and_(*validator) + + if converter and isinstance(converter, (list, tuple)): + converter = pipe(*converter) + + return _CountingAttr( + default=default, + validator=validator, + repr=repr, + cmp=None, + hash=hash, + init=init, + converter=converter, + metadata=metadata, + type=type, + kw_only=kw_only, + eq=eq, + eq_key=eq_key, + order=order, + order_key=order_key, + on_setattr=on_setattr, + ) + + +def _compile_and_eval(script, globs, locs=None, filename=""): + """ + "Exec" the script with the given global (globs) and local (locs) variables. + """ + bytecode = compile(script, filename, "exec") + eval(bytecode, globs, locs) + + +def _make_method(name, script, filename, globs=None): + """ + Create the method with the script given and return the method object. + """ + locs = {} + if globs is None: + globs = {} + + # In order of debuggers like PDB being able to step through the code, + # we add a fake linecache entry. + count = 1 + base_filename = filename + while True: + linecache_tuple = ( + len(script), + None, + script.splitlines(True), + filename, + ) + old_val = linecache.cache.setdefault(filename, linecache_tuple) + if old_val == linecache_tuple: + break + else: + filename = "{}-{}>".format(base_filename[:-1], count) + count += 1 + + _compile_and_eval(script, globs, locs, filename) + + return locs[name] + + +def _make_attr_tuple_class(cls_name, attr_names): + """ + Create a tuple subclass to hold `Attribute`s for an `attrs` class. + + The subclass is a bare tuple with properties for names. 
+ + class MyClassAttributes(tuple): + __slots__ = () + x = property(itemgetter(0)) + """ + attr_class_name = "{}Attributes".format(cls_name) + attr_class_template = [ + "class {}(tuple):".format(attr_class_name), + " __slots__ = ()", + ] + if attr_names: + for i, attr_name in enumerate(attr_names): + attr_class_template.append( + _tuple_property_pat.format(index=i, attr_name=attr_name) + ) + else: + attr_class_template.append(" pass") + globs = {"_attrs_itemgetter": itemgetter, "_attrs_property": property} + _compile_and_eval("\n".join(attr_class_template), globs) + return globs[attr_class_name] + + +# Tuple class for extracted attributes from a class definition. +# `base_attrs` is a subset of `attrs`. +_Attributes = _make_attr_tuple_class( + "_Attributes", + [ + # all attributes to build dunder methods for + "attrs", + # attributes that have been inherited + "base_attrs", + # map inherited attributes to their originating classes + "base_attrs_map", + ], +) + + +def _is_class_var(annot): + """ + Check whether *annot* is a typing.ClassVar. + + The string comparison hack is used to avoid evaluating all string + annotations which would put attrs-based classes at a performance + disadvantage compared to plain old classes. + """ + annot = str(annot) + + # Annotation can be quoted. + if annot.startswith(("'", '"')) and annot.endswith(("'", '"')): + annot = annot[1:-1] + + return annot.startswith(_classvar_prefixes) + + +def _has_own_attribute(cls, attrib_name): + """ + Check whether *cls* defines *attrib_name* (and doesn't just inherit it). + + Requires Python 3. + """ + attr = getattr(cls, attrib_name, _sentinel) + if attr is _sentinel: + return False + + for base_cls in cls.__mro__[1:]: + a = getattr(base_cls, attrib_name, None) + if attr is a: + return False + + return True + + +def _get_annotations(cls): + """ + Get annotations for *cls*. 
+ """ + if _has_own_attribute(cls, "__annotations__"): + return cls.__annotations__ + + return {} + + +def _counter_getter(e): + """ + Key function for sorting to avoid re-creating a lambda for every class. + """ + return e[1].counter + + +def _collect_base_attrs(cls, taken_attr_names): + """ + Collect attr.ibs from base classes of *cls*, except *taken_attr_names*. + """ + base_attrs = [] + base_attr_map = {} # A dictionary of base attrs to their classes. + + # Traverse the MRO and collect attributes. + for base_cls in reversed(cls.__mro__[1:-1]): + for a in getattr(base_cls, "__attrs_attrs__", []): + if a.inherited or a.name in taken_attr_names: + continue + + a = a.evolve(inherited=True) + base_attrs.append(a) + base_attr_map[a.name] = base_cls + + # For each name, only keep the freshest definition i.e. the furthest at the + # back. base_attr_map is fine because it gets overwritten with every new + # instance. + filtered = [] + seen = set() + for a in reversed(base_attrs): + if a.name in seen: + continue + filtered.insert(0, a) + seen.add(a.name) + + return filtered, base_attr_map + + +def _collect_base_attrs_broken(cls, taken_attr_names): + """ + Collect attr.ibs from base classes of *cls*, except *taken_attr_names*. + + N.B. *taken_attr_names* will be mutated. + + Adhere to the old incorrect behavior. + + Notably it collects from the front and considers inherited attributes which + leads to the buggy behavior reported in #428. + """ + base_attrs = [] + base_attr_map = {} # A dictionary of base attrs to their classes. + + # Traverse the MRO and collect attributes. 
+ for base_cls in cls.__mro__[1:-1]: + for a in getattr(base_cls, "__attrs_attrs__", []): + if a.name in taken_attr_names: + continue + + a = a.evolve(inherited=True) + taken_attr_names.add(a.name) + base_attrs.append(a) + base_attr_map[a.name] = base_cls + + return base_attrs, base_attr_map + + +def _transform_attrs( + cls, these, auto_attribs, kw_only, collect_by_mro, field_transformer +): + """ + Transform all `_CountingAttr`s on a class into `Attribute`s. + + If *these* is passed, use that and don't look for them on the class. + + *collect_by_mro* is True, collect them in the correct MRO order, otherwise + use the old -- incorrect -- order. See #428. + + Return an `_Attributes`. + """ + cd = cls.__dict__ + anns = _get_annotations(cls) + + if these is not None: + ca_list = [(name, ca) for name, ca in iteritems(these)] + + if not isinstance(these, ordered_dict): + ca_list.sort(key=_counter_getter) + elif auto_attribs is True: + ca_names = { + name + for name, attr in cd.items() + if isinstance(attr, _CountingAttr) + } + ca_list = [] + annot_names = set() + for attr_name, type in anns.items(): + if _is_class_var(type): + continue + annot_names.add(attr_name) + a = cd.get(attr_name, NOTHING) + + if not isinstance(a, _CountingAttr): + if a is NOTHING: + a = attrib() + else: + a = attrib(default=a) + ca_list.append((attr_name, a)) + + unannotated = ca_names - annot_names + if len(unannotated) > 0: + raise UnannotatedAttributeError( + "The following `attr.ib`s lack a type annotation: " + + ", ".join( + sorted(unannotated, key=lambda n: cd.get(n).counter) + ) + + "." 
+ ) + else: + ca_list = sorted( + ( + (name, attr) + for name, attr in cd.items() + if isinstance(attr, _CountingAttr) + ), + key=lambda e: e[1].counter, + ) + + own_attrs = [ + Attribute.from_counting_attr( + name=attr_name, ca=ca, type=anns.get(attr_name) + ) + for attr_name, ca in ca_list + ] + + if collect_by_mro: + base_attrs, base_attr_map = _collect_base_attrs( + cls, {a.name for a in own_attrs} + ) + else: + base_attrs, base_attr_map = _collect_base_attrs_broken( + cls, {a.name for a in own_attrs} + ) + + if kw_only: + own_attrs = [a.evolve(kw_only=True) for a in own_attrs] + base_attrs = [a.evolve(kw_only=True) for a in base_attrs] + + attrs = base_attrs + own_attrs + + # Mandatory vs non-mandatory attr order only matters when they are part of + # the __init__ signature and when they aren't kw_only (which are moved to + # the end and can be mandatory or non-mandatory in any order, as they will + # be specified as keyword args anyway). Check the order of those attrs: + had_default = False + for a in (a for a in attrs if a.init is not False and a.kw_only is False): + if had_default is True and a.default is NOTHING: + raise ValueError( + "No mandatory attributes allowed after an attribute with a " + "default value or factory. Attribute in question: %r" % (a,) + ) + + if had_default is False and a.default is not NOTHING: + had_default = True + + if field_transformer is not None: + attrs = field_transformer(cls, attrs) + + # Create AttrsClass *after* applying the field_transformer since it may + # add or remove attributes! + attr_names = [a.name for a in attrs] + AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names) + + return _Attributes((AttrsClass(attrs), base_attrs, base_attr_map)) + + +if PYPY: + + def _frozen_setattrs(self, name, value): + """ + Attached to frozen classes as __setattr__. 
+ """ + if isinstance(self, BaseException) and name in ( + "__cause__", + "__context__", + ): + BaseException.__setattr__(self, name, value) + return + + raise FrozenInstanceError() + +else: + + def _frozen_setattrs(self, name, value): + """ + Attached to frozen classes as __setattr__. + """ + raise FrozenInstanceError() + + +def _frozen_delattrs(self, name): + """ + Attached to frozen classes as __delattr__. + """ + raise FrozenInstanceError() + + +class _ClassBuilder(object): + """ + Iteratively build *one* class. + """ + + __slots__ = ( + "_attr_names", + "_attrs", + "_base_attr_map", + "_base_names", + "_cache_hash", + "_cls", + "_cls_dict", + "_delete_attribs", + "_frozen", + "_has_pre_init", + "_has_post_init", + "_is_exc", + "_on_setattr", + "_slots", + "_weakref_slot", + "_wrote_own_setattr", + "_has_custom_setattr", + ) + + def __init__( + self, + cls, + these, + slots, + frozen, + weakref_slot, + getstate_setstate, + auto_attribs, + kw_only, + cache_hash, + is_exc, + collect_by_mro, + on_setattr, + has_custom_setattr, + field_transformer, + ): + attrs, base_attrs, base_map = _transform_attrs( + cls, + these, + auto_attribs, + kw_only, + collect_by_mro, + field_transformer, + ) + + self._cls = cls + self._cls_dict = dict(cls.__dict__) if slots else {} + self._attrs = attrs + self._base_names = set(a.name for a in base_attrs) + self._base_attr_map = base_map + self._attr_names = tuple(a.name for a in attrs) + self._slots = slots + self._frozen = frozen + self._weakref_slot = weakref_slot + self._cache_hash = cache_hash + self._has_pre_init = bool(getattr(cls, "__attrs_pre_init__", False)) + self._has_post_init = bool(getattr(cls, "__attrs_post_init__", False)) + self._delete_attribs = not bool(these) + self._is_exc = is_exc + self._on_setattr = on_setattr + + self._has_custom_setattr = has_custom_setattr + self._wrote_own_setattr = False + + self._cls_dict["__attrs_attrs__"] = self._attrs + + if frozen: + self._cls_dict["__setattr__"] = _frozen_setattrs + 
self._cls_dict["__delattr__"] = _frozen_delattrs + + self._wrote_own_setattr = True + elif on_setattr in ( + _ng_default_on_setattr, + setters.validate, + setters.convert, + ): + has_validator = has_converter = False + for a in attrs: + if a.validator is not None: + has_validator = True + if a.converter is not None: + has_converter = True + + if has_validator and has_converter: + break + if ( + ( + on_setattr == _ng_default_on_setattr + and not (has_validator or has_converter) + ) + or (on_setattr == setters.validate and not has_validator) + or (on_setattr == setters.convert and not has_converter) + ): + # If class-level on_setattr is set to convert + validate, but + # there's no field to convert or validate, pretend like there's + # no on_setattr. + self._on_setattr = None + + if getstate_setstate: + ( + self._cls_dict["__getstate__"], + self._cls_dict["__setstate__"], + ) = self._make_getstate_setstate() + + def __repr__(self): + return "<_ClassBuilder(cls={cls})>".format(cls=self._cls.__name__) + + def build_class(self): + """ + Finalize class based on the accumulated configuration. + + Builder cannot be used after calling this method. + """ + if self._slots is True: + return self._create_slots_class() + else: + return self._patch_original_class() + + def _patch_original_class(self): + """ + Apply accumulated methods and return the class. + """ + cls = self._cls + base_names = self._base_names + + # Clean class of attribute definitions (`attr.ib()`s). + if self._delete_attribs: + for name in self._attr_names: + if ( + name not in base_names + and getattr(cls, name, _sentinel) is not _sentinel + ): + try: + delattr(cls, name) + except AttributeError: + # This can happen if a base class defines a class + # variable and we want to set an attribute with the + # same name by using only a type annotation. + pass + + # Attach our dunder methods. 
+ for name, value in self._cls_dict.items(): + setattr(cls, name, value) + + # If we've inherited an attrs __setattr__ and don't write our own, + # reset it to object's. + if not self._wrote_own_setattr and getattr( + cls, "__attrs_own_setattr__", False + ): + cls.__attrs_own_setattr__ = False + + if not self._has_custom_setattr: + cls.__setattr__ = object.__setattr__ + + return cls + + def _create_slots_class(self): + """ + Build and return a new class with a `__slots__` attribute. + """ + cd = { + k: v + for k, v in iteritems(self._cls_dict) + if k not in tuple(self._attr_names) + ("__dict__", "__weakref__") + } + + # If our class doesn't have its own implementation of __setattr__ + # (either from the user or by us), check the bases, if one of them has + # an attrs-made __setattr__, that needs to be reset. We don't walk the + # MRO because we only care about our immediate base classes. + # XXX: This can be confused by subclassing a slotted attrs class with + # XXX: a non-attrs class and subclass the resulting class with an attrs + # XXX: class. See `test_slotted_confused` for details. For now that's + # XXX: OK with us. + if not self._wrote_own_setattr: + cd["__attrs_own_setattr__"] = False + + if not self._has_custom_setattr: + for base_cls in self._cls.__bases__: + if base_cls.__dict__.get("__attrs_own_setattr__", False): + cd["__setattr__"] = object.__setattr__ + break + + # Traverse the MRO to collect existing slots + # and check for an existing __weakref__. 
+ existing_slots = dict() + weakref_inherited = False + for base_cls in self._cls.__mro__[1:-1]: + if base_cls.__dict__.get("__weakref__", None) is not None: + weakref_inherited = True + existing_slots.update( + { + name: getattr(base_cls, name) + for name in getattr(base_cls, "__slots__", []) + } + ) + + base_names = set(self._base_names) + + names = self._attr_names + if ( + self._weakref_slot + and "__weakref__" not in getattr(self._cls, "__slots__", ()) + and "__weakref__" not in names + and not weakref_inherited + ): + names += ("__weakref__",) + + # We only add the names of attributes that aren't inherited. + # Setting __slots__ to inherited attributes wastes memory. + slot_names = [name for name in names if name not in base_names] + # There are slots for attributes from current class + # that are defined in parent classes. + # As their descriptors may be overriden by a child class, + # we collect them here and update the class dict + reused_slots = { + slot: slot_descriptor + for slot, slot_descriptor in iteritems(existing_slots) + if slot in slot_names + } + slot_names = [name for name in slot_names if name not in reused_slots] + cd.update(reused_slots) + if self._cache_hash: + slot_names.append(_hash_cache_field) + cd["__slots__"] = tuple(slot_names) + + qualname = getattr(self._cls, "__qualname__", None) + if qualname is not None: + cd["__qualname__"] = qualname + + # Create new class based on old class and our methods. + cls = type(self._cls)(self._cls.__name__, self._cls.__bases__, cd) + + # The following is a fix for + # . On Python 3, + # if a method mentions `__class__` or uses the no-arg super(), the + # compiler will bake a reference to the class in the method itself + # as `method.__closure__`. Since we replace the class with a + # clone, we rewrite these references so it keeps working. + for item in cls.__dict__.values(): + if isinstance(item, (classmethod, staticmethod)): + # Class- and staticmethods hide their functions inside. 
+ # These might need to be rewritten as well. + closure_cells = getattr(item.__func__, "__closure__", None) + elif isinstance(item, property): + # Workaround for property `super()` shortcut (PY3-only). + # There is no universal way for other descriptors. + closure_cells = getattr(item.fget, "__closure__", None) + else: + closure_cells = getattr(item, "__closure__", None) + + if not closure_cells: # Catch None or the empty list. + continue + for cell in closure_cells: + try: + match = cell.cell_contents is self._cls + except ValueError: # ValueError: Cell is empty + pass + else: + if match: + set_closure_cell(cell, cls) + + return cls + + def add_repr(self, ns): + self._cls_dict["__repr__"] = self._add_method_dunders( + _make_repr(self._attrs, ns, self._cls) + ) + return self + + def add_str(self): + repr = self._cls_dict.get("__repr__") + if repr is None: + raise ValueError( + "__str__ can only be generated if a __repr__ exists." + ) + + def __str__(self): + return self.__repr__() + + self._cls_dict["__str__"] = self._add_method_dunders(__str__) + return self + + def _make_getstate_setstate(self): + """ + Create custom __setstate__ and __getstate__ methods. + """ + # __weakref__ is not writable. + state_attr_names = tuple( + an for an in self._attr_names if an != "__weakref__" + ) + + def slots_getstate(self): + """ + Automatically created by attrs. + """ + return tuple(getattr(self, name) for name in state_attr_names) + + hash_caching_enabled = self._cache_hash + + def slots_setstate(self, state): + """ + Automatically created by attrs. + """ + __bound_setattr = _obj_setattr.__get__(self, Attribute) + for name, value in zip(state_attr_names, state): + __bound_setattr(name, value) + + # The hash code cache is not included when the object is + # serialized, but it still needs to be initialized to None to + # indicate that the first call to __hash__ should be a cache + # miss. 
+ if hash_caching_enabled: + __bound_setattr(_hash_cache_field, None) + + return slots_getstate, slots_setstate + + def make_unhashable(self): + self._cls_dict["__hash__"] = None + return self + + def add_hash(self): + self._cls_dict["__hash__"] = self._add_method_dunders( + _make_hash( + self._cls, + self._attrs, + frozen=self._frozen, + cache_hash=self._cache_hash, + ) + ) + + return self + + def add_init(self): + self._cls_dict["__init__"] = self._add_method_dunders( + _make_init( + self._cls, + self._attrs, + self._has_pre_init, + self._has_post_init, + self._frozen, + self._slots, + self._cache_hash, + self._base_attr_map, + self._is_exc, + self._on_setattr, + attrs_init=False, + ) + ) + + return self + + def add_match_args(self): + self._cls_dict["__match_args__"] = tuple( + field.name + for field in self._attrs + if field.init and not field.kw_only + ) + + def add_attrs_init(self): + self._cls_dict["__attrs_init__"] = self._add_method_dunders( + _make_init( + self._cls, + self._attrs, + self._has_pre_init, + self._has_post_init, + self._frozen, + self._slots, + self._cache_hash, + self._base_attr_map, + self._is_exc, + self._on_setattr, + attrs_init=True, + ) + ) + + return self + + def add_eq(self): + cd = self._cls_dict + + cd["__eq__"] = self._add_method_dunders( + _make_eq(self._cls, self._attrs) + ) + cd["__ne__"] = self._add_method_dunders(_make_ne()) + + return self + + def add_order(self): + cd = self._cls_dict + + cd["__lt__"], cd["__le__"], cd["__gt__"], cd["__ge__"] = ( + self._add_method_dunders(meth) + for meth in _make_order(self._cls, self._attrs) + ) + + return self + + def add_setattr(self): + if self._frozen: + return self + + sa_attrs = {} + for a in self._attrs: + on_setattr = a.on_setattr or self._on_setattr + if on_setattr and on_setattr is not setters.NO_OP: + sa_attrs[a.name] = a, on_setattr + + if not sa_attrs: + return self + + if self._has_custom_setattr: + # We need to write a __setattr__ but there already is one! 
+ raise ValueError( + "Can't combine custom __setattr__ with on_setattr hooks." + ) + + # docstring comes from _add_method_dunders + def __setattr__(self, name, val): + try: + a, hook = sa_attrs[name] + except KeyError: + nval = val + else: + nval = hook(self, a, val) + + _obj_setattr(self, name, nval) + + self._cls_dict["__attrs_own_setattr__"] = True + self._cls_dict["__setattr__"] = self._add_method_dunders(__setattr__) + self._wrote_own_setattr = True + + return self + + def _add_method_dunders(self, method): + """ + Add __module__ and __qualname__ to a *method* if possible. + """ + try: + method.__module__ = self._cls.__module__ + except AttributeError: + pass + + try: + method.__qualname__ = ".".join( + (self._cls.__qualname__, method.__name__) + ) + except AttributeError: + pass + + try: + method.__doc__ = "Method generated by attrs for class %s." % ( + self._cls.__qualname__, + ) + except AttributeError: + pass + + return method + + +_CMP_DEPRECATION = ( + "The usage of `cmp` is deprecated and will be removed on or after " + "2021-06-01. Please use `eq` and `order` instead." +) + + +def _determine_attrs_eq_order(cmp, eq, order, default_eq): + """ + Validate the combination of *cmp*, *eq*, and *order*. Derive the effective + values of eq and order. If *eq* is None, set it to *default_eq*. + """ + if cmp is not None and any((eq is not None, order is not None)): + raise ValueError("Don't mix `cmp` with `eq' and `order`.") + + # cmp takes precedence due to bw-compatibility. + if cmp is not None: + return cmp, cmp + + # If left None, equality is set to the specified default and ordering + # mirrors equality. + if eq is None: + eq = default_eq + + if order is None: + order = eq + + if eq is False and order is True: + raise ValueError("`order` can only be True if `eq` is True too.") + + return eq, order + + +def _determine_attrib_eq_order(cmp, eq, order, default_eq): + """ + Validate the combination of *cmp*, *eq*, and *order*. 
Derive the effective + values of eq and order. If *eq* is None, set it to *default_eq*. + """ + if cmp is not None and any((eq is not None, order is not None)): + raise ValueError("Don't mix `cmp` with `eq' and `order`.") + + def decide_callable_or_boolean(value): + """ + Decide whether a key function is used. + """ + if callable(value): + value, key = True, value + else: + key = None + return value, key + + # cmp takes precedence due to bw-compatibility. + if cmp is not None: + cmp, cmp_key = decide_callable_or_boolean(cmp) + return cmp, cmp_key, cmp, cmp_key + + # If left None, equality is set to the specified default and ordering + # mirrors equality. + if eq is None: + eq, eq_key = default_eq, None + else: + eq, eq_key = decide_callable_or_boolean(eq) + + if order is None: + order, order_key = eq, eq_key + else: + order, order_key = decide_callable_or_boolean(order) + + if eq is False and order is True: + raise ValueError("`order` can only be True if `eq` is True too.") + + return eq, eq_key, order, order_key + + +def _determine_whether_to_implement( + cls, flag, auto_detect, dunders, default=True +): + """ + Check whether we should implement a set of methods for *cls*. + + *flag* is the argument passed into @attr.s like 'init', *auto_detect* the + same as passed into @attr.s and *dunders* is a tuple of attribute names + whose presence signal that the user has implemented it themselves. + + Return *default* if no reason for either for or against is found. + + auto_detect must be False on Python 2. + """ + if flag is True or flag is False: + return flag + + if flag is None and auto_detect is False: + return default + + # Logically, flag is None and auto_detect is True here. 
+ for dunder in dunders: + if _has_own_attribute(cls, dunder): + return False + + return default + + +def attrs( + maybe_cls=None, + these=None, + repr_ns=None, + repr=None, + cmp=None, + hash=None, + init=None, + slots=False, + frozen=False, + weakref_slot=True, + str=False, + auto_attribs=False, + kw_only=False, + cache_hash=False, + auto_exc=False, + eq=None, + order=None, + auto_detect=False, + collect_by_mro=False, + getstate_setstate=None, + on_setattr=None, + field_transformer=None, + match_args=True, +): + r""" + A class decorator that adds `dunder + `_\ -methods according to the + specified attributes using `attr.ib` or the *these* argument. + + :param these: A dictionary of name to `attr.ib` mappings. This is + useful to avoid the definition of your attributes within the class body + because you can't (e.g. if you want to add ``__repr__`` methods to + Django models) or don't want to. + + If *these* is not ``None``, ``attrs`` will *not* search the class body + for attributes and will *not* remove any attributes from it. + + If *these* is an ordered dict (`dict` on Python 3.6+, + `collections.OrderedDict` otherwise), the order is deduced from + the order of the attributes inside *these*. Otherwise the order + of the definition of the attributes is used. + + :type these: `dict` of `str` to `attr.ib` + + :param str repr_ns: When using nested classes, there's no way in Python 2 + to automatically detect that. Therefore it's possible to set the + namespace explicitly for a more meaningful ``repr`` output. + :param bool auto_detect: Instead of setting the *init*, *repr*, *eq*, + *order*, and *hash* arguments explicitly, assume they are set to + ``True`` **unless any** of the involved methods for one of the + arguments is implemented in the *current* class (i.e. it is *not* + inherited from some base class). 
+ + So for example by implementing ``__eq__`` on a class yourself, + ``attrs`` will deduce ``eq=False`` and will create *neither* + ``__eq__`` *nor* ``__ne__`` (but Python classes come with a sensible + ``__ne__`` by default, so it *should* be enough to only implement + ``__eq__`` in most cases). + + .. warning:: + + If you prevent ``attrs`` from creating the ordering methods for you + (``order=False``, e.g. by implementing ``__le__``), it becomes + *your* responsibility to make sure its ordering is sound. The best + way is to use the `functools.total_ordering` decorator. + + + Passing ``True`` or ``False`` to *init*, *repr*, *eq*, *order*, + *cmp*, or *hash* overrides whatever *auto_detect* would determine. + + *auto_detect* requires Python 3. Setting it ``True`` on Python 2 raises + an `attrs.exceptions.PythonTooOldError`. + + :param bool repr: Create a ``__repr__`` method with a human readable + representation of ``attrs`` attributes.. + :param bool str: Create a ``__str__`` method that is identical to + ``__repr__``. This is usually not necessary except for + `Exception`\ s. + :param Optional[bool] eq: If ``True`` or ``None`` (default), add ``__eq__`` + and ``__ne__`` methods that check two instances for equality. + + They compare the instances as if they were tuples of their ``attrs`` + attributes if and only if the types of both classes are *identical*! + :param Optional[bool] order: If ``True``, add ``__lt__``, ``__le__``, + ``__gt__``, and ``__ge__`` methods that behave like *eq* above and + allow instances to be ordered. If ``None`` (default) mirror value of + *eq*. + :param Optional[bool] cmp: Setting *cmp* is equivalent to setting *eq* + and *order* to the same value. Must not be mixed with *eq* or *order*. + :param Optional[bool] hash: If ``None`` (default), the ``__hash__`` method + is generated according how *eq* and *frozen* are set. + + 1. If *both* are True, ``attrs`` will generate a ``__hash__`` for you. + 2. 
If *eq* is True and *frozen* is False, ``__hash__`` will be set to + None, marking it unhashable (which it is). + 3. If *eq* is False, ``__hash__`` will be left untouched meaning the + ``__hash__`` method of the base class will be used (if base class is + ``object``, this means it will fall back to id-based hashing.). + + Although not recommended, you can decide for yourself and force + ``attrs`` to create one (e.g. if the class is immutable even though you + didn't freeze it programmatically) by passing ``True`` or not. Both of + these cases are rather special and should be used carefully. + + See our documentation on `hashing`, Python's documentation on + `object.__hash__`, and the `GitHub issue that led to the default \ + behavior `_ for more + details. + :param bool init: Create a ``__init__`` method that initializes the + ``attrs`` attributes. Leading underscores are stripped for the argument + name. If a ``__attrs_pre_init__`` method exists on the class, it will + be called before the class is initialized. If a ``__attrs_post_init__`` + method exists on the class, it will be called after the class is fully + initialized. + + If ``init`` is ``False``, an ``__attrs_init__`` method will be + injected instead. This allows you to define a custom ``__init__`` + method that can do pre-init work such as ``super().__init__()``, + and then call ``__attrs_init__()`` and ``__attrs_post_init__()``. + :param bool slots: Create a `slotted class ` that's more + memory-efficient. Slotted classes are generally superior to the default + dict classes, but have some gotchas you should know about, so we + encourage you to read the `glossary entry `. + :param bool frozen: Make instances immutable after initialization. If + someone attempts to modify a frozen instance, + `attr.exceptions.FrozenInstanceError` is raised. + + .. note:: + + 1. This is achieved by installing a custom ``__setattr__`` method + on your class, so you can't implement your own. + + 2. 
True immutability is impossible in Python. + + 3. This *does* have a minor a runtime performance `impact + ` when initializing new instances. In other words: + ``__init__`` is slightly slower with ``frozen=True``. + + 4. If a class is frozen, you cannot modify ``self`` in + ``__attrs_post_init__`` or a self-written ``__init__``. You can + circumvent that limitation by using + ``object.__setattr__(self, "attribute_name", value)``. + + 5. Subclasses of a frozen class are frozen too. + + :param bool weakref_slot: Make instances weak-referenceable. This has no + effect unless ``slots`` is also enabled. + :param bool auto_attribs: If ``True``, collect `PEP 526`_-annotated + attributes (Python 3.6 and later only) from the class body. + + In this case, you **must** annotate every field. If ``attrs`` + encounters a field that is set to an `attr.ib` but lacks a type + annotation, an `attr.exceptions.UnannotatedAttributeError` is + raised. Use ``field_name: typing.Any = attr.ib(...)`` if you don't + want to set a type. + + If you assign a value to those attributes (e.g. ``x: int = 42``), that + value becomes the default value like if it were passed using + ``attr.ib(default=42)``. Passing an instance of `attrs.Factory` also + works as expected in most cases (see warning below). + + Attributes annotated as `typing.ClassVar`, and attributes that are + neither annotated nor set to an `attr.ib` are **ignored**. + + .. warning:: + For features that use the attribute name to create decorators (e.g. + `validators `), you still *must* assign `attr.ib` to + them. Otherwise Python will either not find the name or try to use + the default value to call e.g. ``validator`` on it. + + These errors can be quite confusing and probably the most common bug + report on our bug tracker. + + .. 
_`PEP 526`: https://www.python.org/dev/peps/pep-0526/ + :param bool kw_only: Make all attributes keyword-only (Python 3+) + in the generated ``__init__`` (if ``init`` is ``False``, this + parameter is ignored). + :param bool cache_hash: Ensure that the object's hash code is computed + only once and stored on the object. If this is set to ``True``, + hashing must be either explicitly or implicitly enabled for this + class. If the hash code is cached, avoid any reassignments of + fields involved in hash code computation or mutations of the objects + those fields point to after object creation. If such changes occur, + the behavior of the object's hash code is undefined. + :param bool auto_exc: If the class subclasses `BaseException` + (which implicitly includes any subclass of any exception), the + following happens to behave like a well-behaved Python exceptions + class: + + - the values for *eq*, *order*, and *hash* are ignored and the + instances compare and hash by the instance's ids (N.B. ``attrs`` will + *not* remove existing implementations of ``__hash__`` or the equality + methods. It just won't add own ones.), + - all attributes that are either passed into ``__init__`` or have a + default value are additionally available as a tuple in the ``args`` + attribute, + - the value of *str* is ignored leaving ``__str__`` to base classes. + :param bool collect_by_mro: Setting this to `True` fixes the way ``attrs`` + collects attributes from base classes. The default behavior is + incorrect in certain cases of multiple inheritance. It should be on by + default but is kept off for backward-compatibility. + + See issue `#428 `_ for + more details. + + :param Optional[bool] getstate_setstate: + .. note:: + This is usually only interesting for slotted classes and you should + probably just set *auto_detect* to `True`. + + If `True`, ``__getstate__`` and + ``__setstate__`` are generated and attached to the class. This is + necessary for slotted classes to be pickleable. 
If left `None`, it's + `True` by default for slotted classes and ``False`` for dict classes. + + If *auto_detect* is `True`, and *getstate_setstate* is left `None`, + and **either** ``__getstate__`` or ``__setstate__`` is detected directly + on the class (i.e. not inherited), it is set to `False` (this is usually + what you want). + + :param on_setattr: A callable that is run whenever the user attempts to set + an attribute (either by assignment like ``i.x = 42`` or by using + `setattr` like ``setattr(i, "x", 42)``). It receives the same arguments + as validators: the instance, the attribute that is being modified, and + the new value. + + If no exception is raised, the attribute is set to the return value of + the callable. + + If a list of callables is passed, they're automatically wrapped in an + `attrs.setters.pipe`. + + :param Optional[callable] field_transformer: + A function that is called with the original class object and all + fields right before ``attrs`` finalizes the class. You can use + this, e.g., to automatically add converters or validators to + fields based on their types. See `transform-fields` for more details. + + :param bool match_args: + If `True` (default), set ``__match_args__`` on the class to support + `PEP 634 `_ (Structural + Pattern Matching). It is a tuple of all positional-only ``__init__`` + parameter names on Python 3.10 and later. Ignored on older Python + versions. + + .. versionadded:: 16.0.0 *slots* + .. versionadded:: 16.1.0 *frozen* + .. versionadded:: 16.3.0 *str* + .. versionadded:: 16.3.0 Support for ``__attrs_post_init__``. + .. versionchanged:: 17.1.0 + *hash* supports ``None`` as value which is also the default now. + .. versionadded:: 17.3.0 *auto_attribs* + .. versionchanged:: 18.1.0 + If *these* is passed, no attributes are deleted from the class body. + .. versionchanged:: 18.1.0 If *these* is ordered, the order is retained. + .. versionadded:: 18.2.0 *weakref_slot* + .. 
deprecated:: 18.2.0 + ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now raise a + `DeprecationWarning` if the classes compared are subclasses of + each other. ``__eq`` and ``__ne__`` never tried to compared subclasses + to each other. + .. versionchanged:: 19.2.0 + ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now do not consider + subclasses comparable anymore. + .. versionadded:: 18.2.0 *kw_only* + .. versionadded:: 18.2.0 *cache_hash* + .. versionadded:: 19.1.0 *auto_exc* + .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01. + .. versionadded:: 19.2.0 *eq* and *order* + .. versionadded:: 20.1.0 *auto_detect* + .. versionadded:: 20.1.0 *collect_by_mro* + .. versionadded:: 20.1.0 *getstate_setstate* + .. versionadded:: 20.1.0 *on_setattr* + .. versionadded:: 20.3.0 *field_transformer* + .. versionchanged:: 21.1.0 + ``init=False`` injects ``__attrs_init__`` + .. versionchanged:: 21.1.0 Support for ``__attrs_pre_init__`` + .. versionchanged:: 21.1.0 *cmp* undeprecated + .. versionadded:: 21.3.0 *match_args* + """ + if auto_detect and PY2: + raise PythonTooOldError( + "auto_detect only works on Python 3 and later." 
+ ) + + eq_, order_ = _determine_attrs_eq_order(cmp, eq, order, None) + hash_ = hash # work around the lack of nonlocal + + if isinstance(on_setattr, (list, tuple)): + on_setattr = setters.pipe(*on_setattr) + + def wrap(cls): + + if getattr(cls, "__class__", None) is None: + raise TypeError("attrs only works with new-style classes.") + + is_frozen = frozen or _has_frozen_base_class(cls) + is_exc = auto_exc is True and issubclass(cls, BaseException) + has_own_setattr = auto_detect and _has_own_attribute( + cls, "__setattr__" + ) + + if has_own_setattr and is_frozen: + raise ValueError("Can't freeze a class with a custom __setattr__.") + + builder = _ClassBuilder( + cls, + these, + slots, + is_frozen, + weakref_slot, + _determine_whether_to_implement( + cls, + getstate_setstate, + auto_detect, + ("__getstate__", "__setstate__"), + default=slots, + ), + auto_attribs, + kw_only, + cache_hash, + is_exc, + collect_by_mro, + on_setattr, + has_own_setattr, + field_transformer, + ) + if _determine_whether_to_implement( + cls, repr, auto_detect, ("__repr__",) + ): + builder.add_repr(repr_ns) + if str is True: + builder.add_str() + + eq = _determine_whether_to_implement( + cls, eq_, auto_detect, ("__eq__", "__ne__") + ) + if not is_exc and eq is True: + builder.add_eq() + if not is_exc and _determine_whether_to_implement( + cls, order_, auto_detect, ("__lt__", "__le__", "__gt__", "__ge__") + ): + builder.add_order() + + builder.add_setattr() + + if ( + hash_ is None + and auto_detect is True + and _has_own_attribute(cls, "__hash__") + ): + hash = False + else: + hash = hash_ + if hash is not True and hash is not False and hash is not None: + # Can't use `hash in` because 1 == True for example. + raise TypeError( + "Invalid value for hash. Must be True, False, or None." + ) + elif hash is False or (hash is None and eq is False) or is_exc: + # Don't do anything. Should fall back to __object__'s __hash__ + # which is by id. 
+ if cache_hash: + raise TypeError( + "Invalid value for cache_hash. To use hash caching," + " hashing must be either explicitly or implicitly " + "enabled." + ) + elif hash is True or ( + hash is None and eq is True and is_frozen is True + ): + # Build a __hash__ if told so, or if it's safe. + builder.add_hash() + else: + # Raise TypeError on attempts to hash. + if cache_hash: + raise TypeError( + "Invalid value for cache_hash. To use hash caching," + " hashing must be either explicitly or implicitly " + "enabled." + ) + builder.make_unhashable() + + if _determine_whether_to_implement( + cls, init, auto_detect, ("__init__",) + ): + builder.add_init() + else: + builder.add_attrs_init() + if cache_hash: + raise TypeError( + "Invalid value for cache_hash. To use hash caching," + " init must be True." + ) + + if ( + PY310 + and match_args + and not _has_own_attribute(cls, "__match_args__") + ): + builder.add_match_args() + + return builder.build_class() + + # maybe_cls's type depends on the usage of the decorator. It's a class + # if it's used as `@attrs` but ``None`` if used as `@attrs()`. + if maybe_cls is None: + return wrap + else: + return wrap(maybe_cls) + + +_attrs = attrs +""" +Internal alias so we can use it in functions that take an argument called +*attrs*. +""" + + +if PY2: + + def _has_frozen_base_class(cls): + """ + Check whether *cls* has a frozen ancestor by looking at its + __setattr__. + """ + return ( + getattr(cls.__setattr__, "__module__", None) + == _frozen_setattrs.__module__ + and cls.__setattr__.__name__ == _frozen_setattrs.__name__ + ) + +else: + + def _has_frozen_base_class(cls): + """ + Check whether *cls* has a frozen ancestor by looking at its + __setattr__. + """ + return cls.__setattr__ == _frozen_setattrs + + +def _generate_unique_filename(cls, func_name): + """ + Create a "filename" suitable for a function being generated. 
unique_filename = "<attrs generated {} {}.{}>".format(
+ """ + cls.__hash__ = _make_hash(cls, attrs, frozen=False, cache_hash=False) + return cls + + +def _make_ne(): + """ + Create __ne__ method. + """ + + def __ne__(self, other): + """ + Check equality and either forward a NotImplemented or + return the result negated. + """ + result = self.__eq__(other) + if result is NotImplemented: + return NotImplemented + + return not result + + return __ne__ + + +def _make_eq(cls, attrs): + """ + Create __eq__ method for *cls* with *attrs*. + """ + attrs = [a for a in attrs if a.eq] + + unique_filename = _generate_unique_filename(cls, "eq") + lines = [ + "def __eq__(self, other):", + " if other.__class__ is not self.__class__:", + " return NotImplemented", + ] + + # We can't just do a big self.x = other.x and... clause due to + # irregularities like nan == nan is false but (nan,) == (nan,) is true. + globs = {} + if attrs: + lines.append(" return (") + others = [" ) == ("] + for a in attrs: + if a.eq_key: + cmp_name = "_%s_key" % (a.name,) + # Add the key function to the global namespace + # of the evaluated function. + globs[cmp_name] = a.eq_key + lines.append( + " %s(self.%s)," + % ( + cmp_name, + a.name, + ) + ) + others.append( + " %s(other.%s)," + % ( + cmp_name, + a.name, + ) + ) + else: + lines.append(" self.%s," % (a.name,)) + others.append(" other.%s," % (a.name,)) + + lines += others + [" )"] + else: + lines.append(" return True") + + script = "\n".join(lines) + + return _make_method("__eq__", script, unique_filename, globs) + + +def _make_order(cls, attrs): + """ + Create ordering methods for *cls* with *attrs*. + """ + attrs = [a for a in attrs if a.order] + + def attrs_to_tuple(obj): + """ + Save us some typing. + """ + return tuple( + key(value) if key else value + for value, key in ( + (getattr(obj, a.name), a.order_key) for a in attrs + ) + ) + + def __lt__(self, other): + """ + Automatically created by attrs. 
+ """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) < attrs_to_tuple(other) + + return NotImplemented + + def __le__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) <= attrs_to_tuple(other) + + return NotImplemented + + def __gt__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) > attrs_to_tuple(other) + + return NotImplemented + + def __ge__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) >= attrs_to_tuple(other) + + return NotImplemented + + return __lt__, __le__, __gt__, __ge__ + + +def _add_eq(cls, attrs=None): + """ + Add equality methods to *cls* with *attrs*. + """ + if attrs is None: + attrs = cls.__attrs_attrs__ + + cls.__eq__ = _make_eq(cls, attrs) + cls.__ne__ = _make_ne() + + return cls + + +if HAS_F_STRINGS: + + def _make_repr(attrs, ns, cls): + unique_filename = _generate_unique_filename(cls, "repr") + # Figure out which attributes to include, and which function to use to + # format them. The a.repr value can be either bool or a custom + # callable. + attr_names_with_reprs = tuple( + (a.name, (repr if a.repr is True else a.repr), a.init) + for a in attrs + if a.repr is not False + ) + globs = { + name + "_repr": r + for name, r, _ in attr_names_with_reprs + if r != repr + } + globs["_compat"] = _compat + globs["AttributeError"] = AttributeError + globs["NOTHING"] = NOTHING + attribute_fragments = [] + for name, r, i in attr_names_with_reprs: + accessor = ( + "self." 
+ name + if i + else 'getattr(self, "' + name + '", NOTHING)' + ) + fragment = ( + "%s={%s!r}" % (name, accessor) + if r == repr + else "%s={%s_repr(%s)}" % (name, name, accessor) + ) + attribute_fragments.append(fragment) + repr_fragment = ", ".join(attribute_fragments) + + if ns is None: + cls_name_fragment = ( + '{self.__class__.__qualname__.rsplit(">.", 1)[-1]}' + ) + else: + cls_name_fragment = ns + ".{self.__class__.__name__}" + + lines = [ + "def __repr__(self):", + " try:", + " already_repring = _compat.repr_context.already_repring", + " except AttributeError:", + " already_repring = {id(self),}", + " _compat.repr_context.already_repring = already_repring", + " else:", + " if id(self) in already_repring:", + " return '...'", + " else:", + " already_repring.add(id(self))", + " try:", + " return f'%s(%s)'" % (cls_name_fragment, repr_fragment), + " finally:", + " already_repring.remove(id(self))", + ] + + return _make_method( + "__repr__", "\n".join(lines), unique_filename, globs=globs + ) + +else: + + def _make_repr(attrs, ns, _): + """ + Make a repr method that includes relevant *attrs*, adding *ns* to the + full name. + """ + + # Figure out which attributes to include, and which function to use to + # format them. The a.repr value can be either bool or a custom + # callable. + attr_names_with_reprs = tuple( + (a.name, repr if a.repr is True else a.repr) + for a in attrs + if a.repr is not False + ) + + def __repr__(self): + """ + Automatically created by attrs. + """ + try: + already_repring = _compat.repr_context.already_repring + except AttributeError: + already_repring = set() + _compat.repr_context.already_repring = already_repring + + if id(self) in already_repring: + return "..." + real_cls = self.__class__ + if ns is None: + qualname = getattr(real_cls, "__qualname__", None) + if qualname is not None: # pragma: no cover + # This case only happens on Python 3.5 and 3.6. 
We exclude + # it from coverage, because we don't want to slow down our + # test suite by running them under coverage too for this + # one line. + class_name = qualname.rsplit(">.", 1)[-1] + else: + class_name = real_cls.__name__ + else: + class_name = ns + "." + real_cls.__name__ + + # Since 'self' remains on the stack (i.e.: strongly referenced) + # for the duration of this call, it's safe to depend on id(...) + # stability, and not need to track the instance and therefore + # worry about properties like weakref- or hash-ability. + already_repring.add(id(self)) + try: + result = [class_name, "("] + first = True + for name, attr_repr in attr_names_with_reprs: + if first: + first = False + else: + result.append(", ") + result.extend( + (name, "=", attr_repr(getattr(self, name, NOTHING))) + ) + return "".join(result) + ")" + finally: + already_repring.remove(id(self)) + + return __repr__ + + +def _add_repr(cls, ns=None, attrs=None): + """ + Add a repr method to *cls*. + """ + if attrs is None: + attrs = cls.__attrs_attrs__ + + cls.__repr__ = _make_repr(attrs, ns, cls) + return cls + + +def fields(cls): + """ + Return the tuple of ``attrs`` attributes for a class. + + The tuple also allows accessing the fields by their names (see below for + examples). + + :param type cls: Class to introspect. + + :raise TypeError: If *cls* is not a class. + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + :rtype: tuple (with name accessors) of `attrs.Attribute` + + .. versionchanged:: 16.2.0 Returned tuple allows accessing the fields + by name. + """ + if not isclass(cls): + raise TypeError("Passed object must be a class.") + attrs = getattr(cls, "__attrs_attrs__", None) + if attrs is None: + raise NotAnAttrsClassError( + "{cls!r} is not an attrs-decorated class.".format(cls=cls) + ) + return attrs + + +def fields_dict(cls): + """ + Return an ordered dictionary of ``attrs`` attributes for a class, whose + keys are the attribute names. 
+ + :param type cls: Class to introspect. + + :raise TypeError: If *cls* is not a class. + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + :rtype: an ordered dict where keys are attribute names and values are + `attrs.Attribute`\\ s. This will be a `dict` if it's + naturally ordered like on Python 3.6+ or an + :class:`~collections.OrderedDict` otherwise. + + .. versionadded:: 18.1.0 + """ + if not isclass(cls): + raise TypeError("Passed object must be a class.") + attrs = getattr(cls, "__attrs_attrs__", None) + if attrs is None: + raise NotAnAttrsClassError( + "{cls!r} is not an attrs-decorated class.".format(cls=cls) + ) + return ordered_dict(((a.name, a) for a in attrs)) + + +def validate(inst): + """ + Validate all attributes on *inst* that have a validator. + + Leaves all exceptions through. + + :param inst: Instance of a class with ``attrs`` attributes. + """ + if _config._run_validators is False: + return + + for a in fields(inst.__class__): + v = a.validator + if v is not None: + v(inst, a, getattr(inst, a.name)) + + +def _is_slot_cls(cls): + return "__slots__" in cls.__dict__ + + +def _is_slot_attr(a_name, base_attr_map): + """ + Check if the attribute name comes from a slot class. 
+ """ + return a_name in base_attr_map and _is_slot_cls(base_attr_map[a_name]) + + +def _make_init( + cls, + attrs, + pre_init, + post_init, + frozen, + slots, + cache_hash, + base_attr_map, + is_exc, + cls_on_setattr, + attrs_init, +): + has_cls_on_setattr = ( + cls_on_setattr is not None and cls_on_setattr is not setters.NO_OP + ) + + if frozen and has_cls_on_setattr: + raise ValueError("Frozen classes can't use on_setattr.") + + needs_cached_setattr = cache_hash or frozen + filtered_attrs = [] + attr_dict = {} + for a in attrs: + if not a.init and a.default is NOTHING: + continue + + filtered_attrs.append(a) + attr_dict[a.name] = a + + if a.on_setattr is not None: + if frozen is True: + raise ValueError("Frozen classes can't use on_setattr.") + + needs_cached_setattr = True + elif has_cls_on_setattr and a.on_setattr is not setters.NO_OP: + needs_cached_setattr = True + + unique_filename = _generate_unique_filename(cls, "init") + + script, globs, annotations = _attrs_to_init_script( + filtered_attrs, + frozen, + slots, + pre_init, + post_init, + cache_hash, + base_attr_map, + is_exc, + needs_cached_setattr, + has_cls_on_setattr, + attrs_init, + ) + if cls.__module__ in sys.modules: + # This makes typing.get_type_hints(CLS.__init__) resolve string types. + globs.update(sys.modules[cls.__module__].__dict__) + + globs.update({"NOTHING": NOTHING, "attr_dict": attr_dict}) + + if needs_cached_setattr: + # Save the lookup overhead in __init__ if we need to circumvent + # setattr hooks. + globs["_cached_setattr"] = _obj_setattr + + init = _make_method( + "__attrs_init__" if attrs_init else "__init__", + script, + unique_filename, + globs, + ) + init.__annotations__ = annotations + + return init + + +def _setattr(attr_name, value_var, has_on_setattr): + """ + Use the cached object.setattr to set *attr_name* to *value_var*. 
+ """ + return "_setattr('%s', %s)" % (attr_name, value_var) + + +def _setattr_with_converter(attr_name, value_var, has_on_setattr): + """ + Use the cached object.setattr to set *attr_name* to *value_var*, but run + its converter first. + """ + return "_setattr('%s', %s(%s))" % ( + attr_name, + _init_converter_pat % (attr_name,), + value_var, + ) + + +def _assign(attr_name, value, has_on_setattr): + """ + Unless *attr_name* has an on_setattr hook, use normal assignment. Otherwise + relegate to _setattr. + """ + if has_on_setattr: + return _setattr(attr_name, value, True) + + return "self.%s = %s" % (attr_name, value) + + +def _assign_with_converter(attr_name, value_var, has_on_setattr): + """ + Unless *attr_name* has an on_setattr hook, use normal assignment after + conversion. Otherwise relegate to _setattr_with_converter. + """ + if has_on_setattr: + return _setattr_with_converter(attr_name, value_var, True) + + return "self.%s = %s(%s)" % ( + attr_name, + _init_converter_pat % (attr_name,), + value_var, + ) + + +if PY2: + + def _unpack_kw_only_py2(attr_name, default=None): + """ + Unpack *attr_name* from _kw_only dict. + """ + if default is not None: + arg_default = ", %s" % default + else: + arg_default = "" + return "%s = _kw_only.pop('%s'%s)" % ( + attr_name, + attr_name, + arg_default, + ) + + def _unpack_kw_only_lines_py2(kw_only_args): + """ + Unpack all *kw_only_args* from _kw_only dict and handle errors. + + Given a list of strings "{attr_name}" and "{attr_name}={default}" + generates list of lines of code that pop attrs from _kw_only dict and + raise TypeError similar to builtin if required attr is missing or + extra key is passed. + + >>> print("\n".join(_unpack_kw_only_lines_py2(["a", "b=42"]))) + try: + a = _kw_only.pop('a') + b = _kw_only.pop('b', 42) + except KeyError as _key_error: + raise TypeError( + ... + if _kw_only: + raise TypeError( + ... 
+ """ + lines = ["try:"] + lines.extend( + " " + _unpack_kw_only_py2(*arg.split("=")) + for arg in kw_only_args + ) + lines += """\ +except KeyError as _key_error: + raise TypeError( + '__init__() missing required keyword-only argument: %s' % _key_error + ) +if _kw_only: + raise TypeError( + '__init__() got an unexpected keyword argument %r' + % next(iter(_kw_only)) + ) +""".split( + "\n" + ) + return lines + + +def _attrs_to_init_script( + attrs, + frozen, + slots, + pre_init, + post_init, + cache_hash, + base_attr_map, + is_exc, + needs_cached_setattr, + has_cls_on_setattr, + attrs_init, +): + """ + Return a script of an initializer for *attrs* and a dict of globals. + + The globals are expected by the generated script. + + If *frozen* is True, we cannot set the attributes directly so we use + a cached ``object.__setattr__``. + """ + lines = [] + if pre_init: + lines.append("self.__attrs_pre_init__()") + + if needs_cached_setattr: + lines.append( + # Circumvent the __setattr__ descriptor to save one lookup per + # assignment. + # Note _setattr will be used again below if cache_hash is True + "_setattr = _cached_setattr.__get__(self, self.__class__)" + ) + + if frozen is True: + if slots is True: + fmt_setter = _setattr + fmt_setter_with_converter = _setattr_with_converter + else: + # Dict frozen classes assign directly to __dict__. + # But only if the attribute doesn't come from an ancestor slot + # class. 
+ # Note _inst_dict will be used again below if cache_hash is True + lines.append("_inst_dict = self.__dict__") + + def fmt_setter(attr_name, value_var, has_on_setattr): + if _is_slot_attr(attr_name, base_attr_map): + return _setattr(attr_name, value_var, has_on_setattr) + + return "_inst_dict['%s'] = %s" % (attr_name, value_var) + + def fmt_setter_with_converter( + attr_name, value_var, has_on_setattr + ): + if has_on_setattr or _is_slot_attr(attr_name, base_attr_map): + return _setattr_with_converter( + attr_name, value_var, has_on_setattr + ) + + return "_inst_dict['%s'] = %s(%s)" % ( + attr_name, + _init_converter_pat % (attr_name,), + value_var, + ) + + else: + # Not frozen. + fmt_setter = _assign + fmt_setter_with_converter = _assign_with_converter + + args = [] + kw_only_args = [] + attrs_to_validate = [] + + # This is a dictionary of names to validator and converter callables. + # Injecting this into __init__ globals lets us avoid lookups. + names_for_globals = {} + annotations = {"return": None} + + for a in attrs: + if a.validator: + attrs_to_validate.append(a) + + attr_name = a.name + has_on_setattr = a.on_setattr is not None or ( + a.on_setattr is not setters.NO_OP and has_cls_on_setattr + ) + arg_name = a.name.lstrip("_") + + has_factory = isinstance(a.default, Factory) + if has_factory and a.default.takes_self: + maybe_self = "self" + else: + maybe_self = "" + + if a.init is False: + if has_factory: + init_factory_name = _init_factory_pat.format(a.name) + if a.converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, + init_factory_name + "(%s)" % (maybe_self,), + has_on_setattr, + ) + ) + conv_name = _init_converter_pat % (a.name,) + names_for_globals[conv_name] = a.converter + else: + lines.append( + fmt_setter( + attr_name, + init_factory_name + "(%s)" % (maybe_self,), + has_on_setattr, + ) + ) + names_for_globals[init_factory_name] = a.default.factory + else: + if a.converter is not None: + lines.append( + 
fmt_setter_with_converter( + attr_name, + "attr_dict['%s'].default" % (attr_name,), + has_on_setattr, + ) + ) + conv_name = _init_converter_pat % (a.name,) + names_for_globals[conv_name] = a.converter + else: + lines.append( + fmt_setter( + attr_name, + "attr_dict['%s'].default" % (attr_name,), + has_on_setattr, + ) + ) + elif a.default is not NOTHING and not has_factory: + arg = "%s=attr_dict['%s'].default" % (arg_name, attr_name) + if a.kw_only: + kw_only_args.append(arg) + else: + args.append(arg) + + if a.converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, arg_name, has_on_setattr + ) + ) + names_for_globals[ + _init_converter_pat % (a.name,) + ] = a.converter + else: + lines.append(fmt_setter(attr_name, arg_name, has_on_setattr)) + + elif has_factory: + arg = "%s=NOTHING" % (arg_name,) + if a.kw_only: + kw_only_args.append(arg) + else: + args.append(arg) + lines.append("if %s is not NOTHING:" % (arg_name,)) + + init_factory_name = _init_factory_pat.format(a.name) + if a.converter is not None: + lines.append( + " " + + fmt_setter_with_converter( + attr_name, arg_name, has_on_setattr + ) + ) + lines.append("else:") + lines.append( + " " + + fmt_setter_with_converter( + attr_name, + init_factory_name + "(" + maybe_self + ")", + has_on_setattr, + ) + ) + names_for_globals[ + _init_converter_pat % (a.name,) + ] = a.converter + else: + lines.append( + " " + fmt_setter(attr_name, arg_name, has_on_setattr) + ) + lines.append("else:") + lines.append( + " " + + fmt_setter( + attr_name, + init_factory_name + "(" + maybe_self + ")", + has_on_setattr, + ) + ) + names_for_globals[init_factory_name] = a.default.factory + else: + if a.kw_only: + kw_only_args.append(arg_name) + else: + args.append(arg_name) + + if a.converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, arg_name, has_on_setattr + ) + ) + names_for_globals[ + _init_converter_pat % (a.name,) + ] = a.converter + else: + lines.append(fmt_setter(attr_name, 
arg_name, has_on_setattr)) + + if a.init is True: + if a.type is not None and a.converter is None: + annotations[arg_name] = a.type + elif a.converter is not None and not PY2: + # Try to get the type from the converter. + sig = None + try: + sig = inspect.signature(a.converter) + except (ValueError, TypeError): # inspect failed + pass + if sig: + sig_params = list(sig.parameters.values()) + if ( + sig_params + and sig_params[0].annotation + is not inspect.Parameter.empty + ): + annotations[arg_name] = sig_params[0].annotation + + if attrs_to_validate: # we can skip this if there are no validators. + names_for_globals["_config"] = _config + lines.append("if _config._run_validators is True:") + for a in attrs_to_validate: + val_name = "__attr_validator_" + a.name + attr_name = "__attr_" + a.name + lines.append( + " %s(self, %s, self.%s)" % (val_name, attr_name, a.name) + ) + names_for_globals[val_name] = a.validator + names_for_globals[attr_name] = a + + if post_init: + lines.append("self.__attrs_post_init__()") + + # because this is set only after __attrs_post_init is called, a crash + # will result if post-init tries to access the hash code. This seemed + # preferable to setting this beforehand, in which case alteration to + # field values during post-init combined with post-init accessing the + # hash code would result in silent bugs. + if cache_hash: + if frozen: + if slots: + # if frozen and slots, then _setattr defined above + init_hash_cache = "_setattr('%s', %s)" + else: + # if frozen and not slots, then _inst_dict defined above + init_hash_cache = "_inst_dict['%s'] = %s" + else: + init_hash_cache = "self.%s = %s" + lines.append(init_hash_cache % (_hash_cache_field, "None")) + + # For exceptions we rely on BaseException.__init__ for proper + # initialization. + if is_exc: + vals = ",".join("self." 
+ a.name for a in attrs if a.init) + + lines.append("BaseException.__init__(self, %s)" % (vals,)) + + args = ", ".join(args) + if kw_only_args: + if PY2: + lines = _unpack_kw_only_lines_py2(kw_only_args) + lines + + args += "%s**_kw_only" % (", " if args else "",) # leading comma + else: + args += "%s*, %s" % ( + ", " if args else "", # leading comma + ", ".join(kw_only_args), # kw_only args + ) + return ( + """\ +def {init_name}(self, {args}): + {lines} +""".format( + init_name=("__attrs_init__" if attrs_init else "__init__"), + args=args, + lines="\n ".join(lines) if lines else "pass", + ), + names_for_globals, + annotations, + ) + + +class Attribute(object): + """ + *Read-only* representation of an attribute. + + The class has *all* arguments of `attr.ib` (except for ``factory`` + which is only syntactic sugar for ``default=Factory(...)`` plus the + following: + + - ``name`` (`str`): The name of the attribute. + - ``inherited`` (`bool`): Whether or not that attribute has been inherited + from a base class. + - ``eq_key`` and ``order_key`` (`typing.Callable` or `None`): The callables + that are used for comparing and ordering objects by this attribute, + respectively. These are set by passing a callable to `attr.ib`'s ``eq``, + ``order``, or ``cmp`` arguments. See also :ref:`comparison customization + `. + + Instances of this class are frequently used for introspection purposes + like: + + - `fields` returns a tuple of them. + - Validators get them passed as the first argument. + - The :ref:`field transformer ` hook receives a list of + them. + + .. versionadded:: 20.1.0 *inherited* + .. versionadded:: 20.1.0 *on_setattr* + .. versionchanged:: 20.2.0 *inherited* is not taken into account for + equality checks and hashing anymore. + .. versionadded:: 21.1.0 *eq_key* and *order_key* + + For the full version history of the fields, see `attr.ib`. 
+ """ + + __slots__ = ( + "name", + "default", + "validator", + "repr", + "eq", + "eq_key", + "order", + "order_key", + "hash", + "init", + "metadata", + "type", + "converter", + "kw_only", + "inherited", + "on_setattr", + ) + + def __init__( + self, + name, + default, + validator, + repr, + cmp, # XXX: unused, remove along with other cmp code. + hash, + init, + inherited, + metadata=None, + type=None, + converter=None, + kw_only=False, + eq=None, + eq_key=None, + order=None, + order_key=None, + on_setattr=None, + ): + eq, eq_key, order, order_key = _determine_attrib_eq_order( + cmp, eq_key or eq, order_key or order, True + ) + + # Cache this descriptor here to speed things up later. + bound_setattr = _obj_setattr.__get__(self, Attribute) + + # Despite the big red warning, people *do* instantiate `Attribute` + # themselves. + bound_setattr("name", name) + bound_setattr("default", default) + bound_setattr("validator", validator) + bound_setattr("repr", repr) + bound_setattr("eq", eq) + bound_setattr("eq_key", eq_key) + bound_setattr("order", order) + bound_setattr("order_key", order_key) + bound_setattr("hash", hash) + bound_setattr("init", init) + bound_setattr("converter", converter) + bound_setattr( + "metadata", + ( + metadata_proxy(metadata) + if metadata + else _empty_metadata_singleton + ), + ) + bound_setattr("type", type) + bound_setattr("kw_only", kw_only) + bound_setattr("inherited", inherited) + bound_setattr("on_setattr", on_setattr) + + def __setattr__(self, name, value): + raise FrozenInstanceError() + + @classmethod + def from_counting_attr(cls, name, ca, type=None): + # type holds the annotated value. 
deal with conflicts: + if type is None: + type = ca.type + elif ca.type is not None: + raise ValueError( + "Type annotation and type argument cannot both be present" + ) + inst_dict = { + k: getattr(ca, k) + for k in Attribute.__slots__ + if k + not in ( + "name", + "validator", + "default", + "type", + "inherited", + ) # exclude methods and deprecated alias + } + return cls( + name=name, + validator=ca._validator, + default=ca._default, + type=type, + cmp=None, + inherited=False, + **inst_dict + ) + + @property + def cmp(self): + """ + Simulate the presence of a cmp attribute and warn. + """ + warnings.warn(_CMP_DEPRECATION, DeprecationWarning, stacklevel=2) + + return self.eq and self.order + + # Don't use attr.evolve since fields(Attribute) doesn't work + def evolve(self, **changes): + """ + Copy *self* and apply *changes*. + + This works similarly to `attr.evolve` but that function does not work + with ``Attribute``. + + It is mainly meant to be used for `transform-fields`. + + .. versionadded:: 20.3.0 + """ + new = copy.copy(self) + + new._setattrs(changes.items()) + + return new + + # Don't use _add_pickle since fields(Attribute) doesn't work + def __getstate__(self): + """ + Play nice with pickle. + """ + return tuple( + getattr(self, name) if name != "metadata" else dict(self.metadata) + for name in self.__slots__ + ) + + def __setstate__(self, state): + """ + Play nice with pickle. 
+ """ + self._setattrs(zip(self.__slots__, state)) + + def _setattrs(self, name_values_pairs): + bound_setattr = _obj_setattr.__get__(self, Attribute) + for name, value in name_values_pairs: + if name != "metadata": + bound_setattr(name, value) + else: + bound_setattr( + name, + metadata_proxy(value) + if value + else _empty_metadata_singleton, + ) + + +_a = [ + Attribute( + name=name, + default=NOTHING, + validator=None, + repr=True, + cmp=None, + eq=True, + order=False, + hash=(name != "metadata"), + init=True, + inherited=False, + ) + for name in Attribute.__slots__ +] + +Attribute = _add_hash( + _add_eq( + _add_repr(Attribute, attrs=_a), + attrs=[a for a in _a if a.name != "inherited"], + ), + attrs=[a for a in _a if a.hash and a.name != "inherited"], +) + + +class _CountingAttr(object): + """ + Intermediate representation of attributes that uses a counter to preserve + the order in which the attributes have been defined. + + *Internal* data structure of the attrs library. Running into is most + likely the result of a bug like a forgotten `@attr.s` decorator. 
+ """ + + __slots__ = ( + "counter", + "_default", + "repr", + "eq", + "eq_key", + "order", + "order_key", + "hash", + "init", + "metadata", + "_validator", + "converter", + "type", + "kw_only", + "on_setattr", + ) + __attrs_attrs__ = tuple( + Attribute( + name=name, + default=NOTHING, + validator=None, + repr=True, + cmp=None, + hash=True, + init=True, + kw_only=False, + eq=True, + eq_key=None, + order=False, + order_key=None, + inherited=False, + on_setattr=None, + ) + for name in ( + "counter", + "_default", + "repr", + "eq", + "order", + "hash", + "init", + "on_setattr", + ) + ) + ( + Attribute( + name="metadata", + default=None, + validator=None, + repr=True, + cmp=None, + hash=False, + init=True, + kw_only=False, + eq=True, + eq_key=None, + order=False, + order_key=None, + inherited=False, + on_setattr=None, + ), + ) + cls_counter = 0 + + def __init__( + self, + default, + validator, + repr, + cmp, + hash, + init, + converter, + metadata, + type, + kw_only, + eq, + eq_key, + order, + order_key, + on_setattr, + ): + _CountingAttr.cls_counter += 1 + self.counter = _CountingAttr.cls_counter + self._default = default + self._validator = validator + self.converter = converter + self.repr = repr + self.eq = eq + self.eq_key = eq_key + self.order = order + self.order_key = order_key + self.hash = hash + self.init = init + self.metadata = metadata + self.type = type + self.kw_only = kw_only + self.on_setattr = on_setattr + + def validator(self, meth): + """ + Decorator that adds *meth* to the list of validators. + + Returns *meth* unchanged. + + .. versionadded:: 17.1.0 + """ + if self._validator is None: + self._validator = meth + else: + self._validator = and_(self._validator, meth) + return meth + + def default(self, meth): + """ + Decorator that allows to set the default for an attribute. + + Returns *meth* unchanged. + + :raises DefaultAlreadySetError: If default has been set before. + + .. 
versionadded:: 17.1.0 + """ + if self._default is not NOTHING: + raise DefaultAlreadySetError() + + self._default = Factory(meth, takes_self=True) + + return meth + + +_CountingAttr = _add_eq(_add_repr(_CountingAttr)) + + +class Factory(object): + """ + Stores a factory callable. + + If passed as the default value to `attrs.field`, the factory is used to + generate a new value. + + :param callable factory: A callable that takes either none or exactly one + mandatory positional argument depending on *takes_self*. + :param bool takes_self: Pass the partially initialized instance that is + being initialized as a positional argument. + + .. versionadded:: 17.1.0 *takes_self* + """ + + __slots__ = ("factory", "takes_self") + + def __init__(self, factory, takes_self=False): + """ + `Factory` is part of the default machinery so if we want a default + value here, we have to implement it ourselves. + """ + self.factory = factory + self.takes_self = takes_self + + def __getstate__(self): + """ + Play nice with pickle. + """ + return tuple(getattr(self, name) for name in self.__slots__) + + def __setstate__(self, state): + """ + Play nice with pickle. + """ + for name, value in zip(self.__slots__, state): + setattr(self, name, value) + + +_f = [ + Attribute( + name=name, + default=NOTHING, + validator=None, + repr=True, + cmp=None, + eq=True, + order=False, + hash=True, + init=True, + inherited=False, + ) + for name in Factory.__slots__ +] + +Factory = _add_hash(_add_eq(_add_repr(Factory, attrs=_f), attrs=_f), attrs=_f) + + +def make_class(name, attrs, bases=(object,), **attributes_arguments): + """ + A quick way to create a new class called *name* with *attrs*. + + :param str name: The name for the new class. + + :param attrs: A list of names or a dictionary of mappings of names to + attributes. 
+ + If *attrs* is a list or an ordered dict (`dict` on Python 3.6+, + `collections.OrderedDict` otherwise), the order is deduced from + the order of the names or attributes inside *attrs*. Otherwise the + order of the definition of the attributes is used. + :type attrs: `list` or `dict` + + :param tuple bases: Classes that the new class will subclass. + + :param attributes_arguments: Passed unmodified to `attr.s`. + + :return: A new class with *attrs*. + :rtype: type + + .. versionadded:: 17.1.0 *bases* + .. versionchanged:: 18.1.0 If *attrs* is ordered, the order is retained. + """ + if isinstance(attrs, dict): + cls_dict = attrs + elif isinstance(attrs, (list, tuple)): + cls_dict = dict((a, attrib()) for a in attrs) + else: + raise TypeError("attrs argument must be a dict or a list.") + + pre_init = cls_dict.pop("__attrs_pre_init__", None) + post_init = cls_dict.pop("__attrs_post_init__", None) + user_init = cls_dict.pop("__init__", None) + + body = {} + if pre_init is not None: + body["__attrs_pre_init__"] = pre_init + if post_init is not None: + body["__attrs_post_init__"] = post_init + if user_init is not None: + body["__init__"] = user_init + + type_ = new_class(name, bases, {}, lambda ns: ns.update(body)) + + # For pickling to work, the __module__ variable needs to be set to the + # frame where the class is created. Bypass this step in environments where + # sys._getframe is not defined (Jython for example) or sys._getframe is not + # defined for arguments greater than 0 (IronPython). + try: + type_.__module__ = sys._getframe(1).f_globals.get( + "__name__", "__main__" + ) + except (AttributeError, ValueError): + pass + + # We do it here for proper warnings with meaningful stacklevel. 
+ cmp = attributes_arguments.pop("cmp", None) + ( + attributes_arguments["eq"], + attributes_arguments["order"], + ) = _determine_attrs_eq_order( + cmp, + attributes_arguments.get("eq"), + attributes_arguments.get("order"), + True, + ) + + return _attrs(these=cls_dict, **attributes_arguments)(type_) + + +# These are required by within this module so we define them here and merely +# import into .validators / .converters. + + +@attrs(slots=True, hash=True) +class _AndValidator(object): + """ + Compose many validators to a single one. + """ + + _validators = attrib() + + def __call__(self, inst, attr, value): + for v in self._validators: + v(inst, attr, value) + + +def and_(*validators): + """ + A validator that composes multiple validators into one. + + When called on a value, it runs all wrapped validators. + + :param callables validators: Arbitrary number of validators. + + .. versionadded:: 17.1.0 + """ + vals = [] + for validator in validators: + vals.extend( + validator._validators + if isinstance(validator, _AndValidator) + else [validator] + ) + + return _AndValidator(tuple(vals)) + + +def pipe(*converters): + """ + A converter that composes multiple converters into one. + + When called on a value, it runs all wrapped converters, returning the + *last* value. + + Type annotations will be inferred from the wrapped converters', if + they have any. + + :param callables converters: Arbitrary number of converters. + + .. versionadded:: 20.1.0 + """ + + def pipe_converter(val): + for converter in converters: + val = converter(val) + + return val + + if not PY2: + if not converters: + # If the converter list is empty, pipe_converter is the identity. + A = typing.TypeVar("A") + pipe_converter.__annotations__ = {"val": A, "return": A} + else: + # Get parameter type. 
+ sig = None + try: + sig = inspect.signature(converters[0]) + except (ValueError, TypeError): # inspect failed + pass + if sig: + params = list(sig.parameters.values()) + if ( + params + and params[0].annotation is not inspect.Parameter.empty + ): + pipe_converter.__annotations__["val"] = params[ + 0 + ].annotation + # Get return type. + sig = None + try: + sig = inspect.signature(converters[-1]) + except (ValueError, TypeError): # inspect failed + pass + if sig and sig.return_annotation is not inspect.Signature().empty: + pipe_converter.__annotations__[ + "return" + ] = sig.return_annotation + + return pipe_converter diff --git a/.venv/lib/python3.9/site-packages/attr/_next_gen.py b/.venv/lib/python3.9/site-packages/attr/_next_gen.py new file mode 100644 index 0000000..0682536 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/attr/_next_gen.py @@ -0,0 +1,216 @@ +# SPDX-License-Identifier: MIT + +""" +These are Python 3.6+-only and keyword-only APIs that call `attr.s` and +`attr.ib` with different default values. +""" + + +from functools import partial + +from . import setters +from ._funcs import asdict as _asdict +from ._funcs import astuple as _astuple +from ._make import ( + NOTHING, + _frozen_setattrs, + _ng_default_on_setattr, + attrib, + attrs, +) +from .exceptions import UnannotatedAttributeError + + +def define( + maybe_cls=None, + *, + these=None, + repr=None, + hash=None, + init=None, + slots=True, + frozen=False, + weakref_slot=True, + str=False, + auto_attribs=None, + kw_only=False, + cache_hash=False, + auto_exc=True, + eq=None, + order=False, + auto_detect=True, + getstate_setstate=None, + on_setattr=None, + field_transformer=None, + match_args=True, +): + r""" + Define an ``attrs`` class. + + Differences to the classic `attr.s` that it uses underneath: + + - Automatically detect whether or not *auto_attribs* should be `True` + (c.f. *auto_attribs* parameter). 
+ - If *frozen* is `False`, run converters and validators when setting an + attribute by default. + - *slots=True* (see :term:`slotted classes` for potentially surprising + behaviors) + - *auto_exc=True* + - *auto_detect=True* + - *order=False* + - *match_args=True* + - Some options that were only relevant on Python 2 or were kept around for + backwards-compatibility have been removed. + + Please note that these are all defaults and you can change them as you + wish. + + :param Optional[bool] auto_attribs: If set to `True` or `False`, it behaves + exactly like `attr.s`. If left `None`, `attr.s` will try to guess: + + 1. If any attributes are annotated and no unannotated `attrs.fields`\ s + are found, it assumes *auto_attribs=True*. + 2. Otherwise it assumes *auto_attribs=False* and tries to collect + `attrs.fields`\ s. + + For now, please refer to `attr.s` for the rest of the parameters. + + .. versionadded:: 20.1.0 + .. versionchanged:: 21.3.0 Converters are also run ``on_setattr``. + """ + + def do_it(cls, auto_attribs): + return attrs( + maybe_cls=cls, + these=these, + repr=repr, + hash=hash, + init=init, + slots=slots, + frozen=frozen, + weakref_slot=weakref_slot, + str=str, + auto_attribs=auto_attribs, + kw_only=kw_only, + cache_hash=cache_hash, + auto_exc=auto_exc, + eq=eq, + order=order, + auto_detect=auto_detect, + collect_by_mro=True, + getstate_setstate=getstate_setstate, + on_setattr=on_setattr, + field_transformer=field_transformer, + match_args=match_args, + ) + + def wrap(cls): + """ + Making this a wrapper ensures this code runs during class creation. + + We also ensure that frozen-ness of classes is inherited. + """ + nonlocal frozen, on_setattr + + had_on_setattr = on_setattr not in (None, setters.NO_OP) + + # By default, mutable classes convert & validate on setattr. 
+ if frozen is False and on_setattr is None: + on_setattr = _ng_default_on_setattr + + # However, if we subclass a frozen class, we inherit the immutability + # and disable on_setattr. + for base_cls in cls.__bases__: + if base_cls.__setattr__ is _frozen_setattrs: + if had_on_setattr: + raise ValueError( + "Frozen classes can't use on_setattr " + "(frozen-ness was inherited)." + ) + + on_setattr = setters.NO_OP + break + + if auto_attribs is not None: + return do_it(cls, auto_attribs) + + try: + return do_it(cls, True) + except UnannotatedAttributeError: + return do_it(cls, False) + + # maybe_cls's type depends on the usage of the decorator. It's a class + # if it's used as `@attrs` but ``None`` if used as `@attrs()`. + if maybe_cls is None: + return wrap + else: + return wrap(maybe_cls) + + +mutable = define +frozen = partial(define, frozen=True, on_setattr=None) + + +def field( + *, + default=NOTHING, + validator=None, + repr=True, + hash=None, + init=True, + metadata=None, + converter=None, + factory=None, + kw_only=False, + eq=None, + order=None, + on_setattr=None, +): + """ + Identical to `attr.ib`, except keyword-only and with some arguments + removed. + + .. versionadded:: 20.1.0 + """ + return attrib( + default=default, + validator=validator, + repr=repr, + hash=hash, + init=init, + metadata=metadata, + converter=converter, + factory=factory, + kw_only=kw_only, + eq=eq, + order=order, + on_setattr=on_setattr, + ) + + +def asdict(inst, *, recurse=True, filter=None, value_serializer=None): + """ + Same as `attr.asdict`, except that collections types are always retained + and dict is always used as *dict_factory*. + + .. 
versionadded:: 21.3.0 + """ + return _asdict( + inst=inst, + recurse=recurse, + filter=filter, + value_serializer=value_serializer, + retain_collection_types=True, + ) + + +def astuple(inst, *, recurse=True, filter=None): + """ + Same as `attr.astuple`, except that collections types are always retained + and `tuple` is always used as the *tuple_factory*. + + .. versionadded:: 21.3.0 + """ + return _astuple( + inst=inst, recurse=recurse, filter=filter, retain_collection_types=True + ) diff --git a/.venv/lib/python3.9/site-packages/attr/_version_info.py b/.venv/lib/python3.9/site-packages/attr/_version_info.py new file mode 100644 index 0000000..cdaeec3 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/attr/_version_info.py @@ -0,0 +1,87 @@ +# SPDX-License-Identifier: MIT + +from __future__ import absolute_import, division, print_function + +from functools import total_ordering + +from ._funcs import astuple +from ._make import attrib, attrs + + +@total_ordering +@attrs(eq=False, order=False, slots=True, frozen=True) +class VersionInfo(object): + """ + A version object that can be compared to tuple of length 1--4: + + >>> attr.VersionInfo(19, 1, 0, "final") <= (19, 2) + True + >>> attr.VersionInfo(19, 1, 0, "final") < (19, 1, 1) + True + >>> vi = attr.VersionInfo(19, 2, 0, "final") + >>> vi < (19, 1, 1) + False + >>> vi < (19,) + False + >>> vi == (19, 2,) + True + >>> vi == (19, 2, 1) + False + + .. versionadded:: 19.2 + """ + + year = attrib(type=int) + minor = attrib(type=int) + micro = attrib(type=int) + releaselevel = attrib(type=str) + + @classmethod + def _from_version_string(cls, s): + """ + Parse *s* and return a _VersionInfo. + """ + v = s.split(".") + if len(v) == 3: + v.append("final") + + return cls( + year=int(v[0]), minor=int(v[1]), micro=int(v[2]), releaselevel=v[3] + ) + + def _ensure_tuple(self, other): + """ + Ensure *other* is a tuple of a valid length. 
+ + Returns a possibly transformed *other* and ourselves as a tuple of + the same length as *other*. + """ + + if self.__class__ is other.__class__: + other = astuple(other) + + if not isinstance(other, tuple): + raise NotImplementedError + + if not (1 <= len(other) <= 4): + raise NotImplementedError + + return astuple(self)[: len(other)], other + + def __eq__(self, other): + try: + us, them = self._ensure_tuple(other) + except NotImplementedError: + return NotImplemented + + return us == them + + def __lt__(self, other): + try: + us, them = self._ensure_tuple(other) + except NotImplementedError: + return NotImplemented + + # Since alphabetically "dev0" < "final" < "post1" < "post2", we don't + # have to do anything special with releaselevel for now. + return us < them diff --git a/.venv/lib/python3.9/site-packages/attr/_version_info.pyi b/.venv/lib/python3.9/site-packages/attr/_version_info.pyi new file mode 100644 index 0000000..45ced08 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/attr/_version_info.pyi @@ -0,0 +1,9 @@ +class VersionInfo: + @property + def year(self) -> int: ... + @property + def minor(self) -> int: ... + @property + def micro(self) -> int: ... + @property + def releaselevel(self) -> str: ... diff --git a/.venv/lib/python3.9/site-packages/attr/converters.py b/.venv/lib/python3.9/site-packages/attr/converters.py new file mode 100644 index 0000000..1fb6c05 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/attr/converters.py @@ -0,0 +1,155 @@ +# SPDX-License-Identifier: MIT + +""" +Commonly useful converters. +""" + +from __future__ import absolute_import, division, print_function + +from ._compat import PY2 +from ._make import NOTHING, Factory, pipe + + +if not PY2: + import inspect + import typing + + +__all__ = [ + "default_if_none", + "optional", + "pipe", + "to_bool", +] + + +def optional(converter): + """ + A converter that allows an attribute to be optional. An optional attribute + is one which can be set to ``None``. 
+ + Type annotations will be inferred from the wrapped converter's, if it + has any. + + :param callable converter: the converter that is used for non-``None`` + values. + + .. versionadded:: 17.1.0 + """ + + def optional_converter(val): + if val is None: + return None + return converter(val) + + if not PY2: + sig = None + try: + sig = inspect.signature(converter) + except (ValueError, TypeError): # inspect failed + pass + if sig: + params = list(sig.parameters.values()) + if params and params[0].annotation is not inspect.Parameter.empty: + optional_converter.__annotations__["val"] = typing.Optional[ + params[0].annotation + ] + if sig.return_annotation is not inspect.Signature.empty: + optional_converter.__annotations__["return"] = typing.Optional[ + sig.return_annotation + ] + + return optional_converter + + +def default_if_none(default=NOTHING, factory=None): + """ + A converter that allows to replace ``None`` values by *default* or the + result of *factory*. + + :param default: Value to be used if ``None`` is passed. Passing an instance + of `attrs.Factory` is supported, however the ``takes_self`` option + is *not*. + :param callable factory: A callable that takes no parameters whose result + is used if ``None`` is passed. + + :raises TypeError: If **neither** *default* or *factory* is passed. + :raises TypeError: If **both** *default* and *factory* are passed. + :raises ValueError: If an instance of `attrs.Factory` is passed with + ``takes_self=True``. + + .. versionadded:: 18.2.0 + """ + if default is NOTHING and factory is None: + raise TypeError("Must pass either `default` or `factory`.") + + if default is not NOTHING and factory is not None: + raise TypeError( + "Must pass either `default` or `factory` but not both." + ) + + if factory is not None: + default = Factory(factory) + + if isinstance(default, Factory): + if default.takes_self: + raise ValueError( + "`takes_self` is not supported by default_if_none." 
+ ) + + def default_if_none_converter(val): + if val is not None: + return val + + return default.factory() + + else: + + def default_if_none_converter(val): + if val is not None: + return val + + return default + + return default_if_none_converter + + +def to_bool(val): + """ + Convert "boolean" strings (e.g., from env. vars.) to real booleans. + + Values mapping to :code:`True`: + + - :code:`True` + - :code:`"true"` / :code:`"t"` + - :code:`"yes"` / :code:`"y"` + - :code:`"on"` + - :code:`"1"` + - :code:`1` + + Values mapping to :code:`False`: + + - :code:`False` + - :code:`"false"` / :code:`"f"` + - :code:`"no"` / :code:`"n"` + - :code:`"off"` + - :code:`"0"` + - :code:`0` + + :raises ValueError: for any other value. + + .. versionadded:: 21.3.0 + """ + if isinstance(val, str): + val = val.lower() + truthy = {True, "true", "t", "yes", "y", "on", "1", 1} + falsy = {False, "false", "f", "no", "n", "off", "0", 0} + try: + if val in truthy: + return True + if val in falsy: + return False + except TypeError: + # Raised when "val" is not hashable (e.g., lists) + pass + raise ValueError("Cannot convert value to bool: {}".format(val)) diff --git a/.venv/lib/python3.9/site-packages/attr/converters.pyi b/.venv/lib/python3.9/site-packages/attr/converters.pyi new file mode 100644 index 0000000..0f58088 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/attr/converters.pyi @@ -0,0 +1,13 @@ +from typing import Callable, Optional, TypeVar, overload + +from . import _ConverterType + +_T = TypeVar("_T") + +def pipe(*validators: _ConverterType) -> _ConverterType: ... +def optional(converter: _ConverterType) -> _ConverterType: ... +@overload +def default_if_none(default: _T) -> _ConverterType: ... +@overload +def default_if_none(*, factory: Callable[[], _T]) -> _ConverterType: ... +def to_bool(val: str) -> bool: ... 
diff --git a/.venv/lib/python3.9/site-packages/attr/exceptions.py b/.venv/lib/python3.9/site-packages/attr/exceptions.py new file mode 100644 index 0000000..b2f1edc --- /dev/null +++ b/.venv/lib/python3.9/site-packages/attr/exceptions.py @@ -0,0 +1,94 @@ +# SPDX-License-Identifier: MIT + +from __future__ import absolute_import, division, print_function + + +class FrozenError(AttributeError): + """ + A frozen/immutable instance or attribute have been attempted to be + modified. + + It mirrors the behavior of ``namedtuples`` by using the same error message + and subclassing `AttributeError`. + + .. versionadded:: 20.1.0 + """ + + msg = "can't set attribute" + args = [msg] + + +class FrozenInstanceError(FrozenError): + """ + A frozen instance has been attempted to be modified. + + .. versionadded:: 16.1.0 + """ + + +class FrozenAttributeError(FrozenError): + """ + A frozen attribute has been attempted to be modified. + + .. versionadded:: 20.1.0 + """ + + +class AttrsAttributeNotFoundError(ValueError): + """ + An ``attrs`` function couldn't find an attribute that the user asked for. + + .. versionadded:: 16.2.0 + """ + + +class NotAnAttrsClassError(ValueError): + """ + A non-``attrs`` class has been passed into an ``attrs`` function. + + .. versionadded:: 16.2.0 + """ + + +class DefaultAlreadySetError(RuntimeError): + """ + A default has been set using ``attr.ib()`` and is attempted to be reset + using the decorator. + + .. versionadded:: 17.1.0 + """ + + +class UnannotatedAttributeError(RuntimeError): + """ + A class with ``auto_attribs=True`` has an ``attr.ib()`` without a type + annotation. + + .. versionadded:: 17.3.0 + """ + + +class PythonTooOldError(RuntimeError): + """ + It was attempted to use an ``attrs`` feature that requires a newer Python + version. + + .. versionadded:: 18.2.0 + """ + + +class NotCallableError(TypeError): + """ + A ``attr.ib()`` requiring a callable has been set with a value + that is not callable. + + .. 
versionadded:: 19.2.0 + """ + + def __init__(self, msg, value): + super(TypeError, self).__init__(msg, value) + self.msg = msg + self.value = value + + def __str__(self): + return str(self.msg) diff --git a/.venv/lib/python3.9/site-packages/attr/exceptions.pyi b/.venv/lib/python3.9/site-packages/attr/exceptions.pyi new file mode 100644 index 0000000..f268011 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/attr/exceptions.pyi @@ -0,0 +1,17 @@ +from typing import Any + +class FrozenError(AttributeError): + msg: str = ... + +class FrozenInstanceError(FrozenError): ... +class FrozenAttributeError(FrozenError): ... +class AttrsAttributeNotFoundError(ValueError): ... +class NotAnAttrsClassError(ValueError): ... +class DefaultAlreadySetError(RuntimeError): ... +class UnannotatedAttributeError(RuntimeError): ... +class PythonTooOldError(RuntimeError): ... + +class NotCallableError(TypeError): + msg: str = ... + value: Any = ... + def __init__(self, msg: str, value: Any) -> None: ... diff --git a/.venv/lib/python3.9/site-packages/attr/filters.py b/.venv/lib/python3.9/site-packages/attr/filters.py new file mode 100644 index 0000000..a1978a8 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/attr/filters.py @@ -0,0 +1,54 @@ +# SPDX-License-Identifier: MIT + +""" +Commonly useful filters for `attr.asdict`. +""" + +from __future__ import absolute_import, division, print_function + +from ._compat import isclass +from ._make import Attribute + + +def _split_what(what): + """ + Returns a tuple of `frozenset`s of classes and attributes. + """ + return ( + frozenset(cls for cls in what if isclass(cls)), + frozenset(cls for cls in what if isinstance(cls, Attribute)), + ) + + +def include(*what): + """ + Include *what*. + + :param what: What to include. 
+ :type what: `list` of `type` or `attrs.Attribute`\\ s + + :rtype: `callable` + """ + cls, attrs = _split_what(what) + + def include_(attribute, value): + return value.__class__ in cls or attribute in attrs + + return include_ + + +def exclude(*what): + """ + Exclude *what*. + + :param what: What to exclude. + :type what: `list` of classes or `attrs.Attribute`\\ s. + + :rtype: `callable` + """ + cls, attrs = _split_what(what) + + def exclude_(attribute, value): + return value.__class__ not in cls and attribute not in attrs + + return exclude_ diff --git a/.venv/lib/python3.9/site-packages/attr/filters.pyi b/.venv/lib/python3.9/site-packages/attr/filters.pyi new file mode 100644 index 0000000..9938668 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/attr/filters.pyi @@ -0,0 +1,6 @@ +from typing import Any, Union + +from . import Attribute, _FilterType + +def include(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ... +def exclude(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ... diff --git a/.venv/lib/python3.9/site-packages/attr/py.typed b/.venv/lib/python3.9/site-packages/attr/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.9/site-packages/attr/setters.py b/.venv/lib/python3.9/site-packages/attr/setters.py new file mode 100644 index 0000000..b1cbb5d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/attr/setters.py @@ -0,0 +1,79 @@ +# SPDX-License-Identifier: MIT + +""" +Commonly used hooks for on_setattr. +""" + +from __future__ import absolute_import, division, print_function + +from . import _config +from .exceptions import FrozenAttributeError + + +def pipe(*setters): + """ + Run all *setters* and return the return value of the last one. + + .. 
versionadded:: 20.1.0 + """ + + def wrapped_pipe(instance, attrib, new_value): + rv = new_value + + for setter in setters: + rv = setter(instance, attrib, rv) + + return rv + + return wrapped_pipe + + +def frozen(_, __, ___): + """ + Prevent an attribute to be modified. + + .. versionadded:: 20.1.0 + """ + raise FrozenAttributeError() + + +def validate(instance, attrib, new_value): + """ + Run *attrib*'s validator on *new_value* if it has one. + + .. versionadded:: 20.1.0 + """ + if _config._run_validators is False: + return new_value + + v = attrib.validator + if not v: + return new_value + + v(instance, attrib, new_value) + + return new_value + + +def convert(instance, attrib, new_value): + """ + Run *attrib*'s converter -- if it has one -- on *new_value* and return the + result. + + .. versionadded:: 20.1.0 + """ + c = attrib.converter + if c: + return c(new_value) + + return new_value + + +NO_OP = object() +""" +Sentinel for disabling class-wide *on_setattr* hooks for certain attributes. + +Does not work in `pipe` or within lists. + +.. versionadded:: 20.1.0 +""" diff --git a/.venv/lib/python3.9/site-packages/attr/setters.pyi b/.venv/lib/python3.9/site-packages/attr/setters.pyi new file mode 100644 index 0000000..3f5603c --- /dev/null +++ b/.venv/lib/python3.9/site-packages/attr/setters.pyi @@ -0,0 +1,19 @@ +from typing import Any, NewType, NoReturn, TypeVar, cast + +from . import Attribute, _OnSetAttrType + +_T = TypeVar("_T") + +def frozen( + instance: Any, attribute: Attribute[Any], new_value: Any +) -> NoReturn: ... +def pipe(*setters: _OnSetAttrType) -> _OnSetAttrType: ... +def validate(instance: Any, attribute: Attribute[_T], new_value: _T) -> _T: ... + +# convert is allowed to return Any, because they can be chained using pipe. +def convert( + instance: Any, attribute: Attribute[Any], new_value: Any +) -> Any: ... 
+ +_NoOpType = NewType("_NoOpType", object) +NO_OP: _NoOpType diff --git a/.venv/lib/python3.9/site-packages/attr/validators.py b/.venv/lib/python3.9/site-packages/attr/validators.py new file mode 100644 index 0000000..0b0c834 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/attr/validators.py @@ -0,0 +1,561 @@ +# SPDX-License-Identifier: MIT + +""" +Commonly useful validators. +""" + +from __future__ import absolute_import, division, print_function + +import operator +import re + +from contextlib import contextmanager + +from ._config import get_run_validators, set_run_validators +from ._make import _AndValidator, and_, attrib, attrs +from .exceptions import NotCallableError + + +try: + Pattern = re.Pattern +except AttributeError: # Python <3.7 lacks a Pattern type. + Pattern = type(re.compile("")) + + +__all__ = [ + "and_", + "deep_iterable", + "deep_mapping", + "disabled", + "ge", + "get_disabled", + "gt", + "in_", + "instance_of", + "is_callable", + "le", + "lt", + "matches_re", + "max_len", + "optional", + "provides", + "set_disabled", +] + + +def set_disabled(disabled): + """ + Globally disable or enable running validators. + + By default, they are run. + + :param disabled: If ``True``, disable running all validators. + :type disabled: bool + + .. warning:: + + This function is not thread-safe! + + .. versionadded:: 21.3.0 + """ + set_run_validators(not disabled) + + +def get_disabled(): + """ + Return a bool indicating whether validators are currently disabled or not. + + :return: ``True`` if validators are currently disabled. + :rtype: bool + + .. versionadded:: 21.3.0 + """ + return not get_run_validators() + + +@contextmanager +def disabled(): + """ + Context manager that disables running validators within its context. + + .. warning:: + + This context manager is not thread-safe! + + .. 
versionadded:: 21.3.0 + """ + set_run_validators(False) + try: + yield + finally: + set_run_validators(True) + + +@attrs(repr=False, slots=True, hash=True) +class _InstanceOfValidator(object): + type = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not isinstance(value, self.type): + raise TypeError( + "'{name}' must be {type!r} (got {value!r} that is a " + "{actual!r}).".format( + name=attr.name, + type=self.type, + actual=value.__class__, + value=value, + ), + attr, + self.type, + value, + ) + + def __repr__(self): + return "".format( + type=self.type + ) + + +def instance_of(type): + """ + A validator that raises a `TypeError` if the initializer is called + with a wrong type for this particular attribute (checks are performed using + `isinstance` therefore it's also valid to pass a tuple of types). + + :param type: The type to check for. + :type type: type or tuple of types + + :raises TypeError: With a human readable error message, the attribute + (of type `attrs.Attribute`), the expected type, and the value it + got. + """ + return _InstanceOfValidator(type) + + +@attrs(repr=False, frozen=True, slots=True) +class _MatchesReValidator(object): + pattern = attrib() + match_func = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not self.match_func(value): + raise ValueError( + "'{name}' must match regex {pattern!r}" + " ({value!r} doesn't)".format( + name=attr.name, pattern=self.pattern.pattern, value=value + ), + attr, + self.pattern, + value, + ) + + def __repr__(self): + return "".format( + pattern=self.pattern + ) + + +def matches_re(regex, flags=0, func=None): + r""" + A validator that raises `ValueError` if the initializer is called + with a string that doesn't match *regex*. 
+ + :param regex: a regex string or precompiled pattern to match against + :param int flags: flags that will be passed to the underlying re function + (default 0) + :param callable func: which underlying `re` function to call (options + are `re.fullmatch`, `re.search`, `re.match`, default + is ``None`` which means either `re.fullmatch` or an emulation of + it on Python 2). For performance reasons, they won't be used directly + but on a pre-`re.compile`\ ed pattern. + + .. versionadded:: 19.2.0 + .. versionchanged:: 21.3.0 *regex* can be a pre-compiled pattern. + """ + fullmatch = getattr(re, "fullmatch", None) + valid_funcs = (fullmatch, None, re.search, re.match) + if func not in valid_funcs: + raise ValueError( + "'func' must be one of {}.".format( + ", ".join( + sorted( + e and e.__name__ or "None" for e in set(valid_funcs) + ) + ) + ) + ) + + if isinstance(regex, Pattern): + if flags: + raise TypeError( + "'flags' can only be used with a string pattern; " + "pass flags to re.compile() instead" + ) + pattern = regex + else: + pattern = re.compile(regex, flags) + + if func is re.match: + match_func = pattern.match + elif func is re.search: + match_func = pattern.search + elif fullmatch: + match_func = pattern.fullmatch + else: # Python 2 fullmatch emulation (https://bugs.python.org/issue16203) + pattern = re.compile( + r"(?:{})\Z".format(pattern.pattern), pattern.flags + ) + match_func = pattern.match + + return _MatchesReValidator(pattern, match_func) + + +@attrs(repr=False, slots=True, hash=True) +class _ProvidesValidator(object): + interface = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. 
+ """ + if not self.interface.providedBy(value): + raise TypeError( + "'{name}' must provide {interface!r} which {value!r} " + "doesn't.".format( + name=attr.name, interface=self.interface, value=value + ), + attr, + self.interface, + value, + ) + + def __repr__(self): + return "".format( + interface=self.interface + ) + + +def provides(interface): + """ + A validator that raises a `TypeError` if the initializer is called + with an object that does not provide the requested *interface* (checks are + performed using ``interface.providedBy(value)`` (see `zope.interface + `_). + + :param interface: The interface to check for. + :type interface: ``zope.interface.Interface`` + + :raises TypeError: With a human readable error message, the attribute + (of type `attrs.Attribute`), the expected interface, and the + value it got. + """ + return _ProvidesValidator(interface) + + +@attrs(repr=False, slots=True, hash=True) +class _OptionalValidator(object): + validator = attrib() + + def __call__(self, inst, attr, value): + if value is None: + return + + self.validator(inst, attr, value) + + def __repr__(self): + return "".format( + what=repr(self.validator) + ) + + +def optional(validator): + """ + A validator that makes an attribute optional. An optional attribute is one + which can be set to ``None`` in addition to satisfying the requirements of + the sub-validator. + + :param validator: A validator (or a list of validators) that is used for + non-``None`` values. + :type validator: callable or `list` of callables. + + .. versionadded:: 15.1.0 + .. versionchanged:: 17.1.0 *validator* can be a list of validators. + """ + if isinstance(validator, list): + return _OptionalValidator(_AndValidator(validator)) + return _OptionalValidator(validator) + + +@attrs(repr=False, slots=True, hash=True) +class _InValidator(object): + options = attrib() + + def __call__(self, inst, attr, value): + try: + in_options = value in self.options + except TypeError: # e.g. 
`1 in "abc"` + in_options = False + + if not in_options: + raise ValueError( + "'{name}' must be in {options!r} (got {value!r})".format( + name=attr.name, options=self.options, value=value + ) + ) + + def __repr__(self): + return "".format( + options=self.options + ) + + +def in_(options): + """ + A validator that raises a `ValueError` if the initializer is called + with a value that does not belong in the options provided. The check is + performed using ``value in options``. + + :param options: Allowed options. + :type options: list, tuple, `enum.Enum`, ... + + :raises ValueError: With a human readable error message, the attribute (of + type `attrs.Attribute`), the expected options, and the value it + got. + + .. versionadded:: 17.1.0 + """ + return _InValidator(options) + + +@attrs(repr=False, slots=False, hash=True) +class _IsCallableValidator(object): + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not callable(value): + message = ( + "'{name}' must be callable " + "(got {value!r} that is a {actual!r})." + ) + raise NotCallableError( + msg=message.format( + name=attr.name, value=value, actual=value.__class__ + ), + value=value, + ) + + def __repr__(self): + return "" + + +def is_callable(): + """ + A validator that raises a `attr.exceptions.NotCallableError` if the + initializer is called with a value for this particular attribute + that is not callable. + + .. versionadded:: 19.1.0 + + :raises `attr.exceptions.NotCallableError`: With a human readable error + message containing the attribute (`attrs.Attribute`) name, + and the value it got. 
+ """ + return _IsCallableValidator() + + +@attrs(repr=False, slots=True, hash=True) +class _DeepIterable(object): + member_validator = attrib(validator=is_callable()) + iterable_validator = attrib( + default=None, validator=optional(is_callable()) + ) + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if self.iterable_validator is not None: + self.iterable_validator(inst, attr, value) + + for member in value: + self.member_validator(inst, attr, member) + + def __repr__(self): + iterable_identifier = ( + "" + if self.iterable_validator is None + else " {iterable!r}".format(iterable=self.iterable_validator) + ) + return ( + "" + ).format( + iterable_identifier=iterable_identifier, + member=self.member_validator, + ) + + +def deep_iterable(member_validator, iterable_validator=None): + """ + A validator that performs deep validation of an iterable. + + :param member_validator: Validator to apply to iterable members + :param iterable_validator: Validator to apply to iterable itself + (optional) + + .. versionadded:: 19.1.0 + + :raises TypeError: if any sub-validators fail + """ + return _DeepIterable(member_validator, iterable_validator) + + +@attrs(repr=False, slots=True, hash=True) +class _DeepMapping(object): + key_validator = attrib(validator=is_callable()) + value_validator = attrib(validator=is_callable()) + mapping_validator = attrib(default=None, validator=optional(is_callable())) + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. 
+ """ + if self.mapping_validator is not None: + self.mapping_validator(inst, attr, value) + + for key in value: + self.key_validator(inst, attr, key) + self.value_validator(inst, attr, value[key]) + + def __repr__(self): + return ( + "" + ).format(key=self.key_validator, value=self.value_validator) + + +def deep_mapping(key_validator, value_validator, mapping_validator=None): + """ + A validator that performs deep validation of a dictionary. + + :param key_validator: Validator to apply to dictionary keys + :param value_validator: Validator to apply to dictionary values + :param mapping_validator: Validator to apply to top-level mapping + attribute (optional) + + .. versionadded:: 19.1.0 + + :raises TypeError: if any sub-validators fail + """ + return _DeepMapping(key_validator, value_validator, mapping_validator) + + +@attrs(repr=False, frozen=True, slots=True) +class _NumberValidator(object): + bound = attrib() + compare_op = attrib() + compare_func = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not self.compare_func(value, self.bound): + raise ValueError( + "'{name}' must be {op} {bound}: {value}".format( + name=attr.name, + op=self.compare_op, + bound=self.bound, + value=value, + ) + ) + + def __repr__(self): + return "".format( + op=self.compare_op, bound=self.bound + ) + + +def lt(val): + """ + A validator that raises `ValueError` if the initializer is called + with a number larger or equal to *val*. + + :param val: Exclusive upper bound for values + + .. versionadded:: 21.3.0 + """ + return _NumberValidator(val, "<", operator.lt) + + +def le(val): + """ + A validator that raises `ValueError` if the initializer is called + with a number greater than *val*. + + :param val: Inclusive upper bound for values + + .. 
versionadded:: 21.3.0 + """ + return _NumberValidator(val, "<=", operator.le) + + +def ge(val): + """ + A validator that raises `ValueError` if the initializer is called + with a number smaller than *val*. + + :param val: Inclusive lower bound for values + + .. versionadded:: 21.3.0 + """ + return _NumberValidator(val, ">=", operator.ge) + + +def gt(val): + """ + A validator that raises `ValueError` if the initializer is called + with a number smaller or equal to *val*. + + :param val: Exclusive lower bound for values + + .. versionadded:: 21.3.0 + """ + return _NumberValidator(val, ">", operator.gt) + + +@attrs(repr=False, frozen=True, slots=True) +class _MaxLengthValidator(object): + max_length = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if len(value) > self.max_length: + raise ValueError( + "Length of '{name}' must be <= {max}: {len}".format( + name=attr.name, max=self.max_length, len=len(value) + ) + ) + + def __repr__(self): + return "".format(max=self.max_length) + + +def max_len(length): + """ + A validator that raises `ValueError` if the initializer is called + with a string or iterable that is longer than *length*. + + :param int length: Maximum length of the string or iterable + + .. versionadded:: 21.3.0 + """ + return _MaxLengthValidator(length) diff --git a/.venv/lib/python3.9/site-packages/attr/validators.pyi b/.venv/lib/python3.9/site-packages/attr/validators.pyi new file mode 100644 index 0000000..5e00b85 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/attr/validators.pyi @@ -0,0 +1,78 @@ +from typing import ( + Any, + AnyStr, + Callable, + Container, + ContextManager, + Iterable, + List, + Mapping, + Match, + Optional, + Pattern, + Tuple, + Type, + TypeVar, + Union, + overload, +) + +from . 
import _ValidatorType + +_T = TypeVar("_T") +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") +_T3 = TypeVar("_T3") +_I = TypeVar("_I", bound=Iterable) +_K = TypeVar("_K") +_V = TypeVar("_V") +_M = TypeVar("_M", bound=Mapping) + +def set_disabled(run: bool) -> None: ... +def get_disabled() -> bool: ... +def disabled() -> ContextManager[None]: ... + +# To be more precise on instance_of use some overloads. +# If there are more than 3 items in the tuple then we fall back to Any +@overload +def instance_of(type: Type[_T]) -> _ValidatorType[_T]: ... +@overload +def instance_of(type: Tuple[Type[_T]]) -> _ValidatorType[_T]: ... +@overload +def instance_of( + type: Tuple[Type[_T1], Type[_T2]] +) -> _ValidatorType[Union[_T1, _T2]]: ... +@overload +def instance_of( + type: Tuple[Type[_T1], Type[_T2], Type[_T3]] +) -> _ValidatorType[Union[_T1, _T2, _T3]]: ... +@overload +def instance_of(type: Tuple[type, ...]) -> _ValidatorType[Any]: ... +def provides(interface: Any) -> _ValidatorType[Any]: ... +def optional( + validator: Union[_ValidatorType[_T], List[_ValidatorType[_T]]] +) -> _ValidatorType[Optional[_T]]: ... +def in_(options: Container[_T]) -> _ValidatorType[_T]: ... +def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ... +def matches_re( + regex: Union[Pattern[AnyStr], AnyStr], + flags: int = ..., + func: Optional[ + Callable[[AnyStr, AnyStr, int], Optional[Match[AnyStr]]] + ] = ..., +) -> _ValidatorType[AnyStr]: ... +def deep_iterable( + member_validator: _ValidatorType[_T], + iterable_validator: Optional[_ValidatorType[_I]] = ..., +) -> _ValidatorType[_I]: ... +def deep_mapping( + key_validator: _ValidatorType[_K], + value_validator: _ValidatorType[_V], + mapping_validator: Optional[_ValidatorType[_M]] = ..., +) -> _ValidatorType[_M]: ... +def is_callable() -> _ValidatorType[_T]: ... +def lt(val: _T) -> _ValidatorType[_T]: ... +def le(val: _T) -> _ValidatorType[_T]: ... +def ge(val: _T) -> _ValidatorType[_T]: ... +def gt(val: _T) -> _ValidatorType[_T]: ... 
+def max_len(length: int) -> _ValidatorType[_T]: ... diff --git a/.venv/lib/python3.9/site-packages/attrs-21.4.0.dist-info/AUTHORS.rst b/.venv/lib/python3.9/site-packages/attrs-21.4.0.dist-info/AUTHORS.rst new file mode 100644 index 0000000..f14ef6c --- /dev/null +++ b/.venv/lib/python3.9/site-packages/attrs-21.4.0.dist-info/AUTHORS.rst @@ -0,0 +1,11 @@ +Credits +======= + +``attrs`` is written and maintained by `Hynek Schlawack `_. + +The development is kindly supported by `Variomedia AG `_. + +A full list of contributors can be found in `GitHub's overview `_. + +It’s the spiritual successor of `characteristic `_ and aspires to fix some of it clunkiness and unfortunate decisions. +Both were inspired by Twisted’s `FancyEqMixin `_ but both are implemented using class decorators because `subclassing is bad for you `_, m’kay? diff --git a/.venv/lib/python3.9/site-packages/attrs-21.4.0.dist-info/INSTALLER b/.venv/lib/python3.9/site-packages/attrs-21.4.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/attrs-21.4.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.9/site-packages/attrs-21.4.0.dist-info/LICENSE b/.venv/lib/python3.9/site-packages/attrs-21.4.0.dist-info/LICENSE new file mode 100644 index 0000000..7ae3df9 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/attrs-21.4.0.dist-info/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015 Hynek Schlawack + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be 
included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/.venv/lib/python3.9/site-packages/attrs-21.4.0.dist-info/METADATA b/.venv/lib/python3.9/site-packages/attrs-21.4.0.dist-info/METADATA new file mode 100644 index 0000000..aa327d5 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/attrs-21.4.0.dist-info/METADATA @@ -0,0 +1,232 @@ +Metadata-Version: 2.1 +Name: attrs +Version: 21.4.0 +Summary: Classes Without Boilerplate +Home-page: https://www.attrs.org/ +Author: Hynek Schlawack +Author-email: hs@ox.cx +Maintainer: Hynek Schlawack +Maintainer-email: hs@ox.cx +License: MIT +Project-URL: Documentation, https://www.attrs.org/ +Project-URL: Changelog, https://www.attrs.org/en/stable/changelog.html +Project-URL: Bug Tracker, https://github.com/python-attrs/attrs/issues +Project-URL: Source Code, https://github.com/python-attrs/attrs +Project-URL: Funding, https://github.com/sponsors/hynek +Project-URL: Tidelift, https://tidelift.com/subscription/pkg/pypi-attrs?utm_source=pypi-attrs&utm_medium=pypi +Project-URL: Ko-fi, https://ko-fi.com/the_hynek +Keywords: class,attribute,boilerplate +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Natural Language :: English +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 
+Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.* +Description-Content-Type: text/x-rst +License-File: LICENSE +License-File: AUTHORS.rst +Provides-Extra: dev +Requires-Dist: coverage[toml] (>=5.0.2) ; extra == 'dev' +Requires-Dist: hypothesis ; extra == 'dev' +Requires-Dist: pympler ; extra == 'dev' +Requires-Dist: pytest (>=4.3.0) ; extra == 'dev' +Requires-Dist: six ; extra == 'dev' +Requires-Dist: mypy ; extra == 'dev' +Requires-Dist: pytest-mypy-plugins ; extra == 'dev' +Requires-Dist: zope.interface ; extra == 'dev' +Requires-Dist: furo ; extra == 'dev' +Requires-Dist: sphinx ; extra == 'dev' +Requires-Dist: sphinx-notfound-page ; extra == 'dev' +Requires-Dist: pre-commit ; extra == 'dev' +Requires-Dist: cloudpickle ; (platform_python_implementation == "CPython") and extra == 'dev' +Provides-Extra: docs +Requires-Dist: furo ; extra == 'docs' +Requires-Dist: sphinx ; extra == 'docs' +Requires-Dist: zope.interface ; extra == 'docs' +Requires-Dist: sphinx-notfound-page ; extra == 'docs' +Provides-Extra: tests +Requires-Dist: coverage[toml] (>=5.0.2) ; extra == 'tests' +Requires-Dist: hypothesis ; extra == 'tests' +Requires-Dist: pympler ; extra == 'tests' +Requires-Dist: pytest (>=4.3.0) ; extra == 'tests' +Requires-Dist: six ; extra == 'tests' +Requires-Dist: mypy ; extra == 'tests' +Requires-Dist: pytest-mypy-plugins ; extra == 'tests' +Requires-Dist: zope.interface ; extra == 
'tests' +Requires-Dist: cloudpickle ; (platform_python_implementation == "CPython") and extra == 'tests' +Provides-Extra: tests_no_zope +Requires-Dist: coverage[toml] (>=5.0.2) ; extra == 'tests_no_zope' +Requires-Dist: hypothesis ; extra == 'tests_no_zope' +Requires-Dist: pympler ; extra == 'tests_no_zope' +Requires-Dist: pytest (>=4.3.0) ; extra == 'tests_no_zope' +Requires-Dist: six ; extra == 'tests_no_zope' +Requires-Dist: mypy ; extra == 'tests_no_zope' +Requires-Dist: pytest-mypy-plugins ; extra == 'tests_no_zope' +Requires-Dist: cloudpickle ; (platform_python_implementation == "CPython") and extra == 'tests_no_zope' + + +.. image:: https://www.attrs.org/en/stable/_static/attrs_logo.png + :alt: attrs logo + :align: center + + +``attrs`` is the Python package that will bring back the **joy** of **writing classes** by relieving you from the drudgery of implementing object protocols (aka `dunder methods `_). +`Trusted by NASA `_ for Mars missions since 2020! + +Its main goal is to help you to write **concise** and **correct** software without slowing down your code. + +.. teaser-end + +For that, it gives you a class decorator and a way to declaratively define the attributes on that class: + +.. -code-begin- + +.. code-block:: pycon + + >>> from attrs import asdict, define, make_class, Factory + + >>> @define + ... class SomeClass: + ... a_number: int = 42 + ... list_of_numbers: list[int] = Factory(list) + ... + ... def hard_math(self, another_number): + ... 
return self.a_number + sum(self.list_of_numbers) * another_number + + + >>> sc = SomeClass(1, [1, 2, 3]) + >>> sc + SomeClass(a_number=1, list_of_numbers=[1, 2, 3]) + + >>> sc.hard_math(3) + 19 + >>> sc == SomeClass(1, [1, 2, 3]) + True + >>> sc != SomeClass(2, [3, 2, 1]) + True + + >>> asdict(sc) + {'a_number': 1, 'list_of_numbers': [1, 2, 3]} + + >>> SomeClass() + SomeClass(a_number=42, list_of_numbers=[]) + + >>> C = make_class("C", ["a", "b"]) + >>> C("foo", "bar") + C(a='foo', b='bar') + + +After *declaring* your attributes ``attrs`` gives you: + +- a concise and explicit overview of the class's attributes, +- a nice human-readable ``__repr__``, +- a equality-checking methods, +- an initializer, +- and much more, + +*without* writing dull boilerplate code again and again and *without* runtime performance penalties. + +**Hate type annotations**!? +No problem! +Types are entirely **optional** with ``attrs``. +Simply assign ``attrs.field()`` to the attributes instead of annotating them with types. + +---- + +This example uses ``attrs``'s modern APIs that have been introduced in version 20.1.0, and the ``attrs`` package import name that has been added in version 21.3.0. +The classic APIs (``@attr.s``, ``attr.ib``, plus their serious business aliases) and the ``attr`` package import name will remain **indefinitely**. + +Please check out `On The Core API Names `_ for a more in-depth explanation. + + +Data Classes +============ + +On the tin, ``attrs`` might remind you of ``dataclasses`` (and indeed, ``dataclasses`` are a descendant of ``attrs``). +In practice it does a lot more and is more flexible. +For instance it allows you to define `special handling of NumPy arrays for equality checks `_, or allows more ways to `plug into the initialization process `_. + +For more details, please refer to our `comparison page `_. + + +.. -getting-help- + +Getting Help +============ + +Please use the ``python-attrs`` tag on `Stack Overflow `_ to get help. 
+ +Answering questions of your fellow developers is also a great way to help the project! + + +.. -project-information- + +Project Information +=================== + +``attrs`` is released under the `MIT `_ license, +its documentation lives at `Read the Docs `_, +the code on `GitHub `_, +and the latest release on `PyPI `_. +It’s rigorously tested on Python 2.7, 3.5+, and PyPy. + +We collect information on **third-party extensions** in our `wiki `_. +Feel free to browse and add your own! + +If you'd like to contribute to ``attrs`` you're most welcome and we've written `a little guide `_ to get you started! + + +``attrs`` for Enterprise +------------------------ + +Available as part of the Tidelift Subscription. + +The maintainers of ``attrs`` and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source packages you use to build your applications. +Save time, reduce risk, and improve code health, while paying the maintainers of the exact packages you use. +`Learn more. `_ + + +Release Information +=================== + +21.4.0 (2021-12-29) +------------------- + +Changes +^^^^^^^ + +- Fixed the test suite on PyPy3.8 where ``cloudpickle`` does not work. + `#892 `_ +- Fixed ``coverage report`` for projects that use ``attrs`` and don't set a ``--source``. + `#895 `_, + `#896 `_ + +`Full changelog `_. + +Credits +======= + +``attrs`` is written and maintained by `Hynek Schlawack `_. + +The development is kindly supported by `Variomedia AG `_. + +A full list of contributors can be found in `GitHub's overview `_. + +It’s the spiritual successor of `characteristic `_ and aspires to fix some of it clunkiness and unfortunate decisions. +Both were inspired by Twisted’s `FancyEqMixin `_ but both are implemented using class decorators because `subclassing is bad for you `_, m’kay? 
+ + diff --git a/.venv/lib/python3.9/site-packages/attrs-21.4.0.dist-info/RECORD b/.venv/lib/python3.9/site-packages/attrs-21.4.0.dist-info/RECORD new file mode 100644 index 0000000..3a40298 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/attrs-21.4.0.dist-info/RECORD @@ -0,0 +1,56 @@ +attr/__init__.py,sha256=_zhJ4O8Q5KR5gaIrjX73vkR5nA6NjfpMGXQChEdNljI,1667 +attr/__init__.pyi,sha256=ubRkstoRHPpQN17iA0OCh8waIwZ5NeJgbz0lwI8XUjY,15100 +attr/__pycache__/__init__.cpython-39.pyc,, +attr/__pycache__/_cmp.cpython-39.pyc,, +attr/__pycache__/_compat.cpython-39.pyc,, +attr/__pycache__/_config.cpython-39.pyc,, +attr/__pycache__/_funcs.cpython-39.pyc,, +attr/__pycache__/_make.cpython-39.pyc,, +attr/__pycache__/_next_gen.cpython-39.pyc,, +attr/__pycache__/_version_info.cpython-39.pyc,, +attr/__pycache__/converters.cpython-39.pyc,, +attr/__pycache__/exceptions.cpython-39.pyc,, +attr/__pycache__/filters.cpython-39.pyc,, +attr/__pycache__/setters.cpython-39.pyc,, +attr/__pycache__/validators.cpython-39.pyc,, +attr/_cmp.py,sha256=JP0N7OIyTqIR3prUDfMZOR4DV4tlV_xXf39-bQg7xOo,4165 +attr/_cmp.pyi,sha256=oyjJVytrwwkUJOoe332IiYzp6pCVZEKKcKveH-ev604,317 +attr/_compat.py,sha256=i8u27AAK_4SzQnmTf3aliGV27UdYbJxdZ-O0tOHbLU8,8396 +attr/_config.py,sha256=aj1Lh8t2CuVa5nSxgCrLQtg_ZSdO8ZKeNJQd6RvpIp8,892 +attr/_funcs.py,sha256=sm_D12y2IyRW_bCnR7M-O7U5qHaieXr0BzINwJ7_K38,14753 +attr/_make.py,sha256=D05j0_ckcVIRFn2xHch5SPUCwh3t7WpeFj-3Ku9SocQ,102736 +attr/_next_gen.py,sha256=s5jCsVEQ4IhOjAykP4N0ETaWpg0RsgQttMvEZErUrhQ,5752 +attr/_version_info.py,sha256=sxD9yNai0jGbur_-RGEQHbgV2YX5_5G9PhrhBA5pA54,2194 +attr/_version_info.pyi,sha256=x_M3L3WuB7r_ULXAWjx959udKQ4HLB8l-hsc1FDGNvk,209 +attr/converters.py,sha256=uiiWTz8GLJe8I1Ty7UICK1DegVUnqHTXbOSnar7g7Nk,4078 +attr/converters.pyi,sha256=MQo7iEzPNVoFpKqD30sVwgVpdNoIeSCF2nsXvoxLZ-Y,416 +attr/exceptions.py,sha256=BMg7AljkJnvG-irMwL2TBHYlaLBXhSKnzoEWo4e42Zw,1981 +attr/exceptions.pyi,sha256=zZq8bCUnKAy9mDtBEw42ZhPhAUIHoTKedDQInJD883M,539 
+attr/filters.py,sha256=JGZgvPGkdOfttkoL6XhXS6ZCoaVV5nZ8GCYeZNUN_mE,1124 +attr/filters.pyi,sha256=_Sm80jGySETX_Clzdkon5NHVjQWRl3Y3liQKZX1czXc,215 +attr/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +attr/setters.py,sha256=rH_UtQuHgQEC7hfZyMO_SJW0R1Gus7-a83U8igZfqs8,1466 +attr/setters.pyi,sha256=7dM10rqpQVDW0y-iJUnq8rabdO5Wx2Sbo5LwNa0IXl0,573 +attr/validators.py,sha256=jVE9roaSOmTf0dJNSLHNaQNilkrlzc3pNNBKmv0g7pk,15966 +attr/validators.pyi,sha256=adn6rNbIXmRXlg_FKrTmWj0dOX0vKTsGG82Jd3YcJbQ,2268 +attrs-21.4.0.dist-info/AUTHORS.rst,sha256=wsqCNbGz_mklcJrt54APIZHZpoTIJLkXqEhhn4Nd8hc,752 +attrs-21.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +attrs-21.4.0.dist-info/LICENSE,sha256=v2WaKLSSQGAvVrvfSQy-LsUJsVuY-Z17GaUsdA4yeGM,1082 +attrs-21.4.0.dist-info/METADATA,sha256=WwgR4MfxE55PpGGv21UOEOEtXZGCqwekfXYg-JgA5HY,9810 +attrs-21.4.0.dist-info/RECORD,, +attrs-21.4.0.dist-info/WHEEL,sha256=z9j0xAa_JmUKMpmz72K0ZGALSM_n-wQVmGbleXx2VHg,110 +attrs-21.4.0.dist-info/top_level.txt,sha256=AGbmKnOtYpdkLRsDRQVSBIwfL32pAQ6BSo1mt-BxI7M,11 +attrs/__init__.py,sha256=CeyxLGVViAEKKsLOLaif8vF3vs1a28vsrRVLv7eMEgM,1109 +attrs/__init__.pyi,sha256=57aCxUJukK9lZlrUgk9RuWiBiPY5DzDKJAJkhbrStYw,1982 +attrs/__pycache__/__init__.cpython-39.pyc,, +attrs/__pycache__/converters.cpython-39.pyc,, +attrs/__pycache__/exceptions.cpython-39.pyc,, +attrs/__pycache__/filters.cpython-39.pyc,, +attrs/__pycache__/setters.cpython-39.pyc,, +attrs/__pycache__/validators.cpython-39.pyc,, +attrs/converters.py,sha256=fCBEdlYWcmI3sCnpUk2pz22GYtXzqTkp6NeOpdI64PY,70 +attrs/exceptions.py,sha256=SlDli6AY77f6ny-H7oy98OkQjsrw-D_supEuErIVYkE,70 +attrs/filters.py,sha256=dc_dNey29kH6KLU1mT2Dakq7tZ3kBfzEGwzOmDzw1F8,67 +attrs/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +attrs/setters.py,sha256=oKw51C72Hh45wTwYvDHJP9kbicxiMhMR4Y5GvdpKdHQ,67 +attrs/validators.py,sha256=4ag1SyVD2Hm3PYKiNG_NOtR_e7f81Hr6GiNl4YvXo4Q,70 diff --git 
a/.venv/lib/python3.9/site-packages/attrs-21.4.0.dist-info/WHEEL b/.venv/lib/python3.9/site-packages/attrs-21.4.0.dist-info/WHEEL new file mode 100644 index 0000000..0b18a28 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/attrs-21.4.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/.venv/lib/python3.9/site-packages/attrs-21.4.0.dist-info/top_level.txt b/.venv/lib/python3.9/site-packages/attrs-21.4.0.dist-info/top_level.txt new file mode 100644 index 0000000..eca8ba9 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/attrs-21.4.0.dist-info/top_level.txt @@ -0,0 +1,2 @@ +attr +attrs diff --git a/.venv/lib/python3.9/site-packages/attrs/__init__.py b/.venv/lib/python3.9/site-packages/attrs/__init__.py new file mode 100644 index 0000000..a704b8b --- /dev/null +++ b/.venv/lib/python3.9/site-packages/attrs/__init__.py @@ -0,0 +1,70 @@ +# SPDX-License-Identifier: MIT + +from attr import ( + NOTHING, + Attribute, + Factory, + __author__, + __copyright__, + __description__, + __doc__, + __email__, + __license__, + __title__, + __url__, + __version__, + __version_info__, + assoc, + cmp_using, + define, + evolve, + field, + fields, + fields_dict, + frozen, + has, + make_class, + mutable, + resolve_types, + validate, +) +from attr._next_gen import asdict, astuple + +from . 
import converters, exceptions, filters, setters, validators + + +__all__ = [ + "__author__", + "__copyright__", + "__description__", + "__doc__", + "__email__", + "__license__", + "__title__", + "__url__", + "__version__", + "__version_info__", + "asdict", + "assoc", + "astuple", + "Attribute", + "cmp_using", + "converters", + "define", + "evolve", + "exceptions", + "Factory", + "field", + "fields_dict", + "fields", + "filters", + "frozen", + "has", + "make_class", + "mutable", + "NOTHING", + "resolve_types", + "setters", + "validate", + "validators", +] diff --git a/.venv/lib/python3.9/site-packages/attrs/__init__.pyi b/.venv/lib/python3.9/site-packages/attrs/__init__.pyi new file mode 100644 index 0000000..7426fa5 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/attrs/__init__.pyi @@ -0,0 +1,63 @@ +from typing import ( + Any, + Callable, + Dict, + Mapping, + Optional, + Sequence, + Tuple, + Type, +) + +# Because we need to type our own stuff, we have to make everything from +# attr explicitly public too. 
+from attr import __author__ as __author__ +from attr import __copyright__ as __copyright__ +from attr import __description__ as __description__ +from attr import __email__ as __email__ +from attr import __license__ as __license__ +from attr import __title__ as __title__ +from attr import __url__ as __url__ +from attr import __version__ as __version__ +from attr import __version_info__ as __version_info__ +from attr import _FilterType +from attr import assoc as assoc +from attr import Attribute as Attribute +from attr import define as define +from attr import evolve as evolve +from attr import Factory as Factory +from attr import exceptions as exceptions +from attr import field as field +from attr import fields as fields +from attr import fields_dict as fields_dict +from attr import frozen as frozen +from attr import has as has +from attr import make_class as make_class +from attr import mutable as mutable +from attr import NOTHING as NOTHING +from attr import resolve_types as resolve_types +from attr import setters as setters +from attr import validate as validate +from attr import validators as validators + +# TODO: see definition of attr.asdict/astuple +def asdict( + inst: Any, + recurse: bool = ..., + filter: Optional[_FilterType[Any]] = ..., + dict_factory: Type[Mapping[Any, Any]] = ..., + retain_collection_types: bool = ..., + value_serializer: Optional[ + Callable[[type, Attribute[Any], Any], Any] + ] = ..., + tuple_keys: bool = ..., +) -> Dict[str, Any]: ... + +# TODO: add support for returning NamedTuple from the mypy plugin +def astuple( + inst: Any, + recurse: bool = ..., + filter: Optional[_FilterType[Any]] = ..., + tuple_factory: Type[Sequence[Any]] = ..., + retain_collection_types: bool = ..., +) -> Tuple[Any, ...]: ... 
diff --git a/.venv/lib/python3.9/site-packages/attrs/converters.py b/.venv/lib/python3.9/site-packages/attrs/converters.py new file mode 100644 index 0000000..edfa8d3 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/attrs/converters.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.converters import * # noqa diff --git a/.venv/lib/python3.9/site-packages/attrs/exceptions.py b/.venv/lib/python3.9/site-packages/attrs/exceptions.py new file mode 100644 index 0000000..bd9efed --- /dev/null +++ b/.venv/lib/python3.9/site-packages/attrs/exceptions.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.exceptions import * # noqa diff --git a/.venv/lib/python3.9/site-packages/attrs/filters.py b/.venv/lib/python3.9/site-packages/attrs/filters.py new file mode 100644 index 0000000..5295900 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/attrs/filters.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.filters import * # noqa diff --git a/.venv/lib/python3.9/site-packages/attrs/py.typed b/.venv/lib/python3.9/site-packages/attrs/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.9/site-packages/attrs/setters.py b/.venv/lib/python3.9/site-packages/attrs/setters.py new file mode 100644 index 0000000..9b50770 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/attrs/setters.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.setters import * # noqa diff --git a/.venv/lib/python3.9/site-packages/attrs/validators.py b/.venv/lib/python3.9/site-packages/attrs/validators.py new file mode 100644 index 0000000..ab2c9b3 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/attrs/validators.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.validators import * # noqa diff --git a/.venv/lib/python3.9/site-packages/based58-0.1.0.dist-info/INSTALLER b/.venv/lib/python3.9/site-packages/based58-0.1.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ 
b/.venv/lib/python3.9/site-packages/based58-0.1.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.9/site-packages/based58-0.1.0.dist-info/METADATA b/.venv/lib/python3.9/site-packages/based58-0.1.0.dist-info/METADATA new file mode 100644 index 0000000..7cc9501 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/based58-0.1.0.dist-info/METADATA @@ -0,0 +1,101 @@ +Metadata-Version: 2.1 +Name: based58 +Version: 0.1.0 +Classifier: Programming Language :: Rust +Classifier: Programming Language :: Python :: Implementation :: CPython +Summary: A fast Python library for Base58 and Base58Check +Author-email: kevinheavey +License: MIT License + + Copyright (c) 2022 Kevin Heavey + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. 
+ +Requires-Python: >=3.7 +Description-Content-Type: text/markdown; charset=UTF-8; variant=GFM +Project-URL: repository, https://github.com/kevinheavey/based58 +Project-URL: homepage, https://github.com/kevinheavey/based58 +Project-URL: documentation, https://kevinheavey.github.io/based58/ +Project-URL: changelog, https://github.com/kevinheavey/based58/blob/main/CHANGELOG.md + +# based58 + +A fast base-58 Python library + +`based58` is a fast Python library for +[Base58](https://en.wikipedia.org/wiki/Binary-to-text_encoding#Base58) +encoding and decoding. It includes support for Base58Check and configurable alphabets. + +It is +[significantly faster](https://gist.github.com/kevinheavey/2abad728d7658c136de0078d667d7267) +than the pure-Python +[base58 library](https://gist.github.com/kevinheavey/2abad728d7658c136de0078d667d7267), +as it calls the Rust [bs58 library](https://github.com/mycorrhiza/bs58-rs) +under the hood. + +The API mimics that of the `base58` library, with the exception that string inputs are not +supported, only bytes. + +## Installation + + pip install based58 + +Note: requires Python >= 3.7. + +## Usage + +```python +>>> import based58 +>>> data = [1, 2, 3] +>>> based58.b58encode(b'hello world') +b'StV1DL6CwTryKyV' +>>> based58.b58decode(b'StV1DL6CwTryKyV') +b'hello world' +>>> based58.b58encode_check(b'hello world') +b'3vQB7B6MrGQZaxCuFg4oh' +>>> based58.b58decode_check(b'3vQB7B6MrGQZaxCuFg4oh') +b'hello world' +>>> based58.b58encode(b'hello world', alphabet=based58.Alphabet.RIPPLE) +b'StVrDLaUATiyKyV' +>>> based58.b58decode(b'StVrDLaUATiyKyV', alphabet=based58.Alphabet.RIPPLE) +b'hello world' +``` + +## Development + +### Setup + +1. Install [poetry](https://python-poetry.org/) +2. Install dev dependencies: + +``` +poetry install +``` + +3. Activate the poetry shell: + +```sh +poetry shell +``` + +### Testing + +1. Run `maturin develop` to compile the Rust code. +2. Run `make fmt`, `make lint`, and `make test`. 
+ diff --git a/.venv/lib/python3.9/site-packages/based58-0.1.0.dist-info/RECORD b/.venv/lib/python3.9/site-packages/based58-0.1.0.dist-info/RECORD new file mode 100644 index 0000000..5d4374f --- /dev/null +++ b/.venv/lib/python3.9/site-packages/based58-0.1.0.dist-info/RECORD @@ -0,0 +1,9 @@ +based58-0.1.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +based58-0.1.0.dist-info/METADATA,sha256=ygNfDgbxnmk7jcneJILmF5MI8YDaqr5UM1cYXK2o6RM,3292 +based58-0.1.0.dist-info/RECORD,, +based58-0.1.0.dist-info/WHEEL,sha256=Yu1SWYWJXjU1UON6y05fGuI5koj073qp52CX-YGIEMU,145 +based58/__init__.py,sha256=rTQNkTUWjONN3Zx7r-oORMd8_T0EOkhWqMf1HcgQCDY,50 +based58/__init__.pyi,sha256=vb88ymfuMNAzkDeDknZt-DZtNrYhrYQRGpSooSHmwXY,610 +based58/__pycache__/__init__.cpython-39.pyc,, +based58/based58.abi3.so,sha256=8i8861HC1lFNVcow6ALnkC3sSm7ZV6W1CWRUzMyTC-I,1202269 +based58/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/.venv/lib/python3.9/site-packages/based58-0.1.0.dist-info/WHEEL b/.venv/lib/python3.9/site-packages/based58-0.1.0.dist-info/WHEEL new file mode 100644 index 0000000..e4ea078 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/based58-0.1.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: maturin (0.12.9) +Root-Is-Purelib: false +Tag: cp37-abi3-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2 diff --git a/.venv/lib/python3.9/site-packages/based58/__init__.py b/.venv/lib/python3.9/site-packages/based58/__init__.py new file mode 100644 index 0000000..a68d074 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/based58/__init__.py @@ -0,0 +1,3 @@ +from .based58 import * + +__doc__ = based58.__doc__ diff --git a/.venv/lib/python3.9/site-packages/based58/__init__.pyi b/.venv/lib/python3.9/site-packages/based58/__init__.pyi new file mode 100644 index 0000000..e7ddf8c --- /dev/null +++ b/.venv/lib/python3.9/site-packages/based58/__init__.pyi @@ -0,0 +1,17 @@ +class Alphabet: + BITCOIN: "Alphabet" + MONERO: 
"Alphabet" + RIPPLE: "Alphabet" + FLICKR: "Alphabet" + DEFAULT: "Alphabet" + def __init__(self, base: bytes) -> None: ... + def __repr__(self) -> str: ... + +def b58decode(val: bytes, alphabet: Alphabet = Alphabet.BITCOIN) -> bytes: ... +def b58encode(val: bytes, alphabet: Alphabet = Alphabet.BITCOIN) -> bytes: ... +def b58decode_check( + val: bytes, alphabet: Alphabet = Alphabet.BITCOIN, expected_ver: int = None +) -> bytes: ... +def b58encode_check( + val: bytes, alphabet: Alphabet = Alphabet.BITCOIN, expected_ver: int = None +) -> bytes: ... diff --git a/.venv/lib/python3.9/site-packages/based58/based58.abi3.so b/.venv/lib/python3.9/site-packages/based58/based58.abi3.so new file mode 100755 index 0000000000000000000000000000000000000000..79aaf23350051c37058598e326294ef403feb574 GIT binary patch literal 1202269 zcmeFa3wRXe^*_D~2`q)UK@0?M1Vsfen5fhQh$O(mtn31cC{{5lf_SH}3wViyCee)J zXnLVVTiRHA!M4^|MWZ#C04Bj}#44#+h*u^qYSc9%R^UmzfhJe`b<@f(SPx3sQ znR(~EocFxvJ?FgVocEpQ_dUJd<4D?{22LnAA=)ph_26`~igMl6l^kAR|13eh%!9WiN zdN9y~fgTL>V4w#BJs9Z0Ko16bFwld69t`wgpa%mz80f)34+eTL(1U><4D?{22LnAA z=)ph_26`~igMl6l^kAR|13eh%!9WiNdN9y~fgTL>V4w#BJs9Z0Ko16bFwld6|6UBd z^3_NCu<_rEjsKoL_@(3L^Laewo=RzGDE=?+%X2b(%^#`yjq}2p<1nvC738xGYL}YS zV^LA`#%0&scvaCgH!iqI>QC-(LgE*LOMc${*@; z!$ta7XuslR|8Yo$?{XWSg=ecme0JC8s!DDX^NaPk-qtC6c?#^bF3(Os6Z7l(D~guf ze8ZJDU0<{)ve?Gk{`D0&z@R?ombq9@GM=uwB63Bdyc&nZbHPRh-yFLVKD+BnIPMx&z_`wcGowkONH_JY%sglSJkNMTWHtEXLo%A4^-ck zON6O(tQ*!8V-m2=l;gB^cMy1U_qn{IRxy4H93 z^9sIfCj+hrtB-W2;a5}?3QnIEm>g8I3n?~7EO1Jz=S6zrQx{%zsxS_1&bG5 z5m|Wpl~*j8f7O`W(eoDF99ekNji;TNciL4~EC%Bly@XuH<2labDN!)@@swcFmh|f5 zacF?=D|+|wRQ5ytX?;Aoct#d!EBlB(o>ZJh)82t{Lwb4oBYh@*{JR%G@yEPB#??*N zlEL+_z@ZTpYhYj-^|Gi|LM)bGmw^rpTYrJ^$w7Gg`LJ; zC*xf@e$F4o8sTPLgh$P^zMj*gOBSDg{WVwecMVwk($Qy(zUpQ?NFH_YPI9?n7+`Nw zxmhI#C@IIav_ki2* z^;~)|(1U><4E%3nV78{eUwGjxt#ntm*4YrbIjHZ`jHy1&9N^LNKGpnFKh;Y2_4~UI zaB2QY+qLq#NJh{&Ow+@4njUD<8UxMbW-%Y`>Il4M7Pn|-xbA5tYx;KUD_l#9TfC7Y 
zG$YU&)cXYW%Y0htrtC`!FPpnjGoqGOzB`&~EzV2vXhw0XmG?%9XMulM8Ef+hd>1JQ za{-~h>?cSSC}#l5lzv)XJD_X_l&OW!QbP~wc-dg$Q z$k3oMSgU_KMbl^3NEaFdP3)S0UxOQh-wg2awn5Y1wDOTuTHNf73SfWELXs9v2J-C;E&!9{+U62cTn%t%C|&L)b!UhBTp;ujEvNbANo*1WEfft z>7QDc-V7MZ>!M>~$A$G}z8YdNXIt#_F!CG~(mROJOgqoOc#fDqkh5FU0pL_$PF+a< zJg66?2lc*y)MrELA69~@1fO#^c7YG$ukY0iWZ#prY=LH`?L$s&Udl?%+tk^r8Mmfu z#-z;7^w=QW`>c@|(nKO(X~rVy#dMWCGo54GRiWP^%43vBRaN?e+`n zb#;?Tob;2z8%J#3JMWh`qfx}`Dl!!X4aPM>+H1tILhlOkz237d>V5Z<*V@m zGpZ7|6}&afN=XS9v$8aONoE@mgceq*PNmNKp0roT9A;{;0v*)Lo8(Da$5XR-XE|*I6@+y^Qt&_*B^++A-WWXB6qPRKqiCUSk5*SIJK&es#TsM338Am!<7CStE6i> z&}6KVKI^kGHGNvTrcd%U^!CQ4U<6j8x{$e!<(__8;OVgLcoRKPowCz9L;7y(CM0X- ztzJ8ApLH1$8hQi2E!xUWBw(9;xTxj6<`hrtEX~Xx2E`U6avn?>Av5(1Tr_j`>h)Zy zTPa(R$?qyP52gJ5W2kxK0{=~c<^`Jn_6Bmk5i*4^^I{oz2MVcUD&|&kjl8k)u-?%0 z9oA|8Oz~7EbIx*BkAlmDc%U0|D=zzcEeI z8?6gZK*>&kn}0!ds>izXQ#_ja?eacR=18Qy6)*Fn5y-?z5+H3ph1U4DLd)~{OY@jy zXqjqG>H#HO%;FmB>(5wqxdIOI(p0lX4?xP4E64+om$i#f7p8BMHEuo&pyURYG}bNW z3uA7HRw{YC(Fu6lMA`J{ZOkyY3G=KOGhn1-47XrnWy|gCR?MJW-s+fndKfb=P!&5C z*;nHZb1_hX=~?N#fdutN&(})#rufV570gQ2Rmj7VXaaQ`D5nQn6a1#hx@!%=Q2fVO zCyc^aMMh!$8iG7$n{+0Xoe`SipxHE)Q`gqaX=mowrV1sUbc0Y*)%AQ@MU<>3ld_(C zg;bh9QRpq>Q{-BI3e$(ZOqwFqDCBse8Mq_HJdrtQx5PT;%g)YN7*m&tA)~kibXM3o zBxrN+GlRxO>16t^{}5v11k~ib8Q{G6JKm7`9zkb>dZj;ONHIvgkGe`+*mvRoR46o? 
zKF$1FKxthma)G3gA~zj}yh4#19tK5z@i89F{L8wOAw`}WFVj98nMjc*?N5&KrUlh7OoB0+N>E1C=T7MgqPTA{g9aY>@NNiPY_y)2ZOQGA_4bANu} z+i32g-G@eVSHNP^J6 zQb0_jMqKBO48g!Of&3yjLQ=448^e-pggN?aA@f0aBKKQ0JqqF|Cs-htcDbzDBbRj^ z8QA=py<|uV00H0`c(n`(3#jQ{EvUDig4u?1-mK*-YwQ=SaEEo+Yt-A^^1;T(1Kr?b z88QPOKYu8JkBL(pe1ri8+Mh0$v@Hvh@$vNbBz$ZG8vm#8QQ#GPWCH@g<6Y1NA63tj z6s?D#JGk(1^1l^6_Pu%le4N(}KCXti20rdso505z0S6!X00ZrxBbT%d7#25u{OMmw z_;>+m{GY-{wny-h1qcAor&o2sN9AV0$9-76xbSi0=L#PmLjg&q52Zis)pSR9ATtDQ zQt&jCQ)2Xr3p&YSt%rB1R74#V_YSgmqg+WmQ+SgtWTBq@z#6- zT`T$k#r>&M$!3bHptcGBdh1A~t-p`#(AJxvtv5s0tafQqL$5`>aLR@;wzxml6Pp zw)Gq0Agv!zgJJzphIMwU(j%W~?4m~=)nM~qYLV4(Ei%t(D0VW&o=uwQP1& z%O-hgZNG*P?Xb2$kpN4b?6xW`D8|1z5*3OV-}`%r z@zFc+Xy*T-OBsss(s-FsEW`1XC1SE-c^vTVRxA@@{6ULVyTv%f7h$xnnaekj<|WK> zFlm0%VrGa-^K2P)k>)b;N}3li^?yQ|%fz8H*M3ScYSlzqClkpKJ>I(X#ke%zNK|$s z&396QnEAKv6&iZ~N}-|4a7mKp(dR^(d-m~?@yI+!n*VF#w@LGDA0L`D|LN5SlIAz# z4TP1LRXpl+rt6Taz^AG#e-!Mrd{S{}TR-wOJ9|ZmV^*ubA`9r#t zA^r7^mw9LiGJR+IJD%`&OMja~d;|LPFw4Q{?-S}d|L^IqsghuHM}KcYqR0xS#hL_; zd=UEEyGQ76{^de{+l#u;-xnK%{z`;OGfuz4p})EH-$s9z>^e018-#UZ_uAx_R}u0Z z)<@_}g8r6}G?2O#sRy9HxO|LOfB{rOp<6w5A4s6 z?v*t1qmK}0p!iYj*WgFTw&Kyuzq3mj@}nj3GDBJBJM*KTzRjw;lZT@w@eTOVB4#-l zKPsYT_5Yq9<=#y&y5mP7h%uWVJw{X>SRP*YZy}=BFA*Y|j!Tj}oWEZ9QM-^@#&0fl z_|Z$xd>cP{VdtUoqteR{#E-7Q8|+%r3~rsbI_+U4&IlJGT)j01`+;l>2K({d;|Jx=gRS5^tWRn zGyFf-$7*zf(H;H02r*{U-U!oG&1@{1ZH7B{DFK(Wv zxTNq><%=bjiXUO0Yl(VzaIG7AQ#Iiea4f4M0whae)rK;`8?xkYqn}TW&TcF6lTcd7O?ICB@_J`~`U2(=Afw_%3D0-r-TdE7!Evx_t1QYN{a z8MNfJ058qf>2I>?F*QHuY_6q|zqWv@}@JdcqYtD zxuuI^mFRE!Y}k&&s4!3?mS^gFbdmrZI#bn~pv&9wV`5pCs%wykCD8<}mncV1N!;?h z+WJ))!RXHNTwQDP#%5~>;8(nn0~jYz_&Xj}tJRlw$;>+Y0^yF;vxGac`^w+Z_yk|) zpZ^Ij8Q$3rcRYO^xnodo43xbW8N`r)TLBxfXr0vxws-@kKqq|kOT;~0q#3isxv^&s zSmAjn=DZo;yxER7v;}Vg>?Ea^?@tj&IpPy^Wj@kZv3l`q6+VfPdarF3x<7x@%@o??5a_$UNc2pAv^raMqpH@@$R~E!(AN7p$vHE@?jC*|uEn zq-DDu3!I>_!l&i!hzwxD525|QpdAR7u1N2U92G2W%nX{hC!C{&=wx9-A5Uxv`d(vQ z{vq25fGbSNBCX&`b|o|vn;g=MYp}Ga0|fxG)R_oy7ea(h5S#)@^p?Y4|0fzYgy~5w 
zV0uyu2t8aC)Te{#P4S^oZ%9Xw%chxeYD%_O0UtsA4ME4(62M~=rE8;Fw7llUCj^c0 zBQ&pr*xS&_kQWgmmySr!sf$+!!~q4w{6mXD1P0#58eI0$nG7n~w{l+zU|)hNm!=2D zE%ikQkn29V4UCK_MxTnKvxA*e(%Zm?J&ggZF+Co=W#wKEy1Zm)z13$xT}9x%l6NPR zyiigx_x-|WG4`I=rN{uI{*n{wtpNjCkwI@k))5m1lvD)XcQSua_$;`&Cw6+hb@+gG zJJSF=ldHf7PJx{&9w=x`@db^`h>cL*p2a5yjdOqup#Hs({*`qj6p@g3FJh8_3|F`e zx+UoN%DVrDM8{>oMkW5@{)L&E{&~o_B@5JDJw4U)bShv2^%iz+Map38y1+xu7CeT` zF&&iHr0m_2@}JyN-o=A?I$JULU_|)q4xX(2;JM>pN$_0#zY9-gxGR2kBgV#spRHgB z{|QovY#Xfy!_R*;-YNHW&QK=^$`wTvNsf1_oF(~uR$r$8%+N!vFA)=B$2)zDTh?T~ zuWnrNPOl*a9<*XD~>xy zslOrfv>)6f9Je~8e`57Hh5gC|A0D_T0+ zi&1{Jb>d&BZB;`_3)DnU0OtcwoslK7*q9BzdJ$B{B=s+*ftSLKIlme#w#MeCC79F@ z*0a}IQ~I&K5^n?{gT0e;MbRL14+*wgfr5%~=8zB0ReGc~ho&)6GemD#ji&1G3>`+I zCE&%RPLO~12f(J8_EkvPX>dW?)$2!~UsmcP&N>D?wC-dm5!M#hL9Qp(7IA-rrhmrZ zJH{mFjdRnP|e=yzu` zl)Rj63{hmBwpTo!vTczt9x#HtL;8o-2Nz=?j2ozNRm0;|6D<8EC6XJ`kpf)%2h#gr zT#4abYaMkfTfl=6g@~M8W#vAOW!#CP&tyL?E)#}o#nyi0djF_9fOOT8$k0oC7 zuBju->LUHddn1{^Z0uTyo!@X=KxaFT?d@rpG}8Ml#jXnRdv7{kLARL}GNBy`4_K1v zADMdndw`~NoX0<~H*A&ZNVyg%aQJ0Lz?^nR25I#lAar(jE7B1(nyHoM!b^&ZJ)VYwk-Z{A+0+qEoBgHD zxou~o(YYICN{GB;yz&H(zw8$%M*3&KvJ<-6$uw8!Y9{z~mL0m9Zr#AKH*U2P`N|!- ziex)<^|`R*OSi^51%#E4#*?t}bnAHgg>|I5$x4K-`jFBsE6;R7SB3AVT01bvfJTa* zY`uynvhtW;1+O}g_g0~+4x}n{HS2E}H|YpMSBC&vvhw~&^bhlV52QR8zp41q=3a(h zSA9Wo^K2+F)Srz7zHs?_k>l2@rS(qh{SXZuji8w5pm<`{TByIzGK*%M88W7&-!&<7 ze6Q#coYf)yosj-zSZ}qCgeN&RB4;yVPFL0YpO`dMpFF~+4;Yc0Yw}m*&`sXxXv}%U z1%+E2T{qa+hfNTkY}iKXQ;KwA{UQM|dr>9+uG~-wiOnXofkOIk}88AoW&{A_0GMDb@MC`k2 zNbBtHFNY-av^{};=vsyK4(vCj#TDM@ap)2GmW68-uC2I&U4u#QF~UF_m#(@{!I z%xMINeD)5hC~a>m*#ms&1-{c%%2Ov2V1}ea44J9FB~RjtwlPqR3~U^{H(G7Ix)l#b zzydz9aBU@CaP1Dc03Un{NLT|orI5BrDAu|ZW-u3Y5C4JCY$S30=xZg!{(vG0jugk= ze>NZg^#Wo3_W3x5HdHE3e{l-PeEc<@?D_ap`-OFvx^d0N^kl0LPlr4o zEC1@6k9P*ue7qHj-)TN(1@#w$`c_gORw>3(pY@lu=s0YQofsyL^XwL=Go&I&xhS5) zn0X!6_n}}=THNzIayw&N4{@c&Hp@LBn12e*5E%Y9ktL=ExYIh1g^~Ci68B$UV70vv z`xacF4FqW95iqYC<7jPb_)GF633mFECrOD_3)V=*OeRScy3q;>#l%1iY(l|mX7`dN 
z$%z_3<0L6nQ=-4DuiTU5XU7wG#x77-NG;Z6lGI_607|?9ixTzWFQPrmQ1kal~9lp+6q{MOc=b3?}WL&l8| z!B~+RZ2Q8j#B5qnsg=gO{vVM8VmC-Q?_+-%7$vsiH2cdw#ZAckVS32C9O5n)8Cs*w zAtMi0#=ATRA1yFiycP@IJwQYAvjr%in9K)LZ2W!BI`mxjR?{`GfItnT^)f6Ep=8ww zrw6KFMS0uaFQVYNSBiZm@W>F13s*&oL*`-ta|M9OL#1y=Uke#yaV0QKL1Q|AKr4CC zy6BO@#}NQnZSA^}^Mp1Ms_L+i=$~>!PDmdQlrUuU0-q1te2}-PZ7JlI*4WpF#4_bMN-$`}V6;<#NM&@##Mjr(`Uj{RjPc{xWrd8}M`EK?0T91dGmm(l_Ai(i{H6Uce474h$t~#^ zu*<#vRU3k(w>{_a_ov_hFB3ynvez5kq8USR3#k)n#K={LjK%FPP_qdNsa|v% z9i_0#20{QM_S`lZu_kB4>ZX|kt^xwrhRl@bA&cYlH|RobE&!8)a)_JbPA5&Q<~ zO1JhJPkvW>#DW)DW!zpf6*~>^_`hYZSv0;YJZ~V<)dkO!E&r?Vw2tcv&y-|%PWqpP z$Gu_@{UW~JUAZe)rWVA|zbT0t167JU*j(OGiHwZMSgzcPtJY_8QtzfZZ)UtmxS5#C z#J5%ApI+j^4~d0Ld{HH?YPS>5V&apMIHm-9V^GUxkPpo6^fg$=U}XYF2G?#O<6*i# z!sQ>q!I6u*$d#I*B1()k^pAw~4?=mdr8BV^F*U4z64rMG>vyL5%MMHRc!Y8_XDG%998h+;b9T)06R;GGkA~uhgJ$e4(soS z^?w86M$)sHmaYKz0T$sKRwtm>*Ib2X1bb%kQodPJ;iHj-Xp=eNxDj7VUenSaX!WhV zHS??z?7Msk?SPkzF9EmrmtCJ?cLB7|`#Mt5Q#Xry0kDA17C9zC1`szN+(y%c??un}ckSe0gE+<>|ir!q1wmir@Yu|ug; zl>&?!N@>*apzxSU^UI*psfNT-V`^Pb#7;GbUPS0eIN6Y)A!W-CDq+oG}mosB|W*bbNBs6o$a$ zdHm%^VbI%lsWE{7q@Ob0WYXYDaFccI1-(27a#p^{Z^?TO@=tcRKl;q zU#u1^jGf(TQ2<&A@|+`Bf(_N)M5>(d00tE1>@FqMx2*v*g|))fX(c{kFM=rK3I4g$ z$D^e|NKyOI%Xjx&V;2r|bAb5WH# z5z-j9&6tigfF&^<;j6o`3D1Vz83!j>@!84k#meLD%+BN?k0ULL@l$LS=sh;hR-w%d zgv=n@RjSN603HG*at1iq+GJ&?O6543-IFjJN=BE;tJzL@)VTpVZffKi)J&PB%Dp!h zO7uf6JZ=0rG)nrG#RhUWIxZQ7p>L1f;=G`V(epo>!!1zbEma}jbz07 zD;a|-VrTQ;W%w5;+Z{PXB;=u%l3GeBPvm%4w}wc!h_FPr(vbInxN6T+((}DvCk*V4 z{eo7+L*D=XcQ*3v{-6@HhkV%o{c-^{+K7~HjTZaue|Htd z{&zebvOj1W)CkoP?th0;#7WeE#6$H5t=zR7R2NcPX2LLf>n{cMf54~+RYt>rg^Z;m zHGQk~{RKdPKDYUj~t1ge4Ld3ZcBumy8V>XVE+fKJ}*c=c{O;VC2;^X@mvViB6nBfC|9J zDP+&pQ$__#pYw(DK8e=jee+$~OacJ?k8PoUqk_id5#aTKs-TfFB4~tA7O~SvsAMbF z$nUe_XnL#{&T24uz zFGI%?{X;1e!X6!E!VYFe&xMs6YA=s&D=M-zIB2H4h?XeuCGLiNH?xEpZm0})23X$O z2abwd^6rdZ!VQFyBUMRB`x><=h0$O3H}Y#NcDl~41FZyxFI3*6v#{}p(L2>kTRu&p}dA7fF zD^X)0Dt}zG#9!XYCp!%2rKdUR;v~gPZ6D%&+#Y})aJZr9CuLj1Zbf}o7*1hL2U*)O 
zTtWq!_}yoHa`L7Q=!!(T4K`VC;QUMq?i%eE5O4@2XAPx|qf zeuP;K2!-R6j&Egqdr%WduQjM|;^6tqW}raZc|d&9SgM_VFl&YMQ?o;QBpWN6K!ro3 z$DvGo)??b_YI(ve{iTZ(qshX~_mNutJ1Ls?`Jj2$zrnh|7-y4xRl)YWmS()TZsf!_ zImeUWgb5U#*g!xYy#aw7guD1N0qQr{S^(jA()E z5w1xlW3?cOuhM_6I3)J#%JX(q;&8_ysEAzgFD#Dhdp{H3G;iOR+vgw~MMjj?vLBvE zR6OvpV-wb9+AD*wwSpgNd286U2qc*xDm3o;OkeaJ`$;p-f_;!wY~?~t1b1u-ja$bd z5jrfrl$2~Z=hmjh6&iP)FZw|U zK1Q}F0Rr`czv!!&9vrkKSdu9A7oFj-zAft%IJPuphFE@lI{kzAN}f(OoX&KLjBJ}C zqvPV9uBmhurqcCEQwiLn4zRTCV{`{uqydMFv5H8yQg4A8mIXmn0ZwbjP#6+Q$(-B_ z`|hm-h>pVYg}^*+t8ja{x9H~RU&19yBG>YGuBCP^H^2E&B^GL$QM-_!=5_e*gh?BU&eQ@ZI~vT`!Jv7>*Fp1W zFrXdUsI6N4C%v`2FE#&^FV8j4suN{OM+k^GJ?5@O-<3C%QI}lIxj0d-<>o|@>l0k) zH0*K`E(9LxaG`5O1*P4|KFDGs#u3QXCqgIy?=mvyLMlweVi+g%u>WxsT_h+vPjCac zV-qoA8nF%Ck^Q94#TFd8mTpHrxXLGxmq2kIB!*p0+d65Ehp;$QLjtm?29h>h)%G08 zQO;>OP?luEazxN)pRwG5$MA2b5)8GM&#j%C(z+<?M8lRly+t24n28#s}qK(0Wp+fnbPbV)a}WJd3&qmV19 zpx#InuzUx0<50z0?ci|l3rQWAG=6b14ymgHAr=6Pwn4y?mW;m-76X5WalV@wN1lYx zDy$_85l`0N?|>b6uMBU-&J+1+CthWvBCCq8TGgxfRP_~ zle~)So|!TQMfapWlF&VUe9~<#H~$gr2FFOQWrb#{j*Gq2Sx={&yXdT6b%vegS zmtC_O#y+f8g%Z7P0ysmq5h$MCG*UJ33|=_)k-P-(oXQzbrAUd}DXq2{xWOxvZ_wXash7ksI8uMam*WucVYYncDrVN-7 zr~ca5knWFNfZNjHA35yr(iLa&cv4A1tU3d0rHOufIyJpxy63p zRgdF=(}RZ@VK-J#zc35e8e9>2s=@+l$m?+gq&r^51;~#|op>Vd6l3)WO)H(?jbITZ zmRm8wo{H^jwrw;r3W0at2u?y&YKA&oLrlb_8-2>C$7(LYF_Wgsf|yJ6rd>10Tf>%S zK$;XFD^Uhiag(9(H+C!ah!N6L8Z_^Foiq+?nyDWjFR821|0=3!hI8S$&T`^z@w0EYr(Qv5o6wlsk<;G5rEtABL|7a(| z5LRfl;t6MoF!$^B3+qLu;egDIvElr8bPSOtI25kfieZO_+^AC@f2Rtx^J%2@bCwz& z^E+7tmKY)$Q6d0zE%HG#Q4vo&?;^DmUf8_0p8$BEGH*fRg33HH1N`RV$Jm3n@pJr- z+-2RAL11rk!&v@w+2ucjv35NghQ4c|hiY?V>y%dx7SbVO+lE#@-|;1WgnvGKm-&Gf#LI z=2?8`?Z}v)Jc@i0RDq}sV>Z;{i#P=kA(M!{wx>W}6*S#VG)!>`5INcjet|uJ_|uHk zv}1r6L!j(6ccZl%QFBWFx8Hwrbkh4blHMQKA1r%}zDA4)i8f>+475v}h=fs1_c3B) z@?E!Q7M67O3NdBhp(I03}zx1N|)AS}B7R#eqy{~Dd@9u#yP%GWwQ-SJ`N;$i& zdl8u(hOIDc+%_V8nmPU)t@H)hy0%-tgCM|qa9CkON*~-@I?T&>+`<%B?Naq!Q-&tj zju7XXion*$Jk`MHcmvl<1EZY=CP@R6kHPjMv=BD0KgJu&&DmBceq7ng1DbAv`c?;v 
zk*5DMtXmewlL(Jqrsci1WMV7;Bbdq`)=_&PXEV0gArb^Jnt`o)$_Q5lA-ydGAC%R} zh(gqGbTIGr#i>F40vJ#ct|Gg#;;}W5ceS-3Ix}rfmdMTeVHitucC$LPYwwB#`v7ar z<`UsB?nqMZ*-MWjb?&fcLMpoVk2&!+3_|voZO8Z`E)GKnE|zjOSXG^7PW0m16v){U zteraN<7>*^|Mirh{xuesZu-OeGBj>Jw6siCk@vj+&NirP0sj*zBf>a}H>+^>25NJR^eb<4rp(1e1q&*}`o^|j0Fe$!7y6&*3%jB()UQr^)e3gJA4E)CeXMukbsNVP zj;ae(^bPARAvg(HU*z%`=2=5fMWRNOABq3~U&y>Obs#-G;HFHWpS4#ruoB z%s@JEGPv4cri;D7()wQEyicQ#hUp`tcH_Vo%6m03*MG+*&IhNsUH@c#+o+9`Dvll! z>_j}$-m$24Npssas-+l{(=peZ7gUx^@I*(ll1%>{4+CXdXXB)Xlp(MaY|uJij<&}7 z3lE50!0dw0KO+}WnV(aK`40HU!;jLL=f86q2 z$=MS7VMyPg4c{=$jErs1>BK=8sZTte;`zMLf75^o{C~lO^w0e_UDl3&Cob^kda!#c z*VBSGD@O4Bic2S?Go|Ts|BAzS(~O%75lGyarIj|cYI$Ep9}6-7?-H~MddZGlpy_Ym zR8h#{4WZ#1LVB|{s{LGZa_%0seXeTz_5o*q?!R3sZeCF6f8&U=rHuvtHxMt^fM4An zE$_4F>r%59HN)06B-qiSb-X#vylpIObYideKXEK@IHck*j2q+PI-f^@Xj|K>vYNWrub2c+T(opxz8`$7ZcDWd!v{>{ZxH zi2%h&`XQ&P7sZJ0cGw&&4thRuOrX)Ts z`fut@Y_(j{;p>$eR?U>60@@thw&fx#jD2hs4gdweI;@g_iiT(ddg61Poh1rTfB9x2 z$WSK*>#}oN^_hVINyyNXV?V|$tRb+}XvI;bl=(|;WNmTHwnj|3kP~9Y1s>aejuAFe zOHfu1yk?|EPl9}qjHSY+tqCq#_d+#!oY+MKK)Y?-l;!A!r!NA=S~ZSr_a=C za`Or&PMRE;f})kN^NsQ!qWgv!!GAsc8X36V33|hfH3}=rbSP~({YG2sR&%`EZPmd# zjCFy5io!3@T~GI%_y^0mV#K)R^5P8Gx&)MWpd?zBT+YFEjy0D$AFPJ7V_?^TzYN|B z*dR~1`G{1wam3=HhKain23$&KG^rLR0RGxNQsrL=6_{$U%#Y+r8I=t=&lyfTSxD=v z{>68?-DySGx{!&6_#5f-m}j|F56wz<#CG)b0Al;@>xDQZ-B9Ci%q~$AVs?dE66xiisCx;0RIU5g2Oc1~VnlTq<3KOgnWUgXCFhN#h{R~pB zRKjq)QHbU>YnjEcooFgbM>kmj7_8$(H`8%xyO=WDPzNlK%S8gXWkpx3D>rOcE7W{Nx%ozD9Aya>W%*#T6Y` z$rv42A4dC_cdBb^s-;0Ws98jftT63Jq&{`zk2csD(nTlot_oPG5$MPY?9SL`F*)k1 z0(hSOm;fxKW`-*iXDn3ED*7u|0>MiL^ge>T|2?A6uBY49Y`R>@{xL&x~&7~{do)H4uDY;?HMRR z!Dk$P8?1HsZR|osSg6{|A3$!y{2l%)o(tz6u?%D@EISxOHyu2pE#)tAw>1-Ob>X)t z!cZw?bGDBfHPw*Ad+I<*(LadVnqeEKmE9P|P{sPlm=Tz0L|@R?JxDNDad6?x|D#-f z{|GK<!FDN)?qb&IfGs5 zPTvB|z~LhFM>CW3tx<1@NeVAS-x3cU#;t@o4O^U%|H=m`BtFlrZ+#ySg2Av}V(zwS zah1zoUac6+a;fRNs9n3T$1K5MNGyuM+%q4&>z=^~go?X_t6dY7n$K)VjzgWIm?MN2 zGe#jGp|Jf(dLtOLqJMWOY{IGH3L6C=QSlf02B!iLqI?4*`*eo|>9dEEY`-op7wOO8^E}COr;;mbQx_vnk 
zY8>u?G7G6;xRf@DIEWP;L~m1}y*7b3eki-kwwcmV;sC<_O5EOMU73nqim{;IrpK*E zVIVl0Pb)UU^AU! zK`te(@k$wM@FB;aNa}^}j*=6)TSly((%N7eLI>LWiQ?9s)~3e7R^06HQoZ~?(ck7T z5iW+g$Y#g)2rbe zvO0ci^V3)VOzdPlneOn@Fm@6gfS;a$I^z8Fg!8-d(^FBgo1e=0K=4@rz!OEQe~@Z} zng1ZS&zjNI*7+7mh^k?wBxPLqJ7568`f^Fz2C9*xD=2IlN+9=P^pm54ux8wDO^K+0 zj^{u+e3K5F2jcc*MKlLoDu`Xr=|OYy2rM?SPBBx@+(_Jr6@HNqAdovWew!6uoH7FK zWI#hUS_u430!qwA5Fhz;+u19P3^169FP;a|~-=yngfns#`oYA;2OM(Iz_JJH9GZ`roqtb*ByAN5fEILhfqeC$Wf>@xPTnuG}> zVXw#cmpI~jyKGrKj2pSB5FgeHiG~J~@bYwQzrz@ieWT08YptSjxN#)um8Y<@Tar@y z15bURzev(1@_3vL5i-NgIM0PLlu5F+)d?1CVIE^utB3=93c*-q;Wl<-!+$0H3|*6+*vR zbw_%o)I@8*D)z9PdKdU{CNJhHDqba`vU;P4&qnn6`98J`n?p;i1TrBT8I0O~{J&-Z z{_AUA;D6~nWWXzUfc)Fai_uFn&PD0+9V&4rFJ|p0xF~)2FHpgP#t;r5Y9{)usUKi8 zir0FhLxF>8k)}$F5(iCf9jpQ`V0!$5*%yMIMZKH&M`1^qnEFb+TYt5)MtnR>qQgm2 z2ndulVjHoLI3LSdA94o|tkBN-@IKf2kOH}u9M$T`bZBfHWWsvwzS zVnL8viFL~sk{VN$d8~%-?M0x9J_4%Og4CMvqU}W?ck%i9%MK?rsz&RzLQq~KbJ z+cusZ30cinVH>NH7NMAPPAJkB(5$q-k&*;TfB7rKiyeq7o?^8LOO{MF{PtQ{yj>uN zF|&U!FjlgD6O)6i%+eHql4zK#rEOV%0KnBC^R-N2lq1b!_i~2nb1RM1xu{F@uJhyu zye{=JRxf&9>Lu#73XRU;tr}`YqWx|H@zgp>S8#oN$#%ZfT>dB;G|b2G-&wA$Q;2z3 zA;#$^fKwcSE^HOan$*q%H+I#mxF(h_u!(KLRw>OjYRp8u+Y724)>SBs`X!@nS<-Fo zU)BQ7a_upKxr##q1%V4Rs0*?{!)M;`gn28|*R19AjW5^wuy%zL%~Z>Cth#st+qJ(! 
zAw$ttsmu>ATvmv775cGP;X=?psXlHO3PHBU4MU+3U}2FF;AIz#z$At6B^OH>L4VcC zO`Mhc8HZ}Eo9?$Ke(*s;m+?p+XW~CM&zbmt4|3w;2orICDf7@DoYR@qd2orto2+lR z(VhR+0hs0 z3L`Ziz9#uti~Y`iDX~7d8_e|39QW?CUuLAD{|q6k1f^(ZTI>OID#>|Y3o{^S1*{0c z8D{DqQblvz-+BM~kBP}cI@Th7KF9;;SNx2NpW)$vXI%X8^7t_!9o8+v$#+=4MhTl< zOLrCE_yW-Ol#u|moo6Emar1l&IpSjqX}?7?`e+7Y=@DQcO7WP%?2n0(=Ikcx)mEsu zrJD-e!xP`XA3xtRWSqq9H9k3^vilP*?`K~QcEV-jF!WrVa2eMZalU29IJ^7vEw@%M zbo~G1`Ih%ABrXnizU9-$5*a+-((%88Ulou~3A_5PLprIqYe@HPcZSsAn1XLPzwFTW z+tri`n@H!$i=Fb{eP|aF>_fZKMW?`iyE~l%!nAJ2lQ1oxb))^lny+qL`|V~SrCX-e zb-&%ouA(PfN8{;`_uE~!K$w<5Y+VV(f=uf|Bp&L1JH{X>LDG~EBy|`lFb5n+`^sn_ z3ekAw^T|iBTgbSf4iWlI;k<8_%;skB@TgC6wj~xz;FUr1f@3I{Dfa2#`U|(G$dfk` zfD*|?9*&IjYG8-hbHb(XX{a6h^}~4`-2XNg3z=<^`84=u5iHB~qDSqjMj2sr(lmN%gBG)lwS#(;K^s!TDC;pi z#g2;|fl^6u#++|E%c0jQqO~`)HwWy!AQMirPjK)M=zSl0F$gq05kJOr6yukmp04Y; z(p^v}JFFo*w8)%W4I<|`S0gVZ74EP~sE>jorh^{wuE82Mmq@&!8djDXOt(55q=OSF zz0njdJJn_u>y-C82i6vO9?iN%WP@G zlP!<7+b^uw)s0IYZA8i;$)o#SMX~=IPlqgzwiR)na3eeSe}APWN&^xPE|28AP0;;O z?t=?pV4EPz>$z0d+eDZL#0pzGOl{@JX2V(E*q; zIIXV3I@x9k`%u#6XIw9WC4l-Cv$JM?pWIaYBZi0&!Pk>F`eH+o{K?r&rUvo4oQc7c zdm?A@c-ZJU_^2bMBItQZrZ#R#dNfTlLD_4-@gxZ&71GaY*E%O<0;LM#bQv70pMgAP z{=L5;6HU8BiT9(Gc(;^ze}f&td7Ef?D#4CZL|p%84|3I6uj$}1KtAHFWgRUnM@}WR z%uAulVlRZT1bL#6b|3=?Lo7Xbcp`<^1IWQ1qoLm-6oPHi5bARNc3PV|glceFF^&T$ zKmi_xc9`lC4waKZu+F**rBM;i?umiN@dm`Oh;inTG|k8ro@L#RswgQIqn5;caPwb` z`o7Y!y_PM;s#A`WHx`4~&&O8ew1-v`Va0WYH79~_t%-va@kK6!h-sQ^%*#bTLBg}L zk@vxP-r>wU*vSjiYoIBt-w1pA?dbu10h*hlh>V91#5Xe}gRDADI7wk7)TFo!FK7PzPZ+~;{Mv+G~=MA}I_0Xn|%ha>G|I6j8 z1z*5VgAuDz7Q!DuQS9h^O?%3zM$2o8&ViCj&t6FX*!tt&=?qbhKF8@5XoBaKTCD4H zaG!i36VYG(Admu~+~j)q+IiR}be-tnpRg?lM6_jF-?0}0#XRs3U^kXrtC1aD&p2H$ z;C!5}+PeDH-dI8vbE_7d~y5RbsP zoXl9yqpi@`4}kU987}rRR27LV#JE@eg!Bv&XKu8eF#x#>pE&}Rh4kU}$7PUjosR;Z z;VJUv9qi`)q_A@^*0e&fIHUSP=y;!4FXP+6Dz<;QOT4<;UIX7tEEgp4Q#Nd--LP9o zVmdA$iDvc9Rv(;mu|Zw%Jr5ybyB8o^=tLjZMHCOvW0{pfj=7P~l(J zIKDd`@AdBQW+J6(5Q47|m&Y76s01X%kY9=_@xNHUV2XhiY%khh; 
zTH|D2CPp3PI@y=*++ft1lA^9WR+F5OD>TKM$t;0`i}olv`NBo+xtxGMe*WGe>x&ss zeg1!;FOK(ryS^y&0r|2psK1O|1{@(X|CAg@MD{_fwl6jiX+tDUR@gu>!pS}S+G4%d zS&upBzi6|6RJeJ8e+3q~4(~8i|9%f=C|IhAWE~zkGo$G<8Q}^Dq{7Y%F~nfBoc88q zpBEA2_-$?O*dl_7r>~I>*jv~Z$ZqH8TF^%Y2kst3F*p^d`da8>&%mGh;B$~2p zl4x>>q{$(YCWlCxB#TWXRLntkqcwAi_o-Y`QODcBgI_CcZP&awGoWE0cYV+b*rVgtJA|LB9(f8u?Wq)ImOOlUzqfN}1 z`qn<0w;le!Iih`VRf9!j9Z(XeYO5e%lvkz8vk>DkpJhxH9MV{jCH-kEuzPH8!@~Al zmCpYx+>E-jM6k#aFz8biP`?^{oT=7P^rQmz(uNk=$MCw+8X`NgF$CizI>=vEAi!eF zKtln&Wu;%H_{%0jSd@O58pVMr$Z2X|()7jRY7}rlDGln#B!LZ#XN~nK#5@ggI64;s zaJx1fa})lXdMj^%|HeA*9!NbFUU?9h9rY2e#&N)q)goA6>?y9}CS00U0@WbS0on0y zg8Em;F6{;WNd1Dz{;jQn^7jB$N%TDCpj5@1)PdO33DT>BLAsn_c&G-17U=^vDCl0R z4c{JnFrk|Xe<(zVL+9k-t%xdK0A=wJY;|~MlmE#&j@fW*W@rgrXXVbwyKS^D`j2VG z_|ZTu6tSe3x z;CAU9aeK6pntD2{fuNNF%}9OdJ+=d>xFs?kOu5uZ@vJ`zC37}g)0YCs()>3@!(0&= zOh$d%4M;IkpF;{4Gq8bZdg_ySf)#Jk?L<@l5L7@t>tob|m=BNDGL@_;;lC4o^^m`> zbu<*qgL)rEeUAqB@A7@EvuBYr{Gaqc90s-T;Qogr`w<;(|AW%kG-DW6Zy4sf$dL^k zL@YcZkzhW!Rhhu7tPepo=B?S*&Z)id{?_dE1sFc%<4?i6ka=qX7kzsI^15JscKr4G zZ75p0w=;4)maNl!(WBSTbl#Dp#Pz4TKa6>3d@*QJN(;_*2Qav4CRvPBf?_ zo-xkV=ptmH=(}qA{`56#ri7}U4OX+yh%@q7T5D<`vQqE$NB5@s) zDoX61?Mnu_0jiHsWqdvD?61`13n19vb-T}6fD8K4$6yhz3X&`W;1G zfF!R3+pV@fTF#LPti|?;EL^K_#S$g}ITv1y-5Lld!nTUWfE)qR_B+%~KnAmKw#jrn zW-h<{M3ZS0?9Nt&BwB2C7B_!{^(dAt0efdg5s}j|O*DSNliW6tN}R8k;61)kj6<@> zFr2TKbXf09;>w6CII(HUM+O%GlCVA>tK(=oeiHelHk2I5!06~JVn0$}9|Q~Oi!&$I z!^-Nh3PSO5E1V$X1`4F%@+AOD1&DUysYminf3+d~NR*&a3GBUz``zMB+-4xLh4EM_ zE(?Yt^<|Wpn0X$Y%8o$g*yI?7NG_zZO_5E#KvoqX zbGkwH&pQx#N${zF56V_Te!Cat4*kUWUl;+i`YUjL2qXMjy0P_Zud?&P(t)pZ@&ug^ zt;4@!;%Ygn1hHXC6cw)(1;<+TF3)#IfUpV>8jfiUOHZ^p28C4C6Kk!*QK=ZaCXc|O z4`Bo=fjg=Ir2{b=XMMDv{dUT6&f+x5etWtkQ|-y1D;)d)Z1JG>+kb(Jh-5VmY`+ci z=pZxy6`jML_RAOu)J(k;7jOaFemfY35ZiuxCzPiIyAgR~6-~iwe%fv{ml|>7Zd||9 z_vCD$cLu~F<0Jjel682-yCrNd+WeGXICqEj{PUnueXa$St%#Z+OeGWqXV&zVb*1r? 
zkZ_jRo}G9q*oq-Mjf=4EYP9;Hq9pnNVdKTCu)nlJu?KS-bD~sS+>Xz6%79v9F6!m7 zCjmZ*7J-@4ifyKyFX9IbAQZORmW#D339%KgR5Cx*Ca)gGE4eBAyf-Q=Q2sjoGcjkw zpu5O7w@pU}li{=9qYFghmoK|K)Wp}UnLw5G*~8Fs>@#G{X~+tDkviYe--?d_LfttF0Kkig)8%X|s+{s8e@b}}cM?DZ*GCHpB#|6eyC9=mK z-#2+Z%1Zvbz8}}ILwWxk`ik2dj&|ba|o4OB_K8jn0p{CgO_YGpw zs`k=2DT|!7#-wa0MOBzT(0yi&M3JN8MTS!uxYsTcT}br?%hfuUDfN5+1&sR3mO&>8 z>8NTBBs`b{N^+w82=9Y>h+KlZ2+Y)1-=@|xd4%I6a(D-P5{Y07V{Xl}S*SpL_vhp7 zb{of`4X6`qVW$e|3o#WKb)kVc=D^g@Xn%6~ZxhjBs@r0#$$uL=^gD-?=7QtV{94-R zi{%p^)}HxFXN&$2d>-Ax4kfNV;SOs<+yh|G|AQ3eWV;(HQyOPc?b$(ow6FXR$O6MO zI00g+mSqOVErT;DXs+;CGbd4OBgvQwD9>sSc4DtUCl1tzkYlzntj~qN!&ToRHMpwVn1^Gz5}11&kLbvoWxS;Q0YVZ? z?`n~i_{Jw~XV&m>7!QUEHd;Rc=G6K=E?;E-Nr4Cz4D*)AIpF=k9!B5|s{;pvGd>-% zp_(Y<_u>OUB2Ep#XHV4@i4_2NS-SnT``8WmItEY8Jlfh(NGjOGR$I>kzB)+;JFTD5 zOr;321=o=vOz4l3vT()7z$qsI(^yVPv^2b4Q?=aA!rP zS{TRP#Jp>qnn^YjY@8z8#{Q^b&gN=wa)tK{K!;1m_41d?#1>;IXo|C42ThS^butOo~1I7IabOY>2AYza^A>y)=5OLd!15q4{eVY0GZsv^72)*T4gPtD)4}wfNtW!Oa37Et+))z8Mc3StHBeRjcI7Z~x zG~>2&(ycz|0{epW=tr~!Y{FDuYyD=TEIzGyDCLev_>S{wt+=edeLi{qY0sx+P63%u z*Wk&XPnX#*tl8?uHJ>IS<&fsnk*=aATLbWP$n)uveAj$BzeUZbDMa6%kHAjczHMSMnn01N&41i;hAiqOEkw@b~HyeiaYuu^gTvDh&pJ!0x3jIAM&7?;h&76;DSkMd~yHE zteJ>z4jC&T`jvs{7m0GKRk{5ZUv+PwgI(=Q`{lY{a={mc3ghwM;1LJj?=X^*yEL)i z!M3m30IE=q4P({B(mNP}{~@02PR8haVT1_|WMhEFZ}5ui!2#Im5f@n2s)anV%hvax z-xD74);%^{?SesxAF+_!ib>sST@JH>4Oe`PLV4bG{V1qc>%R%I{F~dh>UCFq8Bt7b zg%jjg`0NZ3$&iT0q5PZxfq1a^rUI04rXaIA@g?#2reVlHW;h5qaPt$lek+o?4+xlm z#AUQUeG00Zl4*bTx$SGrAbtfHAM8k{7L0Run{YN=b7AK{sJ$7>eU=7|=pke)i43Aq zHP3!5@Rz@eRLw}uc?lG5LU<0i^pGF}C0XkGh zYM|$~y}>tN)!|hyI7&x<9+EIUm=~K<%-y$vt3B*v|I<61VjlD>^=; z--`1BCn-OukhxovwtLR+uG$shdx)^uVO-;%^!|-hG%lpi^TpO#!xw<{`CVOJbUKEm zf6O0C1ML!LJdY}NJRbI|X*OzNAA`Ay&}qPX)t>rekXAVl+A6pI&%l*Gw|ztLLQoW! 
zpl?_!&qf{4w;_Kg@wC+ILEOz2`FUw^vx;4ViWqK%{Ij6Ew$;;PXJ{#AJ4EJT`;FhRTT zf1iBuObmot_)wqFTZHT#ekXJG6#0_)f24mdhxv9 zN82x~6V#1MUJgQvGg%L+e|{)Fdqi5J@{_H%@N~%X@)Y1Iu74f}XgT=@Bk|z!vMc{` z>!X+;h<-e>b1(+oUv?7a8|OQ|F6%GP=YwmZrhgF}^{=H1T+CD^G|cOyMT$hGs7RsQola1B0Z2qD-7{*gM#}$ak)J#7C$Ewd|ykycN0@ zV}?8HV*OS1IdwR;GN|J?osYx%iHu}J$Rs=@G<;{eG&d-?>GpR0JqT6sm&mzY$^v#e zmi9JOJFIQ52!Z>{eu-wezuV{=Hg1B`eHj85>i|bAm4R{JgpI4w6eFfqU==o1f(QTy zM2)|EF(x7whqqx{CniO|*JN?HWecAW12qK|TPdgUDSz1_$_v4`tj{kQ#{sf z_w>ex0@P}~;1p<@M_AV|DJ>50$wzSc+bG29_jOZkPST_FuR^HC?Xs4{8#3oCw^|SaN@ z#KgXRBao7$O1;6iX2vJYhonkbfG1g%axt2{qgZ(tEs9@>BYXW2CEW;7_BZe&F#%#i z+VQa;3~)>zN%qxOB$)_Whm$X;kbvONJXm~e@fl=2@LWQCA1FSy4az7;f6w3o z{M9K_%~BABtNE=b#nIW4?WPE&pUu*|7eNb}QGoB(L{2Z1HD`Df4&K~?fTW;*;s)3~ zdM_Pq99vkE0%YLJ%3A4Wua;N8_>@=yqLQR~WqxRan zTK}S4O)ts?JWIb9^u8L>XB6;^a5T&YrEzavu)Z!$%lT&zAwl(ZfTSoZ=zT$}uTKx< z{8RHb;VwhVsSkTMBI7`Ob~!to(-N%Tm=(f-9U;9aOUr4|a8^fX)Rsd1)xy%ZVeENl z51L$ZxqAY^ZznCCYaBh;Zi9rX0W^5^+pp}gmp3<`R;0F`GA zu79UDYI*GgtzSD@%C#}(=oj|-gUbZ@hMjy`Se-cgL#Qnze@yws!vPBy9>vWdtQMS? z0t6ud3BC#JiGj9^m^Pj-20gFuG-kE^yJJy>BG#<<`UF)5OV9B@leB~OPp|{UgUB3X zi>#*ZT=ss4brurc`kwU-B!@8gbHLZ!!(sC?=>Oygnphtp-r?K73-XBCyDtjc-P|hb z)`BlHb=ozo4E7SlIrk;Ld5pgqt+EDQjE96fx&>}%@G+Vb!_7%&;>V65lve^M>JGS_ zhg;5iI<8$&<{I>)!Xmde;>@?wU2_ny)^~w_QPboF{!h{xZo{nfFKYf64;h>B0N`2$ z-o`)+uZ*%}C#v~?<9fq2_%|D%VTbXBKJ3-10vvQu@>rmTKnM6qv_qC+gl`BEmF*S1 zso$Ba^)IGI3^oXJHi8>v>U^o?v}^SZSz_PzKF_J6<-DqSUzVxk#YZ(UZ*o4>auB1q z0n=q)yre3&^0Q9BhL30+prl>w(=NphIlq&!oltQ$J0RH=H)}cc{LBGsNRUWeUSK~p zC(Gd6N-h&Y9B!6x`6oYT`=LY|JV1Mon4TtW}Eks_9))>H- zyc8V9MXE^2d<(gTOm9h@_OW#)U_OY_)j*e^~fDaJotK~ z8~e#@6h7eoJWb!D>1S*DMLgc0pYg@W&d6+>nvJMiu-2)NSu;ku{hPFZxco2sH1gUD zw9;EMv8;I%g~G<|nPFo=7RbIzoL}!)@4iL%eie>ItzZL~5B7^A-{gxLd=QiZ7;K}C zT9Y*i`D0gwjdM^B7*4#NU)uHX+lTQw-uS1gBNB?2?~Qz1EEA|`R@=3rPQe-DB&D@o z6i=E2ml%&wfC&ikHVG+$00ZiK2#G6h66R*5ZKUdydgE0X7+O;hhkl}TC;vQblZJX! z)+p-Hx(3LZ3Ft`I3SQhG70hx~0J(>f`RS^HNveXoQ31qYjf$8YoF!A#wZ8by_@B4! 
z0QkSZrCa=O#ISUS|D)`BlJI|_U55+*r6?aq_Mze*q0k4%zgaKn`xx7CyWsy-?nyS& zQh+z8_w%1S0RBU&g4bLX*!Zth6>LOFR4^Ww1pa@1Y&ZCqV3Q6xjEalGqV(=h%eekT zRCut{GH%|%-A&&mUiHqA&}jc}#;aa-0fIAuAWTkTH|)Zl{x+=<}Y8<)KvlZhyNg-z?3{*jWyKd=CS63TOl8TBS| zIa_GK`d+A`8agE_7IZE6KB$$NV12-P%1r$^_dV-gt+CK|8mq=RVQk9GzY&G;JrOwz z58>7ldwF|SwYRXsJ`b-1+2j1Tu;N0vU}NvN8Y&*jj^|&=2F$dMdYP*&xNym!cE^2q zB&NRK)y*Kv@XPBB@%;qYRD_7u-I23|sf!MT4seAr>NxhUAyD>SlqcTRShr!}5F5aN z4eGA^$Pu&kLN*MVy@hViGpg{e;fe2geHVQt_Z;9z#QeeSrY$=0v%bjbQj3}Py-ue|ejg_SobyDIyG^oCI2;&%mp z<@mYRhn(G(kRbwvFu_-iLEy9w82Pm1T zUp|Z0?DKaUWlIqCA^Rh_xC2BIr&NK7T8Vf-PunK^Lr{FK)A&uC#fQ-Nvi%#s?K?Lf z6oz9*pqQ!0OPzr_bR>Phx(Mw7I)YlzG5M%0xXD5w)f#yVxQoG<7xk@8b4@mJt*_x* z)ISI2R=nfpV9m6@E3eq9nR@-pD68{mMw257)ROO3BA-Vd5nEuUE~BTHkzV+&n!u_w zD{L|$6Kui-AeBpNJmv~(RWV%PY(!}id%Md2KkD8*KFaEP{LhdC!V)J6Q9#9vO9i#c zSgAxrGcqGHIwPo{R;ATi(blR6GfInSXoBW(92>2+)TO1hDz&!ajvLJYCIJ)zD62xi z3QrizB9>Lj?|sgFW=V*Gzt`uVuP@0w%YE*0_IvKR=l&``ucfdp1a$pa*0~aehs|z} z%I^k&?@|4?LEs94CmjfcEhmv{;T<1jh8}D)^fV-YD3Ed>F=?W7^|is$A|j4Ggo;aJzFsnmr}3u*Z0-`>@rPC@;Vws9_LVaD4l9gC zkhex7NGHHU@!u(c=vMCo{d|jgowzQ<2SY+r$~bO_oI2}!iU@WNNMu!uj-5sl3GGzs zSVuHVOS0cnMYGw3A)@*5H;f#4=opbB8vn(7aLVV?r8lz1WllwP7#jx#eZSu1L38%; znfypq^{~A2Bq03l(uH;CiBtvv+U8N`cNFH+A+Z^T*^SQE>;umJ456G9CzDZ~C9bK$ zH(4d-dz5eCvu|g=Hxzc@_lChFX+A!n5AFn83AII}s>6z9Oj4C2tz_lA@G8@QbTDv; z9n2aB)9s&28%_#-nab1{TDahK;A&2?fEFeSnh&rL4x@~~^E~1wbF2g0(*Qwe;VCJg zVM!uT3;%_~)+|@9gchb#MpaG@CA@REPy$BU0m|4MScY~@m5L2_V22-mP1;&#Pes8W z;IGJ`2aaoLO9e)7=(!(?wu~qdknhu#;Wktto(iz=-KgM|bUlf}=_sf4_lHCU4be7K zP}Dt51;|Yeo+$imi_(~o!1zP#9VDOw7cv_w4IBv3^IugGkYbR)I70#>`N=_8cgp0D zfV>rPkR^e~r9hSh{_*<3NZ?I(4hcwo8Kyrd2|Vbb z%E$uHNs51if4LTwRJdPOq$QqBjzQKm--Y=~!p2?eh=4k36)qKIG$9&Rwh;kR&+F-W z5`|B{p~zQvuY>#pzISx2gPxgo!+62JQK#ZOP81e)RT7xN<;yu8YFD8QLp$Wi6uQD> z;8#*x=7<{A0Me4-`Y;K9gK>r;(+=@U%SLH4DXMc)a5>S+e?_xMVhv~R(2gv=e2IFH z3>-rx^M*88ggAiAK&$vSv?P(h=n1mLGLVN~6-7a;waFPPxXPINIRy83agMp(hCEKv zAs_-$N48NV#c5Sy-$t**KL6>F9n(iMZAPp`x(e|Xnxh1`1kG8`_|dEOxUejdYKupK 
z!$fIJelRzKRohsmbnHxX2DA|{H#wvQB-whd3PxzjY^6sI$WnOvBo{TMxEm);CJLW? zP06GK8DqZARW`$ovN0!RBnm@0)spuy~&*9~;Vx zqvjzDC@-yoX42hf=0Q^$KBVRXm%yp;-?FSMv*<*^SaaCTMiCyXQo!`UoM2~+IW^)8 zfdrslVR~oL6?mAYC^wH38&5JxuCtnviEsQNEl?gqYr3r_I z*8&k}csevZ1MOBu6*-hPQ0mFju)H;vPKJhOQ=$zGfACTp8Wwu$h=!N51vxY2lzKBF z?%*{1{J#th{|TGPv3GR6Y0Br*XbtJ6Mvpmx%F>8sGMU&v8zLhuE`AQOigyi8o$s#x zIXDlN7e!CNx!GH^vg70=7Navc5Q@^_!~-cOOykiih(vq*2qAwt+PvO=5^~T)Bf}AZ z(}L!JT<-8h2w*m4fZ2@ciMlX3SF#fupbvWBJo!1w=tD2V3*o7*7zWw zv@OPGg)v!@7GaivunY-3as=NQ?ROlKgUecEuVk8*Nx^h}s8%LxMpoDLn~w=)m3YzeZ85?)x7x$mx+?MC z1i|Ok7r03H8z_pzZ!F<3S9a@`MB-m^A;P**7FX?0eNW>{rKQQ7YkeNpP|bmvK-Au*D($l!iKC!}5T z<-pV0^daS%FjPhx96*56?gICHvf3#n-LDtFc1b~AO?Y5Iy!bh;tR4dqyBx|(P1KDC zurBIN4av6Csck!DIqjrBFdbS2A?I{Rp&g2k0Wfnz));}d#6sab4wpBN2lA6y)Lf^B z`fae!C=qw*r&2C7WK5BJ>gOzrF-i#=nxgfNDbbS>%rg>V+NZE9PO4A6_Lj(80m)JN zC|1&P1Z=?9=%%J~UiBtL;m%1i5v>&C;9fa|yo9{yn08 zCF<%&e`vRV@4K!KHgZ1Uq4?X%WGE7^_=ofd>Tk2SbJhp%abDOlbCKh3yMZT%KXrHsFA{0yTv#`5sn{cX7m*6I0l+x&$WZYt;~f8iPWSUl~e4T9x^ z$7g*asZkFSpA|YsB>B+AFdeqn6EoGPdo4fsbkqA^p&utcOLCiJDI~Mr-a&lUl#S^@ zw2RM@c>HgV&$>`Qm4Tk7i{v`cgT`lVlX+Wau-_J+_4>1b56eU1v&OR{y={C}|L1s{ zDE0mQJH==HHlpym22I8K)iI*566yG?<@s&mv(E060q5ZEx!}y&8)m)5%%|Aj$G+{M zm($VN62W4>jd>&32R`pe5Vb_@$nIP_C@&l(jPdaAmGaVw(|LqcId($e!6 zNM|uU*MWmAxF6x*ZHD3W0}raW@N`A^D~jd zl4kngBhQrPL~-`sXD4@zyo%(o9ZclxNk63a-VE=bN{^+#`zND>;qq+P!hGZUL1j3>h~ zRaTx=I17a{y6`!?R_;J}q5wNFl4S)<2x#`LcVzv$k@$n(D&{aqG%tvpMRW<_RT|&l zXp20TVw#&)ivYo9mj3KE@%vodI+*Fb}(=@u)zJ`!I z(@tPkY!G$R&IsCx(9USM_Db3rK|7oU+R#Nv-E6kKN~URJ(ituSm2AG5Nr)i;RZlGQ z#?h7b`Aenl8dtPWsFejtC`~d`uxmGBZL`nBHJ^H!4$eTDV0_Y15l&7r9!%F++*8lc zs{@Ll)l9_@#82=;oCA-WlJE~Lbb@nt~ zW!0UL&-M^4k12HC?r(}uH1Q!LLzbeJDTPbEaVB*qrG_X#XAAx@ly0*EvgAJAK{V{>)<#@q(O9ygSL^G*){NynNbfumyauHIz1m!do&Y| zi9R46b1yZDJ}8MH5p)dBGE(7QqYuBGNhgRkpkp8O%gN^A-e5j1L^rDi@a<8w!v81k1fDlAjS%3yvgdf1GsmP@j;D+ENz zupwhNbEQm3XKs9|#?$J4XX6vl;5a=JrdIZxfwvldF_y*6N5G~`_1hk0uaN_Gv#d6V zidrh_EFRpgL>v!p<981?@Zu-|x!uchG!q+tkO6F%t1XQ*(PKpiFJQzID5L| 
zsSrfsI%tF-n}oBqJC1)%+(GFamg1;2&a3vvZ3U&3>Y9-Udi^`H9UlTMn1|)_Xp9xHtR{U zt<-)KRhs1ZnP!;^XZHB$e1n~(V_JHGfpmPT`sXBndTV{!??O~KyI&IpU2(bu-NW%V z?9VdFeubZGSAb16xF4P?+qLl7p?Bg(;tRnIG#YTB(pdc#9HuZL0<#3Z01_ve=o$p) zYWrB0D8#WsGDfxMI!jnMa)X_gCYe*5wN0W{3hfC$0;loGEA4Z3<{AZQ|B=j}zC-`Z z=*la_G?DzvD`$|8ITy?fw5f zT@%ywTj@uHYzOc^HcdZ?9@Zg`VHI^9+8EAAk6|a-m9qXJ6E8jx>W}_PdrmlHq6fa` z?NI)9D8K95Du~>})`~bO4aE;r;ubT?5#>tYb#z1sd?f^4=dAUtweb$ny_K$LFG#$Z zl=|X8g2dtQAIq*BGI=Y3KXQcvxY_=Z>0DcRB>5ki$D($YM!$#aDi|-94Wj*n@jEme z;W)S{Y;CvC{R2V)-_@Yh@fl^!iP2|BPW7XmS?=!Nn=*%IN~GN6kHIG_W~Ub&MGO;1 zv{(eJXEIyn`qyL=VUAp_kR=XQ=F`V^%%J{U|65ky%b6PD+pQ5miT_Q`lR@$8Qrp$C60-O+_)2?9&)(_V-^E=V6zN6Fq(w zwob#!WzUB-slu#zlM++wa#zImD*EBJ~hs?BB+?1}E%?zClm8SZA-;n@jE) ze~spc11CuPlhrrPW~^}HH1e11WOk4hxRLVzMJ3Bk2?Ha2Nquj-8x^PQkORa1x#>%e zW<#ZlRs;96c5AuvF%YJxNIzm`GgqqaO+JycO6=Eo+g1cHYx3&#(_IA>pBO7FRm$e^W?SbPvg}!{p@!j?4 zLB_YdwEg(f@avlkzv49f?31KMhTdmSF7}-Lu<85@%)UsIA!_al7kVt180F%lMzSmi zJx%E(<^bp3-Xt*(S_{UsEc%^Nc1E4%_?r}-Qla>PV2&HZ>>4I(4|r*9jb0PRMK06E zU28(S)`c4CS%Ya!^&DFp9`NbK3IExy5P8+Bc5jcKDc=1HD(@axQW^VH);GeQ?HAPh z2bQ$f`&}hu#F|`$cfFOvvRfm)Q%Pw=-2dbccLkFz`N2f54^#>bSU`0}LH7ksp;INx z!Dt9jcsWBrIsBY!Af){d-zol+A87A~bG`!jQq z_TsUg2v9fH{Z5_xjYfG9Ol~O;CVbnd$yak$*9&T*MJ2&qtAe-89~{D@ z^Hpwe`F5@jxo3aDvfgck29F;|3bd}fx#%w!+qfu`3!4jwC)%}{ixRn5=^WQhwzRHI z@}QB8c_T{3y6X!|0tI=U#=1XDvej-liL|rpFY&WQL*Ov`=CcBh5TiAHR=}8+8*AIZ z&+UHW%HCsv!V!rJT0Ko;nc1@UHg<`(Zp>Wvw8Eozp|@nd6E3fxkRmnv__Z7A3o~AUYXuMw6E%%{Mbc!p*Wp~^e8`N@h`L2=SA|9$ey9t4|q7s*)O!2Nu^97 zL?bPXE2qR$q^7N+iDUKom1-c0Pk8l-RXgQbo&1m zJ|nL0ESa5~0L<@;!c7bvP5uOYZ^!*XT{{zhFL&uZI^#bVI#1ivCjaS_&yA*j%=oi4 z1g+}cf#mxdj^P-w7Dudk>ZW{cB9OF4TyjlagVI=^Y7077-0VsLFQeWtjiVY(vaum|mc@$gk{v`tIQ3&4kU#Lk{yASyeN2GV z5U5dOkC}1%93Ilj5^=23>}%WgSr(GfiYyL(Ia^+?vujNuKCH24IX1|sX4igl1B!dh45L!)DtGmZ%8kV*oFN39yEw7rLE*>J=P5`^PmVAWXFU#ee#sLm zi8GBuJv)4fg2}OTH>WLKfP0x^AIU_aXDEK|i2_6crhQR&XZ-H!;{^d(7`Uqs=Rp<@ z)2-Il7CBHfXBc;XhAmF~aG)vm1NkfU|12xwYNaWAQ@1SQu;=}RYg5#L+6;RzPN_o*L;sVfTGYyaZYEE#{k{lO~T=9kGIqSWO=-v#~J;t 
z^S47j-{RaYILg0mzhLM&;j!pInN3>+MjP>?2GGU#*d2u zk{3|dOdt$^Ud&_+SXp@~za)O%dboL4!3$IkoyDn>YB!*5UjRV%iOZ0+;qVd|9 z^e(m&|08s6_MK@X>DcH^snVRM63=g3CHDA^+rEa!veqzy?x*c9_D{r*3rkd(kubtU zC+9HPQPN-LV7&?M4O<+#NWM;br_%>}@0>PkjH&ep$Y)I)z%p*>Q8FY6RIbUFbtA@D*(xVSm(;zV-vD3Vb_>!hdQFSw9g-l!z_BR`{|&Pqu9|1h$z+ zr9CfGNY?OCilb`^|M1OUM-Tk^qf2w}5F_Ft(r9ue+W(NwXZ=aJCau_eIpcXQGlnDV z!pwDcJf9@A(;2^4&UiLRnuxaJ8D@Gki;r|X4d49r-wyn`(0u(@`6!V)p5E=oWBhkm zgZu0&?R>I&a+284Q|G3mXNY~)(+^X49zpos><_-2gOa77`kPS zd@MS*uh?3tlX$i|^UR~qPUBgWFQ6?77CQTRWaspL!;YEt$%36{$Faz=8twr5i6dF=8$OtWG^>vlxLW&ka^aroc27N;2ORl@ zS;gWKp2Kkb2C^=3Ubt=rM4!ed43xBj#@u)EJT?pyI3a9@l{wG*JNE#31?APTXCMEz;zT5yjOGdZLU7bjGrB&2G~bt#(x3< zI?>75zKqf`0{kyz5ndqhNg4t(*jX0)30+()Tg-+Gs*`(xz>8eiM|`RfctzgD3!ju9 z1A)JAneOt9T*uC>FMO2W3gY_3Adh#c!hL#7u0aqdPeF1(RlN88fs^BG%qco&-~Cp?!T$4Fl}Uxl0|N>cv-W`k5-s z-OFXtD?E*sO?Uf=NydKtEzi)mG98qq_Y3$ZuAIsI%SwFX?*J21Ar*slH+OgGo$!Q8 zft$^(#F9rI&;2~D&&5CKoF{-kcb9$FI{K5~#5O3|s<&&F0X*ogLc~5o{t_fA@9OP) zR|^sdEF%U^1*^3oRQ0tvjj0hFh+vHW)I=}*H`+%OUoU4n2x+)C+AsbZ7oNR%QAz?jP!qt#Ngufym>LUQ2eXK+kj z^hWz3aaqQPuHcqB+|?ryP9c0KSB9-y5Hb^(Elw)qc%l2shzL47AG6nZX;VJsN@4@I~fBI&%ktksN~pB zj+0k#dN$mq)?P`J2cJ#(rq9}!pFw#I*kUPrue}?=PyBa^pPb!iuinYni8DAg=h^h7 zC_FrqLq98(5ta7;6}UK+j@i%h#@#(^0oy>GZd6MQ>ZILiuz&pmiMr^t?5GdaW%ij} z=?Ioh_K#>P6>MK1))%D2E(OA|ABi3PiW7@^@){gk0u`a9etaspTb1h1_fi%^a-E$| z8A0H^&Nn+mx5@j~=!=5on|Ll*eohKq&5bm0nNzr)!dZ#kcD)3Dt{xV*oVZmXeC7d1 zHv~QulAC*7QG5rr^UHZuyPzPnS<1c7$wSK@+>W3KPT_xL~nuUVU?3V z%ro5M#_=y8tzsZK>(3%DG=9teF^B^Z;rKu)){LJMU8{`uA=g+A@cJ5^d)ae&km}@J z1EphD3VhL7vM*C}u1-d(u$BCNI^ZgOuGbhVEuiNbu1JY0a>Izg*;dOdZl!K;Vr68=@s{ogssIa5vR7 z8^dMBbR}1paCB@`L!Uu9H!?FM6Wn$3FN0WZk?1tQBzbTQB z0(xt7qx5Kim~nBOoP&PC;NaqlNdgo6s)9)S7k(+O6yB;9ZBUBdNR1R7L{Y6+U|*3f z`H_~a5ERd%^tq<=$y#w_)>|kg>Re|Zre8GJcV1}RZt*cqEHvdu?`4F~vxCdPz;Px` zZuV8RrkzO->RTN@Oa1=1dFeQlCROb-zNE34It~}>ml=ZqWu%A1i{ zp#+DwBqH!BOVEkpRS9_|Qa>;qGhnmGL|wryybA>#v^1QYY+6C85c=eDB$r&#F&Nre zGj5oAGf#D`INjaXDtKc5@d}fyZ#Ex@8wN{drC2ICmcf0u(dJZ~baSMP?pHTvKqe7S 
zuiK?pI5RDrb&0~0?vS3b<-h74O!cE!{83Y#HHmwS-m`~El}Lms^90P!l+J#a`ctU$2DDOCq^x!{85LQ5t5Jo&M63uQjT2o<{EFBU_OF&eZ;7G* z!aI{~)6nzNx_l4u z3bFqe3$sMa@o_rKPFLPQ=RcqJrT_kQPXzh`Vk>yL= zRT3M8M{UB9xrbEg!cfb{@~UcgrwQFSOOY)-(T`Jof~}1~_q&Z%+s_X*u=5>iT((tS zC4*J1MRyW5dnci)w-F@qRf?ll?<_HA>&PiLKB3J#2ve-)jqZgTH@v-u-r zCEc}k?uR7>Mn1gGst~idM!!8nUmKeJQnbeC~%iexf#!%aL!zKvqTSGt$tJz#oc-;9xnGk~IchE{#p zniIBw0G}=7Gua}>mwjcRqShnUSnT=RdPl7DIG6Tf0feln&;fp_f=*~`c4BBBlH?%b zw~R-`m+_^2_gpbG#KvZdmVRHnk@pD97x~fYnNF&c^Bnose*Hwlnpqq_^z*Er)rxb@ zv%Zn+#EmmYQBv_6ens>jR~`ei-ls$lu>k+JSUB0Je0F~Q9WDVNspk#akp z2NpfEN)^^pC?$DI~IYigRrXW1Lm zk3@kxwZz_v#D95Qk^9OzC+{F$DtEPP*^MNk#}SSS8k-`d13V#XAdan#B;KtA9CCJE z#QKb>kGopj1JpS&5`RWs1&0J0nXOv;BG$=~^4p4VSwK8wsANO1=ANev-*2@~W}w0N zYu72C0reA!KpdyEW;RQj<6zmv5?Q#Glqt8@kn^wG&F$8Pn9$q>lDLE_Q%iXfKyz7UIun*W+qehN`|Q zy7R{x;uP22osN@?C%7#ll|jp-=+d-rPOe-DuuPQbh{d{1h>c9y>0FkX%Nbm%Wz2@0 z)U`RaV=;>h4let@sh#ZF!uVqUGBuV;O}e%CYem}3V@)b_UQ;ta?&NKItXUq$AJcwY zkJ;uns_j`* z2P$x;YJ1ky!RAiebLIl^snQ;Xe~@=!>zS+gK>@G;CG0tDYiQTUKAkp=t-Ux=a2DFe z75$!*jqT$2wg1~)KWrR=YEp%LBQ~6DZNMD6hszY3OVJ2@Wsz2VLx-!xhN%$2`|WW* zrO_$#L?pDCdtn`r)Z_KOls8h=d(j4!V*0&-lj3ed^e2D1q!%(I73CdCPu?+W1U zV(v)X+DRRZxy<5Ci>Uq-9Y;~^iTz)4J4;LJF(sJsF)ZHeCC_{nP0Nny~0xSF|rH(c_puKg%Qe{x)x?y!t9B;0h;Ro2f|MW!JD_oR&sP zFXWwMb=XNu&doYmcG<{%<>b0fEC`#D_FswdOJA2TkHTQ7wYO@&WmY8JF=n?8Ln^mh z6F`b>FN+(x9#dK`~~T*-k9!#UoLX{ z>5NOCOD9jD0C;0Q{!5l`Pk0w?XY^5feJAk$)_AZk*MUA1G!_&-HjM`xMV$&Bdo!{q zCw6QNTa>dXHu_Hvl970@y+*XRD(vj%gzpBc&Oum2I-&en2&jk=m1SYcO}hL)^V&?nE|n_k_8tFn1s5o#ZVj zTwpG1GT)1Dv~ZqzI*rSuxtz}BY;!pSW>fi{P|wPnhcOCxZb$GLCiFHpGO=%Vew{eN zcctaGWW2(f|0YgRKSSEUi+nk@0a*-5$jqdwMvBD)8@T+9xm=32TFpG=*I1?FD<{wF z6fK2QrB3^8>Vgt0?Z@Q;lY7cM!GUs3TYqvcew-m_j~sb%3f~)fA>9;tAy9To?6kY{ zx{@S#+OoW{eWvBb5J`(}i}k#9Qem!01`>)nH8{*)-V*GEzZs{872H zAI=ey+0W;s3be~!)Ez3yXgXyeLpZgDjuwW(pjVqV?v`K zDWV4H^>0gq%sp_tO3Dy&6}sU+`Fy3W2QvW>!vHo{ld*u~CD_p?pNFkqx_VpJy2`}R zll&AIj-m38g){J(x6Y-m=o_-vkDUUR*cX91eZ%$uip~g^@4u6+jFV>wJB3+w!tRYm 
z->RVIptZ_Ah0e=99IV@(IwDa-|9c(TI!T=FIrHb{s~M)ufONGYqll)5(B|G-HhF>G-^{T71%la8VXU2g6i zaWzIH-Q(>=-ub>$NA#{*e}3q#^yDqogZ_r9k`u_uW~U<^z{g)J@B!LuG;@^7>PB-Z z68trDS;^&J^)jte9yE83O1abAIV$Bkb0;E=?=CWTj!L;(?_{4#;jhf)bS};Jve;HQ z!91PG@g zVC7{FA7kqL@&-H())xC*0$kOn7D|5Bnb@5QO2T6DDtQH7@iOk!bQVGPB2R?ao>tqV zl2__qBh0Y)ooNMmFq_KjT&5HK?)VC~1vM^*Cj>(d%4eizcEB20{duB+Mx(+=S!Rvd z#``se>Js_*r*LFqOf%~p+ukBy0L9kXcMXu@>+E%>@+*O`5;ir+cRDqSz}1)#dD=32 zXs)rBw&yiJ%G2XA_9jOsr(G%%bJg{HK1$E0)AotABPs-6zVjrT=xN zJKK0=9#f#JzIuvRdQotbh^z{QS62Sumka?7iEp_8@eyWi+xLL0Ca?k($DvMvS$O!Y zFY)uqL^gGl%8N)Hx=m8~47qWZyZT%{!&OpnA|cW>#pdQ(*Ezu{`&xm7ySl4XTi)Ql z|3xPE?C_VY5@zREIB_MF?+aCZ%F6I-6b@Ny$JPcDK9~FTKEYeooP(3RrcW@LVvp5Y zv}34sVQhVwdk6ZO7*XU8`z}*q~nIe*G$!d>nKwtl^5U-LD5brHgQ7 z%H(It0T|j@atm%`xY62gn z>I7bsaHPmOS6=F-a2$Zv4=d^vu_l$IE=}LMeAciY`sh4hAH7VTKM+_U_!-^@hCOqFiEG{4ygLb-Dcty9B}vwU*p1!8 z%b!u+68>JZjXx)hbq~&KmOm@mnSCN}@4eQ`j_tvD?A^Y%yIeGJaXI|QX{J?k$(sB{ z> zm&O_;B1 z{3mux)q$PLF}QTQaPeC%)UChP{`_CL5;z^7`T2E0Yn%IZVqZ)345}M?JP>p(XtquPMft#iJ<3(>f+xjWi|`uO@!Lp7|J*E zvk%aBgEc^9ot?+4cKYt0yi$GF3&kgMeAS;B{xoZbQtc?dl>ir%98;=WTxk6!sNug1 zP4YcT+^Cu%8|`15Oe={=oy4VX9#QT+%Go1%gS;X2r%aqALYX)J;7!O{VgEp0QmJ_% zfsP)jh^EW+v)t`7ISJIHzX#ec%o6}Kh70FWBMR?j^^re%HFK;^-P05IvF>Z#I4Aa``;CA-)|xs4^dsY&@t;iR zQo=K5y8B1!-2F#4xd%K}%QN?YM5VrB1X4Qj?(_Qdn)KhvMDyGTTK}Dv>5GE@7`&Yn}V``R>=3IttXfxJS^syo4Y$e0Q$;eCR;@5CUzC zI^dDEzu1zgE2nIk;H+E8xn*CEou=EBVj~!a%)VJwk>Nya=-|`;NWaF+be37gTVntA zEO}Ns^G9M+;!(qd{K~^47-ZB5Qkfj?DoMK%Lx1(P7QL0CCUx!H=fr^1{@@Ijp(Da% zLj*_ql48h;>_&9#=C=k8JyI<~jn2728tYil)bKnbXoOQ!cd5|((y5UN0g-;DX2MMo zR*B-fhe&@}r_>yK5qPHf??2H;D-kKQKf0OM0HAp9RoZY3l*!yjMT(gYuvS8m89Yae zs)(Q??3X-~YNpJqwI zgl6mwvG0U_zHy9M!r4C#$&F%#v3S+3*})L&{W!6VFJ661RC4k5s*K(*x;!T4y71q+;!geVJ4$W}j;fa%Wb+yA-FF#HB3>xdU@Ti3qeEneO`xa$u`@&lUYxkLG%wYy7Qd}~>d63^1+z|3AAcn{K|DgtwR@VcWupr? 
z#Njh6bsV9lx)fO#A#(QcaQT>7Q3A17hHxuU;CSZC?&=ciqa)Sf@@?*Fu~-QuX4S^` zj;NGK`Cj+^_dyAm@)4Pduxp{!w{wmw)-z(YU|hRm=V(4pwYQjS1oCil6$QIC2A9~u zU7Lc*&H1;?IcGr9R}+CWmxNiNlds&>A|wLt*IjrPibCW)B0o~@7rp^?ff^ST-oZR! zBWbUe!%-*@)hpt;TGb!&)?;eppSkltrpJ!Kyvy(7y@qxBOOc2p9w?O!;xb#mCIxb*Cun+k3=D6j&l2 zv=A}KM4}&)I?Fp>E_X&x4VK4diDP}2SgiPK`UWlb#qF+SKi{dTvDxm)d`3g)F`@Y@uWLUkw6r9sZydHGKJyqv3NwvzD4#1egc{#?XqHR6T zz**FPabi^IHs~N8o0;2uh0G1_E=01mmb>r2htlYm^Dx_Z)LIuRpA%aLsya>R&eq~T zLb3cj5@zvWNs7lM6N&=f`7L78W+y2;uQ1Om8Aixj74alBT%)EzRA-=$XMB zp-qrpYx$@`*qqrCa~S+>b*7%Po>+6s`FxJfdG&IP!rol$;9|DwujF44xQWu2c@VZ% z$O7q$=E=D1lYc&~WuDDGd4eZ7?TdaCLQD*LWfv=ovZ64irQx)+`@BPcOve8}zL}8w zZ(o-M-7oAZi&!ac7V^6#l-yG0vljc3TS<4fkO_L1kf`-M;O+{0mIbWOeaX+e2)vV< zyD(o2b^S2Zb)C;z;7e}k5M@k#rLuxEF`+DfxWgt|6ZjXVi=A$+KBswePwOdRVa*67DcpX}?m8j=mA-o<*?#kDxMSky2*K$|=@WiX7=A&)NIzT|bOT9_$cNgyhzqi5f*(HBXhH!uWy8>IEt2K}~ z@hBwJ!`ce%JuB!18WUhu+i_D>jDKQ=KB-Xy`Ne0T8rOW z3b6e`?u-)}?dDzPUFoy_7w~=%@O-TKn zooAtUne@%G!0@Wi1y;cUV_XBB`dnYgH$3PfT+U#2`jKp2a>Oq3CmtL)cXE%>j1)+= zVay#6z4e*T`?WIWN6MHVAXY{F^c5(BKe^69-}r&g5*ZfN9b&<5e@qZLUvJ*m098F#aqU$m%rtuB zbDckaLSzjT0r9|8eh>ZH;rFEEDGDtz&F|UjUFX`RvKH)sO~huYYlRo=B7R5L>4=4k z2?0>NZuEN~*m_reqWa9}_9v3&banwvKcUi|0VtsMH#3^j&V3upJYh1}+5lt0G=UPbYC~S$>R`5v6oaIT|s+ z9g5FyZSZqsp&xX5Am|MuifJ7HNJkRy9G?976aSHcYnOKT<9fKK49o+lkl5R$j^(hg z7t%=*IgWgrB^D0$O~5tJwbr%YXKf?-pKG@@7ny7|GnMVBik}o_Rj+hswrZ`CQb6;AMy)t5oEL zieUz{MMz2n%oaGwLP;^aP00o!s`{srZm~8`uA+Ifv8^05Z_Gh+ zO$N>N88kN?1kJ4(H1BGM=I5)U7v-S2oRhBw%^6L*9Qgr`eXg&dSw%BAQ#5mjT7Y5| z&Ddi51yO4`h4>H@Bhu}lF1gd||X`0!g8 z5zrbnm14|`VIh`%1FY=#taBKd@dE_}8;o3jYrS`d;>j@bDP#xsF2@*kWQv#z(B=Zy z7cSj&h&pW1kn9 zfdK#q8?h}+jUExOwBhQ2^+{W%u1q|5X2ZQd`nM7yX5!Zlf zv5N^O$yn6qB)hC5(?N3$Xa>iK?P>Nk+j7v1(8ehS<0H5It}O!Cz5;8RttgNYxG=QF zx&tncSo6ZqP?7*5>+i5`6+ zotaq3*4F{=X8{k>hWC7~|M^^;`WtCtzK2!PSC9nz$~D}J>aX`OX>1f!A&6XSRYNhL ztp>A~-msvva4_;pQ7hIkFC9URLRKNIW&r4bE z!`?JtKraEX;Ss|cDw#hIc)wIa!BT`)=J{NNBuK1Y8|6FADBnuoW%lp3YT_{PPwKCvoMv(~!#BRqH39Iw8 
z42vs?T88d9rR+>Uzj+zKaFDXTK^MfcFol(gb~9H3OmKH7^mJ%9xN+(wN-f) z-7Xk@yW21B6x&od347{zX5g@EjVN2J&2_2)nSY@HSt%Sn%e7amoxUO?OHmxC#4hqR zYPQ~koDvgr7AjCwE;P>Q5IKzd2d0MvrgjEcop z);K?yPwXkM`^n}sc5(KhWS1}`$D)Egu_M4Pa!#?!9oU7%W?6Im@zEI0Xss{2b6j5t z1~OXqFZRFxn$fc21@WvDI^UBPy!!>EtBn1$#LpxoFhfPJ>!b9<8?gt!2CNnzE&4?C z(w7x}$i%ZXU~Tp`CKCj(>4@KP@r&nV#jlvjB7M=Ri<#y5tU8Q-pJ%Pl zi+zTb##A~hgb_l*vPj_@(-BKy!sO3PUrY)xqRq9>`ouW_o3a=t>>53zS``C{x$qM0s zwXKZ=X0q(rqLwkU9~8IDewGAK{Hjj3LoMypX+2movme1KI(3E5S}HaZxDk6sor^H* zc_()M<8ql(Gap_IBl!{qM@xjDk;U5so;?Sb#bTj?)-B$(g4H&XcIf`B|j+@cdB3jl@ZiR2vv`i;E(6mLr zJiA|3(;|lJay0GNj`qOe>hr+Dj{W2lXC;CL=0znUj~#!}HWkQFA#Ug~GaYf9%fJJ} zcLh(*GO&&ipN-g0_@J4Cdv=SmwNUKn95gd6psq_@ON4h91g!T})q-gV7W6Wo7C*O4 zaI4RJ?VXuti5=I^T11oraFtITTX+i|SD(jtX%G%LYQ%<;*{wJ*MAzmxFlHf*M|_sU zxOgwpHWcl4WJ4i}XCjKd8^ocF7;dj)U+Hm7xq!*&SWIxA1=}$BQv(v`s4#9u7O$sI z({cEBQWe`yxf-)X9NIoF)UgQ@O7!a{ziWk&#Nx0(5)Z!U`IVR8;jC8$+iM86Fj!Hi z#g~4Jvw(M=&oeLWf5fo-79&b5MEs%|Lke31+C?-$yJwDA@Z>duQ)Cn>HtW0Cg%LH^ zXU)OWjORG*ya0gWfH7+^#(6P^xvzs)uxUJ~i}GVLv7dI_e)`Q;ps@q{NkqAKsb>x| zwY{#@=*pQs7fy9mvN;}$w33}AO7?PlqGW1S6N)%Zf=wMI+ftSqG!%UOdYEP|C8F~nGa zL{97-mN@qm1fgq@&)N}>)o`IIUc8K@EV69Jt`Z%O`yE4Khld&aK8NW5JWSU!@noOd zZ+K>+5EY!*?ZQN_f!Xd+tmWEoEyl|#tda4uF0}SryV=nq z?NIjC8ew#dL7kh^R-*d6qD~DDD!>^OtkF~L90Z_uk51Y?VbbRFe&lm~fR7?Na!c2I z;6CvCz5*a*kuJh!hPx6wlZ61!LY9ME>WS``c(q^Zmm~is;A2hEI60}AY1MWy*5OO_~Wbw!Qsh_3KxURTTeZMaz-cc{| zrJt~LPe;~x*IEm_dp!ulrTA22H8HL$wfT<)xpuK|qN}+18Va4S+v)RlSD>E-jQoFI zMIo>qK!&>ZvDi&ycZJWwf~4#3<0EHzJvc;z0NqnpgJ;f{w2)jFZFo$9t<*0I&DN3D zg$33nMbE+CK%@)*JLE%Y{?Gp?kVH)l7U2r49>IlZ}Zpy*SqzaNRiv$8|&U*c-wM<0tYW# zCLKMDi`BfCCogV1-~|CN4f5ui1KzlAPs)qS+P`oQe*bgo+e4G*xLXS-^R;{M2YYy= z6`k3>JbitC6K}T)pjxE-=alEp^!14L<^7bG+WsfycNNgg{#|^)?d|o_q3s{F8yw{q z^Kzq<|4iy%PjT+dke7B){A*smFU1#0@x>J9&J6dJ4vKH#B|SRF{nY`ZeYAt(Us60f z*e$t(jdf6bJ;l>wb?^T&cc@o%P+W!z9L{mSC!I-BydD6^hzMLd$GuUWZsKV(Po?8u z$y2f_4t{@|fRYc|(1jw8GPhIjIqvuQ8n97rPWy6;s8iZ*khgQ&zU7Gm(0Q6PG{@7m zQurfb(_r~>_tfu^-Vd6b3*U@e00-UPM5Bi-*(Oal77z5Mw 
zohbN7!fAr8E^=_+TNQFWU~jN|^_^!*I!f0{_irRfmw@65_x%Y85lh^CUWtTYbLw*+ z!U;q9KW5|5CoLm*7fIYRQuiG_FV$4F`rK8w>EqiKn3j(^?3LZYUHFmXz!%6t#TK8Xr_wSI^J2L!qUGAMfiyRAtvaFNm`;uFa4enY_ zKu{;GqaZl7-u-A@McM9}!oN{l^au?1Xz5cS|&L?*5rk z#Qg6IC+-@l2i|g4BfF7ezX&EKlsr&)`!$M!2q?G(P=mS*s1*f^gI!lR0|#dAzlQIh z>r2|;U_~wt8o+_Stgfc8jOwDtJ2=>7ca@aYW#juZhVwEV{M)acBs~(bf7BX4;Y5sk zIVg2V3FZgMmJ$<*shuQUg`z@qi+uvblHWa~WHxbS5Ny5OtqN#J*{V7cMeiaB8(HtFb%M{^_Up;r?fdoCsw{9t9h-C66Qv7IbqL6w1uj6&lJo+m*q%o6u{vA}@|#NtmW2Xlk(ke5XrK8;TE+C|?pF zPmr9Q(QYPs{Vzw$vMBMXaykgR26X3ohb8Z#q|os#mAS@8hW~W)>EK`kBmvRV(v87* z;j&K!Zo>z2S!XT>a`{hl*`LeD&1D&v_nXV!Tt>}h376NK%OWl>(aSEuhT#>`#+a6q zK68Q6p-WdQW{>lNYRx(J$(?O`sQVqpwEU3l8`=GKV_J6J(dNG0{u$2uh#yx;4*r;Q zW4!FNN;bPB9G#2@LsW8&X&C`5?ETkhO3D_tXu4!Oyt6-6@pYZ4FhLG(S7QRs<2 zFw~n2>N+I?jmMRf%lM)X8;zy>;aa}CmLJpd3qFvoE!BdF%>EV#v#O!Umb?R1Z?EKU zC&@^oS9Z`X5A%o{?-VOE8&syum0Pro+_pT&#%QR|`7m~Py6~|=bURh{o0|3$AMCu0 zej3bFVvnT)DZ7BOZQk^DQtIA&B(nG$&kqbh_KM8(>Y<0_b#)Z6|_x)mQbkUimg zBU zzRn%T*3szcp{l!&Ar5ERsJv1f`Gk{vdJ?VK|+^s5K#aIfrHywLS8xR)~&%Ncxkb zd^@X;l}TWySPQn_v3d5r7;&jjBJndgqfj~MdvY>_hv64R(lyB`Bd*T|E2q5-batzS!H^(NhoJ4Y?1mz zs!P*82ha;fg!YTlwExFaLj?)cWIKD>z(a)>h`KfW^X7r@2O#N{)4n_scw8(%D6-!| z$N8$>IaY;0%q^G7`q2+leK~(DVcyt3=+55w%sX;In_~78)Ry|Y!jMz7X!SIK;mN_D zZ)`OPT}mb*pQalU@_CPKa#lYfRfSWnI7&@ z{I7La*T`3j8>v)qGZ0oy9h0Rl_V-VjtMHjJPs&8ELM9fmqr!d^ny#9(vQz9Z&ZXo0 zhtl+UV2Md9?W5>{JSI?Poh_+HQYef-la!bI-jw(s8Ox*u`hT5$vvf$#AOl+c91I_s z129L%z0^}A7uh2JHc@P!1LSx7e0ve5EW)9kY@5i3D!6i9*f;P{WXF|MmL0Itk2xMu0c6PhE3rI;TeGS5C6BXt+`YWv&=&EvuYgjATNU=F9Swi#*ml( zV+J~-u9QaIA>?L%GxYr&*ee3Hf*ut{=gOPG=FNY2lX@8u3M(tb>f~F=KVD=C%+*gX zEXx6?TX!B&q_-ByNX=;!&83GlJTMp)9^KVm9Mdw6a=Ckp)Ao*o?=FJLKJ}tMjh$Hx zDH&7qhkOZA%Iq$8VX$$Lr7*j%c{k8rhV90?fwRdB;G|YjM78WLY1==6zuIWl$|@Oq zaKCIh8(&EKqtB?0%PaWZydDIp6+!R4_IHnpb0ynJfcy&V4bc z8=CED*8a|Xzem5%ktz<8cHyg>{8(vv%(X`gGt9aAumkK-r4P&(dq1b7U_w;bOcxN5 zv+JO&Q2cc<8|*NRhvI*dtO=p`ea-v`{c@~rK?N*3H+7iG2l%yjxO|_SI%$Y{i-W(7 zhO)1v_0;NYJu97hj*~5zeRG)4F>FL$_vf2X)oh!h9A`e?lU!pfVL%(TdEt53ujPef-5{ 
z{OFwvL4O@|`^^JCgTkDc&|6PTxTlP?-u<%!Yp>FrSSvoIR@BY`bBH^!dM-vnBP+i?+45K3s9vM@+D0&PH+^llR zfF+&Bu4PkqhyOEXq)gs(ZN~FDuoutyIf&mQb*_&H-&=%8}!id!fUiQiKusRFC8aeA|v zsIZNFQzrJ!31Ziz5v6sQ`JgdQz9+4G2>)ZAY%FwFpDlBn1MHGKN~wO+ylZT2ZYUbl zvTv6HC0D+ zg}#3mh%j{z+y7Pt7j1LmVj#{J5rBGL-#P8lAtyzn7@Mggu4Zw8lS+=3BCn37u(dy6 zM%GcHT(VRezhx#-?g)BoSo%0c@dq)-R_NBKzqv7vh4@;7d& zgN~u=A&=oF_0AZ+Cu7Lax9Xov{|^cNB^~$Qfq%gv_dUWPRfh)ur{8n>{tFxjSq@6; zm+uJw{vG!}2maO}kHLZ}4{Z$V<~d_{_WEyz{~^&&?}PR~bjbamk0a*L`hQ%y{~NFS z=KdcN{gidwe}{ew54rEfhurrY<~p!HMf#puu|JT09;0gq2sVR+(d?+9DV%%gMURRb|8No_+!l;qU@2C+C%I7r%9*t z&;R%U_^b3BNRBX)7rU$D?GdmN1wcZ?eT8p{{13V>Sk0L&vd??h205@L;;PG${hjDL zFUe>`1~zeQsO+!fLS#Qad&yQL2eQ~pcvLlcrAv(1Kyyek#-gRG<_%Pvh$uoE`+*ec z15D4G?QLVs`SV8F4@6rJ*^O_K)Zm!n~UA9RI(WkDHWhWOEro{<2_PGIip&i{F8* zL>477m`S$hb6uEH%+pVh!Rtya8KF7-lu5%NrYo}axCc+Cj04Bzh2-o@bVB zX7YjzZ?&Y|o|d})fxry+>ybB->r32kjDAh>Uu-CgM_vkc<;&53=c{A+>fGV(soU}N zG2b|#*^n$5jicppgC&z?B7As3h<=C5KXpGem2Vh}rawzeo-QDJwx^uRI-Q(F56Vl) zwGHr}b-ywE=CM_~k9Aj<0tT*5cTc?r?H#n98Y}>j6`Os?1C2&{8Encd0*E9(EAXx{ z`Z^Tv(T`lq{feZQ@sUAjT3h@NZfMgYJ_~A^TM{b&B(@}Vdsft$`Hs_ed zn}#yNk$fHB)ZYI`+xx&-Rh9eyb6}7`(LE?AD(O^IOqzmOaYS=Q2iXVDQ7lX}Oe;k# z>*6p2_y-+1b6__6ZZ9{ZYqu}=aw~7#x)#+1F-2!UVbGF6P4Q1SYU$aYvQY~L|1iJz z=UMyA3}{yOzJ9zov)5j0?e*tb&wAFgp7lJ>YU;0Vm4={P-*T+4)s;+oT8($hBdqS_ z2HtvT(!j#CYM^eKmAy2|ns{*az3*?F95<B2Epqm0LP*+ZZ-E=X6iY3Y4mI71TsA?gxrCN{$P=zTE2>TsKNf3-%r^#a;( zc9{K8#kpa_hxUj59Gv4i=&xWnQC*Dux98;dd~U&9@6L|Qmn)ZKQ$2m%IG1ayXuiXD z7gum7yt?@IF4ens2K8cYFpEe1UE;0CQ)6*bVKx)<;-L~kJEx;w>t0Sa0QCgrp)2u6 zSqE)=L8`Ae9pc^De%kOFq6(*4DON%@oI5d--;6@TN~;f_Mry)!YJ#f{4`>VuC!bFi zf7?l{tBz8h(#VAFBACEk_A;pQdz7lad4*rOjB~BlSrdg9IAl|GY*nqgg9fUD+NnC- zBhgHC{hedj%bkl)S9lP>e)(r>mr5kh)E3`sCk)@3>3+DS%dPuhvTx0 zqEQnW>lN(U3A4MtrCkFY<|P^O!&`UgQTw2`xKgMyACm)Imvv6+t83X?Rp)(kiyxi3 zLF*pw&wEjs@)kd%S(WNlx>snh3f_P&8uEIJ?^P-$yl`mjshb!zZBL|O?aS9mkM4vcCfN`-@I#G2yRF220()=5h{V==^cmu<*+D|>0 zG}aXwtXMBx3JRU>Az~6giJ}!=?D~PYRquBWyx+z9AQ=CUSA%(>{n{Z$k__~>4wPbm 
z$`6!c@ALXgvDeGqQHn+9h#pM9Kt9%#A2b*t#r>bwlgE&N{nhp4n{P1kwu1c-(&Fg* zGtD^TyZkm_`3iCIW4&ccx({KsnPV~i#J8Uu_Y-I}*7AsmGh=)nx$si7(zw4DjCw%6jCw!=8ARk?~syI`#< zBUnRF4InsP-s*z7YABqBFqIAO5W~=XRsZy^n}`Pb@~Nz_GU0v&qPg5AYP23o1?xNq zd4zZCCYaGDv?lrCDSr6Pp~6uJkReI!HZT8uy(ddt$Mcid;1c@s+u~ahnNYL9!<{qc zv(+oeUpteO%bX|Mv@K{p`01%FUjD~KWZ;J~Q<|%q57Sj{O9q2QjFNmg*hRm(Y)*H! zbv(x=YBa}@H&j){r(22dTyd>6!)Q$|Y+FTBb79LG9s|8g!705^lO6FWSKxm*$V6QXF4t~U;jnVvm;1jpP)_071Ob&GQ0@oAzubosOrfKPq1}yFLRG=BwxQ!p zgUazb!g9*%TI*4TkQN#}v$XqM%@e3|bEAtas-JMtI`?aSPD| zVMw?_q%8grJzO)bbn<(1y4-^OE{qr47xSm%d0gwao8PPl(WVy`>O}HlCkU%yviIWu z!kNDXcIb@$rY&;M4T3c-T>^;P5z21eq=8H0+&r!hiDJVX719 z(PJBTG(Q`!Q~6t_Rq^1-g0FdNC;42HTH->NxvnmEt<4PT!jWwc#EH84w-{6-*RpcX z(2=d(G5-kMt!7g@2I@Mh@tNiqN;!F>^|>-4%j`cdbCpx#IBxV1{ACV-%5>x?!77h0 zq8;*KdQ_(G!EE~R`6p$r+Gy=QZZtY2O%+w8N4*Kdn*I|J$|x@LbPbNilu~k=`E2p} zb2#;i#yn%MPZY1_b)dD0{wMqE@_tsUfn5%;I}^O+v(XRd*Wu))<)(~d-e^{Bj~_Mk zA`^YouiezR$fpMjbHM|-Xc{V4(6FZthVj~8pby_LzG|}KCZc?wBp*#fhiF^9P?TjisBwNvz1@;BsXZ6W z9FGD%pO^12kPlf37(^uA<#WTfKWhDn&g#odnRaOwGfyvHR1ErSnW2+gZu&}!J4@+3 zabOVq%I+!fD-uD;r{qoS0J6ay;vsY)yYnfT4eP4v!wcB{8h7ePYt`i9&Q}L5B(qgV z$K~rQ*Poz*F@mk%puUUmKoaA2LEfFl&$Deh>&A=tJFV%m{kKq$@=Y$BU*0>7RYSCQ zvKw`ZQ)0@tP57=vr1D>Ps~d|}u^o!|G=7YBQ0CC4$^4z<74m9LWUF<#kX=QhLbv48 zPJ%U7i|wUL=l|u}H|WECYCo=Ado$_>EcxoH<37b>Ww9qtdY(;xjr(3;@Tr;2uvH+u zE82;Dy%n44y=hO%C2qcBdCQxrre|8->}wiUIs%K#42b0|KF^lN zd!Cp~sijqZ?KbsVXvRe~B*nK`I&djqVrW~$ro*h|z+FHxEH42*?mEctJ)!xdHd$yY zv!-&qo^gdMVl_6#YwT0&e(BjvAGbWe1TZI4PBz$+3wnwdLbl^=>@mvvId)!X_s`w@ zdrbtfwxE{p(E8(C;dU9IwbimD z3B+F(>F$eG5r!@9!eh;9IBPY>VAxB8*@3J^xZA}gJJ#UPfF9p#pPa{WI7`Q7@@2!! 
z*s0!~i$!8GU)=$T*(?%cH@M25dNdLXf<4tupPaNclb-n+^rd6H=7u+os}zCImgOyb zQcXKr_VhI!9{&>w1JD+}bD2;kA47C5qOM%{&DV_R+)_OG6GNESkJD{+z-vt#UD<-$ zX>Gmx(Rn%Y)nk$89$N+N5!RSU*UA3e4F<&nOIp*k9zef-LoeKBd}s6h@o)JcQ~QRE zfY*8*Z}AQMNkkTI3*tOL0qQPN(D#LkXVWFGwHFYd9gaK!apnnsx$@}cL6u)h<*b)l ziCM|c_ga4(H1Eka)6rGzGtu360_A>ps>8yNbAS%eVe#wPZaZ6W+cmj%CJ;(*$X-r}nfhJ^EHAh*llOSW%+xt;I* zgxjx>+cl6cld|;Y&m(WD@)l20ROpp1WC?CZ=JlCj(tM`Q^VgW%tKARRsOFYm6#3gP z_v-Oec*I=0o_^Nf$2eBl_K!IgzfWC9l;tgc9JZ+*BP5)51tyS4a=yvR#f?;~;l>}~ z7%Dt-2A&RLw!Y14{ODWiS2nT^z0gnMi(x#k#iiH4e$9>J9!W|r~%fs ztHh&);c9&vn#Q|i2zw+Up~sRT_#?3LNBcEAXM!KB5PCo@h15vlwSncxMoT7mA@rij zzC}7AZU30*gW{3fg_D;OWTCuxwb%MSpg48XuF`ay&z1B2U>rM`8#aR0Txb$xH&$yV z<9wFis`_;>xQ`B?l@%#1y`)I5^#+6qm9(m3FY4JtASZQP+YVu;YFnSU4X>hz~H z-_{($fh{kd)J?l>i{HE|ZYC3Dh?q&lBcyKroQWi6@T#fkr`?255AvQWDHk`&ihJQ% z6?ZWbgXUG}!OVvQSE$fnB!D!520hzURr<0@H8>QuYE<$Yg#I)fK9Dk#N^1WiBm2>sV#ML7j`E)iI&sk_gXBDu9(z5}y9GT#mZ18$q4aX$184oA09>U59 zOzi@{OCxisEr}=u%PidYKn0|&zG%4)*vcA$bz+;!`hO6~YjOFYHC|^ltj#kC9UP8Twt&p`^~T_9;BdV6&PZlG7+x{Gc5| zl@E2avYBWHbCACEVxQw(l*FyR6bkd2?;Ydi4Lf0Pyv2`JX}G6$W?^WhYK9^7mik|e z<+IW>HNtQQiMhMIJ{D%`{3RMWKO2nPhKNcNfq~N0L~y#T$`595tIp&%7H!FGqWH6; zk#(6qZgkCVE9TN8x6OMF3zKa{W*OXQ$Jtm3WWw3IIU8%*T@871C9QPjE@0X(0D%@D zkmbBLxPNH%t>W!PD8pR>gTMPZY%tLm@LFzvKeVxp+k$0Q zKyMedH65t?oV@25G_L=m1{@8GTU$&{af?&|0E}ByZTqr>nP}R22v-#;1eqZPALtS+ z5?n~I{vLfS`G@ZF2{!B?uH@C*4bYsld#zo@&^*f}A6~1N54w^=zahU#hVwTuNNBE( z+m=g@;}Xj~){c=71gi!KY*#iK*;#M8i_7iIo;tSs`BJKJEYkANXTs4>WFx!9^q_`4UXKh;;4|Yah!kDMi0; zYxBy|CH00F=%+wW0IFB$fIi0mE+XkDp+HTr!;j9{%lHoBr?Tl+z1H)@C56>oNkqX# z*XMb^%hv8DhCbRhbSkCM*%98-DJmBjK-0RsrN

wcM18MstVatA6dCfzoY#xrF^8 zz}x999?oqS6p`|l{Km$l^7FZ9Bj)8Xxe6inH{v^ zh7Q{LIx7$p?P-)NZG_+_xa4Q=f!^XCp)tBbZzki^{Jvya%|iFT_k?KG?V35+WbFi_ zJGv_~!MMs?&{SE4+z_ z)M5tT>)-XGAG&{<-?|I+`CdlgGcs6w{!B!9VPvEZS5QnCV_3o!S|NR~E?J3#DjI6F zy5%wxoys1osj}Kve*N=U*jM|hjs9^fiu$(W`O4xlE8ULy)CVoY5LZoYZ^`>`l7qJrgD1N^noI{0I5oEo_{u-hXk%=gt-7I6J7tdE=<)&VU`>b z_DJa`=hu;|5`RhF;(s9{9N_eH(+4`|*T%olnO~W|RZ)#cS<&w(DBaOWzpjU-oC_g% z$XgN}MUCk)Bc=9&$9~FL4x2ui?d5OxpcLN#HKWQV0!30EB^qoP{Q+=3Yuo4 zB<%771OP#GF8zBi{~(PLnc>p*1wW1N;ccJ*n_s^wH{0PiSc`v-H;N53n+3Zom)fnR z=9Ujgq?F#|-EySKmfYdbpcV(ajQ$3@GvWCa5(oQJj(|1a?0tiqeNUdxZ#P^zZjdj5 zoXj3iQ;(IE2D4alTdt7jV(Tn2m;5~-XB*e*jRfjf{|W?npz*)r9}wx_yt9exiIlk@122skh}iK8fRs`n%!9E8>q3gryMi7PEt*z1tYVqhTM1`BcK3 z{TRg(#;ud7*xc55nDQQy7=G<8{xgZo8OMZmyHZF7_o7F-cmA0oNuPFX-1v5|<824# z@<7td2=@R2~fc|?2%Jb~`>1ed5pejuAy!5{Ba?(ubHtOHsw zl`tFE?o;k7;AdyC_er?G;C_``6<4DONaN{tJPheR@^yz( zaCn=Tf6!L6;&l`+bxYm-FhZGd-0P4Fq8u^E;tMwyo&hmB!`IQub?4d93M!mj&*n!n zQQQbtiX*7MZ!$Q5=O1tJ2^9ScAp{27-jcD#UNyJnqQmCgrEz90^v_5j}*PZN5=QFR>-={N~cmd3c2?DWDO62h%=^)%Kwmq@8FP;fiaD1fU z_y6N{AkQns%amuS^4zEW5|@D=D<&w)ev0j`UAijoIl%_&sW{t5#wgk6OlJC(ntfh= zvAXuvJ)Fs;A>=J~E^`pf?<)EQ5rxQcG&B1Hi1Q>{)2T=%*n)Z@r!qQ@(IAI2X21f} z2rm=qRd&aK0I6^dqrwz6ymjXdIE`Sk_oR{Sbtw~TF`V=E}TA>6N07g zgrKW@LU6ARa-6(*{ z-vMH;Lc2@`X}rLDh=BjvhQLpb34n!AYSJ!lugeC{Wh2~SN71E#r}*2)#35{{Z|R#R z7WH84h@ja3}gGuaaiTC+3;i( z8k&xPdu%$kx^egduAHAZ4{-yVmgrPzOJr-G$cEX&81II|ph+{vW&*t1V7JSYS4NxflIFO*^wYwQi9di?NIR@vF3AON#R z6&2OZK?eJS8n5+e!6)4`s=Dz3Zz*=0tOX~JYP>5Rs(5jP4spHKGnor8nuV{vwL~s$f<34jkY_uaAeyuCM!PPi4`Jm~Gv}Y1} zNqIXjtZ2R@VX@2A-SbNozShvj-K~`XC!b|$S+pGK@kbf`{pv1py5@>* zn&J2hYbeUYUh5vFBPc0y*e|Ajr+oS=k5MRko@E(z;lIcqkKx<*fYLBEdmkvx`!@)M zT3OhsDbpT5%|nv90XCv$5E~(%0K*byp?Jk3lJQ*JVtr#F8xKaCK)aCB@Olk9nb#-4 zW4%5x7hL`Nva?YXGP`$%Yv=HUSX^!(r2r1Cj1<*Mzj})T{%D| zTkjdEytcecvE%AYn5oX8tbrh==^r3plfbu4{p)l0{EJPs;EnkwX#b-NP97I<{IGIT zHr>78H!RVUsun!XMaAp{CCZc{!W#=7VH?a%!o6$=P5Gl0%tI_<`^>mV(AIzP0!V(l zhoLKapE6|A8=Z{WJ9G0q6iOD3mE`TLEJR=uXPCXEV+K!f==xq07g90WtGnlq_P77h 
zhvSXL-IPm@f2#4><`+x&2zZ56YWKV$>X}=t+?tF~LD_B#+=4 zf>r8JR9{1|+6B7_j&(s$x==x|{$3}WLMQF_9Fl6EglNi66^tQS0~5#Eg=7ZKXa}k2 ztE_HX1bIXFH-4|JKFAofl(0_tEDS6k4F^>>j?RW75A&A()Np3xL0!RC?H%pf_U+RGqi?_h*QlM z!3DgeX7;yGFsxrsp~UTm*YjF$=OqV293Cm&axrENz~@1Z8GCZFNm1j|I~c(QY+iR{ z(+|$u9KXWnnf&vd<=Q|@m$49pM8HJT`;o!@_p0l=z3L2Wpky}}vMNg6c41F8WSy00 z=R&rl7j}E2{AzC$ba&$ruR6sCP^MW-1$LJ}*SVcBD6|P9OEqzB+cR z?V0o=&9~O?+B}5Yq>&nDQuu{LveV`7X10qh=pI(@RaaK?CtHo)Z8pVPpRFFN5Uah# zjZ7i$O|jU|uIA53@{X$H{_RoZVEnvRTxSXBh}fju$~YilTjMQ#jR}Q?kL;AQLpKNq z1@78CMsNIRnvEVzTJ2_U@pk1k5!-XXP*+JCzj8hdNhR|1mxjT&oRSSMI|>4ccIRcM zR5yCw(zVK)#y8`C+ef&F;Dd1XQSdES9|eRK(iiAfxR9PK1agxd-y+^+vSB_|4sD!i zL1av&PYjHZ8w9}0cmxJvhH3_opRt4YE>YNgdyR?Tvgw+}t_6o=(|%85XPMtnn{&ZP z7K+)Et5_{2!=PO~nfCmdbzn^{cucBmNZPi*@^m1?q9sV$UIa+oE*!iy2;rMgN+_TV zjy`d}XqWk2X3f&pvUC*lgLyFMi>8Ok;-;{PcsPVfalXR{A|WhqNP9UPWUT{vldQM* zP&qs(pp0MMPrba3frRHm4D%AcSs?&xGPQrs+hJC!ov1;<)Ke#UGs?vZYpq=}g@(dj?<;^i zq@w9kBrhrk0o+Lvz#U=&xass6KJ_QeY;-FIN#96Lq;F@Cg;Ui5(c4L#^G}GM;(TfV zH*7ybNhW_gr0N|-Ir-a7$ln$qf7|UXnE`Fhq>&eX!HDc4E)jVVq2I_)4#?mRsLF+a zCG`YmdA?+D+sK+r{}JEl2<^YmOCn%nZzmYBVipa|b~?;a%qMBbXc@$>VNZPk1SA2z ziAANk2&<4waXr?br5$u`H2wrlOszn*(2?f#*xx%TEMNLovKFmjVpg>wa1MGGo-ni` zn<|0A9GmR&Zh29QP=15*Ba+*}8#7(>j{VP9(GO}E-wh!d4{1NUr}2;l=dg8vQ_iH% zub4LiNs4fZtvM_Yn+fJ=J@a%L^OQ}S6wS`%H$0bq(aSr+x&?GAWv6_tYr%iS&)AHQ zOB>7b5FBhFGZsER`Y{?glxA~hiq5odS*5&^xc!R)L)io&juR3O4C!(d-?5}awXN}> z1s`Omn@NuxKkukalpSA#r~`9_{p5vyG`EK3G85fcV|r}3M)U`s6F+(7knmJ

B8=KB}vPfZ|?mivz1BMwa@fw3}bAQp>$G0G~Voyn4u_BFw8EV+)uHvC&V3dIXn-z8`ej0`(*EJtc zDt~ayw%My8psNzzv9$4Mnk$Z*-C_%bIT3u0Z;|hn*@;{al-Ub_ikmj8_{7Z0ihyC8 zWhGtQB1IvjX|v<;2s5}q{x8;7N|RbT*)cJ4fl)iCn&P?dRY$U!fLvZ_wcb9LM=t%+ z{F!1t&b8$!=+;8$wMqb;6ueivb73cJP;oDMknhon2b@Uo>)O5tZ|6_x-%ofF53=_F z&hdY|t^ot_25nWGLof!t_>a^u7%&~bh0j7-)A}!bHqh4>i(ekds0@NwJdZr}8^qrM z8Wh6mV7V8?kH1&b;~54L-MhhU=_+bF7AMf@S)u^@@}2Q1%W1io#Gq?Pf)=@p~cD{;B>cGNqj2)SHxbyOqiR<-U^zA zV$}&^GQz2>#)UdQCdkAz6UAJ3`Gk6ttg$gkHVZ)~Bu{%9GY~FSFXylqWXse(?k#yy zMQ?v4lX@H-1kt!mx@lbXg5NMT!f1xdPLCY7fK!R=xT*z*ag>Nu$9=n?GnUl~>j^Ga zIA5G%^?Uj6&^GQh&|GXjs(4^au?z zy2HpBHrKxD88k|tAjZZd(D}qQT!nBgbSHsk{rCjP^~a;}{#{(Xd81OLn}EhXB4k*p z9;iUKsz=Qkr|-}y(?w}kx(qsPcm^$_ZdDavIrC&ni4kGz-japQx$mzv9(Ecx-No1S!GyY*5k)J5urpObL z9rNcE_x=PpFBP+TJ(NX!^#F7x6X)Tz;C?L-iT6q8+TetCQP+KFuh)7i>u5qIcbj4L zK3^Ksv$y!C`}Ae`gv)Ck&PM~{DX(?6P*lE@3h2MOZE1}6?~3>;{*;ErZX0@k{D9(` z_(LVyKnwEmXnj)WB3W$*xA~Rgb<}L+{F~%rl^S|3M=0n@0P@lZ1aYRmBzKm_;xJq8 zERUTycxjP4%VQ^4|B5G46fEMgbtX)cJIiAeca~#FDg6w|gxC5psER6@Pa5CI_ptFFPa3b3r17&zDgCT8q$Hrm2PMFDD?sr(KN6GtQt?D;HJJZ( zR;h~e#40r&oiIxVE_YBVpcG%)Fc=x$aopMO)%rfmXRz^Ff34Z$P*$32J^{jh zrJ|5SzN<1@U-38i0uXe!zn`gWoS>=L$=RNhFP0JXfBq%}y@lf59YLE&k$zp-e^1#S zs|s$1tx!?T4Ayq(v86DJrA1s2!Al7eQt--h5N-Tyc;jT)hUHQ`3^v@xgAA*9D6h@7 z%>*1THR)-cBus57O_DxPptTu=mdl30sNOKF%2saeoLZ<~Rpf%*HjhHp8f{pkvZJ-C zNmrZP^Uo6eDiZnpt*5dN3D>InoX#@d^oOogBRJpGqh-T${2kiVz|gEG#BR4!sk*Cj z%3AfzD_jk)K`rWWvF3l2=MII1IXvPV$*%QD0&12mnfzNEGQC9m2q7X9zT|@E?U3om zgeCq%IV78k*Xtam^dVNv{_#X9I2VkA4R1(uhTGm_$4m?Dt4HjZ3HxaDhS_uH!r7;> zb)7w?V&3DqD0>>^zL2ebG@E)E1^Cmd%!InE|CBb_3wS)Vab+%jVa>dMF&zUWf&-f) zCoc+{bYP_OBhHD6s0MOar1ME_CH$xn-sM@-3LH0zBiF{R_-x{}Yu)~eSs9s#c93-T zgBjtE(S&~Ktk=4#K~OLWZRsxNm$m@xAX{%E9z@abCHSRr0Y-{#PlA}lbb(viof8B# zY|}%qhM<~h-h$9>WjBcgR=cUvF=hBVNh}jj{E5~17+WF`LYo%!8BJwS+Y=qU!wM#v zL*hrIYZW#OL&DH-aD|uONTv+jF3~WJ+N_-ujZQh;4fF-e`(?ltJecDGS?LcVE9WLz z=>;_%4DwD##Cf5#h~b(We=XIp+Bdz$>1hCm-T2Zm1Vnng#Ztmi6VMkxEy^o=pSn5H z(RO{m@iQIBshpn&k$Q+QR}l1CC$Pw{7P2Le)YCZ4yE~tf?6}e&YFb9w=&Zei%_*>N 
zz(G5{VMj@)INyPQ0nm5Ni~U+*K`bE_mC^G3>(i-E>6Xm)IFo;F@1*vAdgWE0(US{N zsG45o2h)4A!9_htIHjEPaUgsexAat0FCjWuY>b!XBY%^kZo4QVltY}Z1NW0!6yt6DvRF)6xpEDh*t!bXqM(e zRGMavZOEbwx*sv{13-BNbYO! zR|$0m4haQ1VUb7jE;G98^z5l5ZYKfODfRo4Vo22PN`NRJw;E1U_{r^35D_ zu-aYhBFKsqh9jFZL_?fbYOp~-LAGfeiTqB4Gp)@Wl{qJ3QNvIsXu*M6r@Gp~vt*SL z|5@imq?Q93z87Sx6o>Fgs*_bw82aND>EhB|JHp z%72%_ko)(*0WY{K{w8lWKct%x>=c?EMq7|yb|#Ivu_=?@eB(b*zfi$7d)IOnIRwb# z(GP6*ryS_?1CdodfokAek}gY|Q@+e@0$SJeJ4{FSIwH$YpvE?%2M8K+>9-pHCmuu8 zK;zx~Gw|$+-(Y5Bf<60B#|%gsU;VCWt>+URPas1gw3L!yBD5Um^aVxw#j#3XP}5g} zxN5b!tS@-u&%}wpC86a6I*rDl5(gOEFObg4TSr>59{U6tW39(C27>Bql3^mjB*Wx# zFgc(IecyEc?mY?Y4gES}oUqT#a7-3FRQi~WPh?h5#c%$L#vpPWvsuA#a~Wc3Q`TBU z0T43I4}No+rrxzUs{86&Q^QeNO6n+9o<_^Od86y^tAM=cg2!{Uf1WovKH$?870c`M zZ@|^vsn7CnV=?wBAwK?7F1!Yv;*ODgW4zWjzQG8@^68j!E1`3uk)O&&SKuSzti9fy zM?};2Vv{v=-toeI%I%rg2tA=upV3#*Id6p1pn$S0*>XVXnuH&ilYfIf?AOR)%*RJu z1w*~o%LpyIU`RH)ggJlKTg%QJsv_HR>*rnVNMAO5wb2R?KJaipY(N3(` zvY1>j$RX&krGuFzWXh&+!%-5y#|w*$b~M@Wi(|9LUEb*}UIxZ;EPo{}fH;aONfbw9 z!er9AD$@K2^8zOn)wyM4Y81t5UQSW{*Cd_yS|6iXII&t^KVnP6L5tQjxYHzm=*8%j z^-bEkY^anPt4>XMOMhrNyMo8_FJe=S7C^)JzRZZ-t&c4HxNVeXBkeX^xtWZ)sA>FO zZ{3-xpdO%U$Z_JzknayVi9P23_#^ld^VQu|xv{ojsr`bJWR1_I#uP!;et1q`tQ zI-VOIp1~OOf3-y%e>r`neJ0*B( zYUiUIE2OvX)4?10=WnWO?OQn8wy3W&v)|u*!=$YkP98fjr-ChsI}w^cVlP7jkiV8e zKgV0#L|NWCLxd+!y{YNffM|Ps36TR|b6%yvlS^Aa`U3r%OaIPE1>0QzG;nXUZe4h_ z>TYVCqkY;jzMyt*ai3I_tg1bu9p@mqu08WeBPaS zJ?KX1;Lh#-2=st$#VFpeD`f*&_zIIDmfeuGNbJ40Zps|-KTChaOD5kABsT-eZ9vj6 z3Z4PI4mHgye32H!nol+k*QprvHobL+2VMDYjqkw@h^32i!JkTBuqiZ?T&~xb((BLl zk18lQqL(o}(Q@g$0!*PATJrvVTJlK8^YYv{g4KnV+&>3YDQInCPTOi2uFwK`*MUT3 zqrzO9`&dt`6=$dm4ucg{q5w{>$Fc#HYN9YwC*50@m_Hb#;Fg(6lv6>5;^lwhnzMbX z;aJN!<1mtppR^@1IS5jmCjUis^SJiL4GAIcLc|4#+_HD4wxB7^0RfJmWs1OnQ zuz2!|Ddt@B45-ar^p8>dtyfqA+gUmdeoI*@nm*35uX4pAd;}K3u?u}%=qD~P0jv^G=$jr$u4ZO+vl-T^rIEHC>Atj`O`me|D7!E1SXA2FeQC!^k!9ce z(%wY9v9z_HekA2{P})Krt6d#l>+j@LO81HBWkebiv!4FM${AfV3=8vqKCwT4npvl{Sv3<&bW8v8Av> 
zW31A`6}FHCE4qmRzFHU&$`$SHV%Eo%Jp{uQ5~u_#dI|H1L7oW!#?TZSa+WZ?ac;=N zN`^BW2+k-6gJ_n^cWHl2;Y!5>g}FTHyl-r#kQ%tx6V<$6t<=<6kr(oyU7A0%$);YE z%z{MUu3!yFRPMIe8JUKf(q{3InuJ>`p^n!Y!EEG$UIZ^#6X4T>(*Nq#xqfO}d<&no zUCe%WLyf>Cbl=QZpyycL(b~VyY3=`UZf46SRc8`6+K4PkE`2^U>FMd|na|3l&%J_% zH*KeUfhBHzo@g<*K1b(}aO7Xcwhk~Yob(wGSBKaAeuVYwFvc0gHuq z=-54%OTC$`eR8@SL!;r6i!Q53O=>qkK;LX{DE<=dVz6uk7{4IEtk44kq!+halP?%wl6|?k+3M!w-OZBD&FU_J}zu^Q5ufIos z*|~LDaA8du)$Tl(kjN} zO&^6(i+#JH&EDAgaT8*&qRkp*mM+ko6TH^f!2uz>as`q5)Eur*!g9-u<><6t)a6wW zO6k|!_$BYY&YSm4d_M#CPd}z3`Sx3gKgwHrGG(!j`O(2IrYW`G(QH4(S1rXM|E7Md zqU>>AA*JdQ`pL+EKJhCoD=g9!Io4bH951fO6W^oApR5!(?@*b3WGCh54)yxs2zpj! zgA!Hqh;MbJoi<5rH)iRylL$6_`m~QLroipAtmKZ0L~E_JGb!4&_8_nDS*7>4R%F9Y z>-PKP{r#UD;T3B3`9yRjKKPxryz>9lf!p7;1HDG7qY8E45uxV34g5Ku(kA;f-&@Va zYhkq71}@jhyVGj-8hYYyz4O}zgT7^})!((L>q}qVubM1b9UKmAKDR%k(aX~5tkj~7 zDO|_z+qlE~zxr4+w}!cOa!bK7t2vm-i>8JSbusL#V@?}3qmQkD%ePOAUn0xkiBd!F zG;wWbLr9$t*-) zk%4D^P3D0dgQ~TyRn@mARof`z>f`UL`jdmJ27xqxONg@M9_bDd0MRJwq>DZeSmLIM z>L-K}f$c2+xLju=_xbE$ue+U@XxvP~P+uQ_Zy)$0rxb4WL$>Lx8L{_ZKu*-P!W$M+ zXz`yTl_uq=g5D|Li9ibwa=lGZZ7BS>x3TIWCvNVoTf-j|ZcEZ!&Q!n*A9sqK`sgCMlK+Z9-Jxy^5P zSm<$YlU+&O?rj2ZTK5fO-Nq2|!W%%Yw#OajQ94*D08yvog9;Pen_&)^;NIE|xo*DA z)v(^ZEq8Bg+*_M_bCdbz7WcNwr7h%bnMpm{3YSviGCO8!D}2#PpV~GjD16DHFQeaW zh07G4`h&K@?$cDN<|R!Rwtjq}#M=7)>c<1mS1 zEdCQjD|~Sy86?gb?k&B7Ad}DVcIa6(PLPv1nX38)Ioci?j%d%nZe&$_7A;v;5S!TM z#KR~>sVRXQ7vv@G&Bn#e3H-XvFn!SIytnE(0pVz^*>Y}B4i07Vys@EWsm-)Z$XoKZ1b*?WCPAGF*wltpWrg3Hbw(iZj5P&KkYcgI^L zj|g~jCoIo*0A1S*=6ieQeHgN>!y_btLn*`08&129+oV-lm=?NRh{<&5kmTyk2t4In zFf4AC*IE(Y;-6biv(Er0Zs#q-*+4m0AEW~9q1@Q!NGfCVjr>n5X)q8_3BmU_Lhd0@WDOcC*BUf%ZY%6b|z*4hUI8jP2^xU0V9 z)MJ3US$D%$r*|~}A=a^~(fP&WtUc(Jnd=Z76EP>vo^TFII3c7OdJLsw=qFn zV5?tLP!>wJ6*nqOAPa9}1uZsUJk)az2)))t6SUcxm2xH;wcBKZjU+8`eTY;d|50*4 zP=eQxdijq&k6&x;m`2-E+e9#(@Zfbih%V@CDu?@~b0kWcE`%OM?v!jG|L#OY% z?AA+kSdGG+&$;c%2V%c$Y@1)(sWaTsn$tuJXhxRE_!*LkFL^Trwk@Q(RZ<=F_Y3 z4$=g?Ner&_S5Ul<(m|?4?fu}KPDkRq4FzcTZUb_B`@Zi$Mgz=@mAH{Np#pD@e?)7o 
zNCwhQWOx#LbUarzqQhHf>)D7-9Ebu)!k#zxW6g>bO6`ARtHqeJm-x3sWmBtKr4mvS+8NO;W8TG#3hJ7 zwH<+cWj1}gn$`Smn1putg*QK}0d>m4Qw+PBPfP}nRlbGNiq9<97#cC8^*c0%=)&lc z^aa*heZhwe9KNLov*q~@-@U=fxB+a!(6)~g(6U*neVaIfn_}#sS9neBmcKa}^FeCb zcsFE=ocZa#3hxG|K{Q~*-2G|5>bP;Z^~})#0MSnXG{npYM*vnE0XU1i&{799(BoZZ zzM$r2{_66~-@-T#n)$2U%vVOue92I$X0@C7tN+r>kL7P8v2IMv(S;>|%b}B-|9=%C zTBcczUeZ7~$BE$WYlv#R3);O}ql7#}B?r;*yEO^p?c_4blT3<_)1X1`6sImx+#is1 zm?*z{JoCYpP+jM>&KR+d?y3=;%#9wZ;FQNwaor+2KuR+NE6;@*;f|XN-3mgJ_2{!l zG8f$RuuOo0*~J3o^7d*yN6o-J9J&>%Tv|1cQkH8J>b!Nk>NQ#KM9=5B8h^yA{)p|F z5gdaSCbLqq>P^(T8JI5{jS?a+cmb#x!L%VN#`jJllHki~CS=t<6LN(pLh*gq^y3~n z$AAb-8~9{gRo_E%ICoy~uS_SyMckI~nU-G-M(~>7@iK}%D1{ZP&E_?(@$P)ukA{6) z%i@>E-R7q@^}PVA0=wDAaq79k-spYzS!(6oc#iMub{*clPlr4DJ1l_3A}IrfaJjAu z`O@EBh`=2`suuUR1nU(QGVE_JM1vLIVui>(W-Fl#j(-t%656j5BRE=mt|l|0i&4vj z94*V;(}MtOGCZBexr`Cs_qB#Z!hrZB%4zV{Z7zL2DGdqe#>|LzhTWiW=>GLQ=#S`d zXkqEEQOJ9=!)x14hH_yK5dI658MJQSzp#PIEWNOQ`Xf3->+iJJcA2ZpGSB+Ul^Ikg z?_bzJWtKj6|MdMU+sy#eGs|4k+(M25v{)iut1+CmsXE1|u`Lmz(BQP9i+3$m*a<5U zO%TP~ZOXurxV^2ghjcBvk@c)~St89ve5hg*$+pO{k0dpjn_0|I!^lnd>;Yp53lhp+ zLs$(}cmhmHg@y6g7Ef%;9`MQFh65)09e=EuL*QhxI^vK$;8TQ!IpjIr7?`9{&a(l_ zu8Nsr%QpHJu-A8+aCoR~o(4K(5{aaQP%nNb_i2@QCkXPva0~4T+Kh|oR zTjSt=skOyOKTQ$|qAf|w)J`bzZRHJ&(Ji>kkFigJ*~)dboO)md-t1Yu)1A$)m9QIu zCl=pY3#k;BnuokQ=G6Ye1v!a!qC#XylBf_JP*x$D0tpcNkuavKt2*%S?X`~8#XJ2R z*nxTz15;*9_t<4U{fw!aT6{HvG3_S7Ss(hcdC~Pq<{fu{JJoJQY{ejs;RlxwqS`kY zlV08~IO5&UHTjVN4Kemau=zF(R9HWO;BptNA=u`EV+ppnAQ+89fJBU+2=zmjv19^$ zfVQ{^#PxsIw`x6buHMVjM*L*+YNy(+DWOK8o2r?TYsb=GadWMz6$^PN2?frXHciw7 zMQEC+WeQIqtOhGQk+Axq@MH%oYFcaa^_|GzwT=@a4MvE=KnQsnfx!bUejlfl0n88! 
z4$D0lKdvDuevZn@ERKWj)Gp4(z@T@1nrm?~qG#3#4I>z&X@IkLw%?lWB~p64P!er*qJB zFC2ta9us?oHMnH6Y0_qY*UMveO}_WTZi~rLucsRmQC~%s3IEn8x9j_GlO|v=n8RfMeKsN~;>J=o8;p3!cWiNIx{i)KTv`?E(X7dGq3VB_xxEhlj zY7JG@@Q9BgjS>&@T7L@!37zj197D2C*_RKLjmW|Zu=^Z`XtJ7b_>&X@l^J=^v(pb3 zcWFq%pW*PV!moYUTbw--Ms-6C89M%GS2{jY*V1PfJ4B;bz^ds+$1Wes)5RYkCywK>f!_LnD4s~deqVhy@0PUt!w!8}-lM@$R%C~5Js(C(u5PGs638lQeU1GXVE4jQVGG9)`@7&zq!oLB2;R-8Q zmklBgQ0ej$J~mzgaLtcQ!LWn%D+?Ip=}XD0p2 zc_ZQjm6IummoGeL(&Arq3Fv4ft4EOuT#LVX8HbIb(+y3Ov{0&rf7>C@-&xRv=j|zh z3dsi#$i9RiYs$fJ^3oR2U-my_nObjM!9QINSp*Q5i<`9+$+v$>NTF@~J! z;t$2Rq%bp?3&Fj`Z{u83&Pw;yY1nrKIsKykwaaVW$p@U4+Qbc4YT{BUf0sqS6*yT+ zQtcH88K7zsf%gmlr50&2$aCHHdxCP~W7_)aF146)@p^CZL`ya|`+W~{NeIVT|4Ee+ z7WmO=-xsa_^2vAlurn`OaqHB9zK2i*YGxv>Re9)|mS&?mgRK5GrQ#o+)1rao`~yQu zr#eORg)W^_oWaSJrT3A{(sH3o=J;lCGEi)8D>v>AM5;;SKJt#n9ckaAnbi6wywFr@ zBdsB@D23g_kkZGfhXbuwNd;5<{EJ8s&`WQ9%(AP^`Au)3z`AG%*`Xasb@2 zfswc5&i*ga<7KUAy+U8Ax%o}KQ^Q4Qzduk_I*!c4y(P=~Gxyrk*?J}sIhPsptwmL2 zhORnKaN^TPY;eP5U`4I8`4D7F%_B@@F9$+e;KKy=C7uIBpmI@vnjAR)Rmv`+P77`r zOaNkS;{VrQ8@qyjHHe!3zT_wN2EK@Ev?7yPFEYO8huGY~+ z;8jFr^`i@AX%8iw%#PU8bT>G(&@wuDA|=W_7w(2hDM)# z2O4>W0~D96aCcqTk4o_?Ml}YCO$eMd#g3Yd*8c$#6MLdF0I3l-mX6Ha{9FZx1GK6Q zd8lbiWm8AQi#~tf9VT+ zZ7T|{^c`(0s`bHiz;gz?i+JLS_=k9Ell`3eXLCG}(^qBzKDLT$YmUtlUXddFby@LcdhuJ)lU2hQOo6lkZ8 z@K)mg@ke_&GMdqc;5HVYUBMA{!AQvG=DsA%9>VVU+_7eC>YVM*)Uc*6nBnDhHe(_P z+__*c88%gux2d{UAwF+nZ4OU{o50B(>{)eDQQfw|I@kstH^baR(O5%kt0%$>HQp2N z20n?6u^_m=jq&eLnuXfU;`2c^#@{tqn2qt$MP_5%WoKglO1ze1x+xx!paz4Gf*KGD zO7AA}*MP8~0NizZH!Gjk$ol*A6?;FkF1}r=6Q!B~jeW%AKo_OkehkfxKf*#kOv1&_sEUspm8vw`hGIRc3_Qm7-?TayQeS)2L*}gbwUVH>~ zCuYWpcd4>jaXMDTIDJRsj3-EV2Jb_ixzThG@QiK@ zgwK}0#i7`M$q<=OFd;s(m$b1wdwA--n{UW3+hy4lpK0I8Lb(ourS@F--oP}u3|;;9v?RtVRNqjAnYv#LsORp$hw zt7N~dKYTE#YqH-pEn#y_o_`a2<~=Y){r1c)V7jws#xdl@o%XMm*wIHNmb?0^aM`MLBNGv=LJHi0hN&+Dq?z^5dCB2PZ_Yp{o&fU+SK;y!LK z_p+ENG}9HDQ8xp2zHw`b1Nt}2p3oerIz@77jC!2quVB#B+U!#sCZ<+xz^ZDa-%_go_aYO)0QUr`h-na)jsr~(2%476tB zPh6%>+bX5sp3UDwFPmiM`j;wI%V+4 
zJ)${**3lMVlgu8iU$}xCjw{IF``AaXxK!D73CsEGwc#vQS-H+72_ggaUOGud_a0;8+{fX!ZvypGcyw{8sr0#6H zUB7Zl=lqYxs&@e9D#iRidiY)3uNY?^y(qDd{*||6E7+e&zwIq~Len6)!#ap}1Fl^` z1Ys137v-13$fn0^Cku`}7BcW52zpC4zI?W19~x}n_fu#X09x|udix16RCD4$~* zF&E`KaVo!@8HnfqnZn^6yR+%n7p#u+ylH(gQ3dL4PZeKy8nhdFaaVuaTIh>xrmvS3 zwNM{qWxEqg7Sz9Q{BHa@o9F@LpnXZTvG&`8?WdFJc@@Vk@$7d^Yn@7T?31B>0~lBl%hO~1 z29~di_*8;g1QWj`g)8sYBsN}R3JSfk&f?k07#Eiuj)Y0-GonOyFOVABStH`4F2-xpWf?xXHk9As4BU`z|_8(wL8-OAA+<) zRpZ~BH;90w3LvrVyPd&UdCwI%YcJ>28##loY8~Il;ko1coXFeVu;>SKSu^D?d0|yH z5d9V1g~|qXlW|&D2c+Pc^jE;RRQ1YQagsO)9NyVGtbvU3WKrg|Y8`i0Wy9!rZ0f}Z zW#h~Wb9VJZ`PbWk@;@?*?_y(&AnVLj_R`Tgu|s9;l_~&0LPh=E8ffN%_Ci^^<*u%JU_eZ3 zu=#5i@lRMtn&w*lAyqtE6-Q&Xl4MqA@k)0&PgTioSB)q+G9@NT^x45XK4+#)`sDC7Z@a6t}OXQaK|# zOYJa|jJi_-lHtn0m>p$UgTJ%nh?kL!Ti>qus|4(R(9$|qEn^AXekmC(qk!+Zk3F3d zB$0pzD&a5X0Vqkal=g3aUVCd)jB13W=-+X*&VfgiK4d&mvBZ}40ob7;de*52+{`~1 z%lKibWa>W9k11E$)QzRff!OaqPGl^iF-uy?&Km#wA$%L2Maoi_(%PT$4W%3}|LiI3 zP{tgD(l^^QFuB;NltZP{8jTrAwBly|Y6w0L#*N@wO`SpS0_Nq-y97dKdWIvpo zv^V8goXe-|O?7NF%c`FX0dw)6_?QnSxJutYs8M4{R->GL>Oh13W);ps#Tks zGc-!YRy3W?U`@1PHnr5I69=_RIf}c#h7ItMrO`A9pxRuY$(7adO8}5Kh)e~%C#6*D z!9sbKY>w%P{4<)l@d@sFSu;1rXKZ3_Jno(5#{a0Qbh}hiVpUS&(K@0zJh3e9U!h+r z&L%%?8cVif#b*aJG+NK5Z6qvI!b=y@*i&3%OVi@Vp=30>bQ*u+6J0dG&;nc0G=WjJ zd6e)Cj)&NfZ+P))`3rP*YXg?n|NCBli}I}3t8Ctv?W&vpQm^NbpU$l(ck#f_AmO;Q zR9PCqWTaQ;m%gTct#O^CH-D828cS)CipkHBNon8)26I8ZP#J2oMRzviPFM?Vv3oW( zpG{?dOC>+2kp4<07`E->%H~#AiB{&~?(+DIo!7m}yK`W?8fukZ2~`}!91+r6i+@4LxUVf{LIf1clnm-_myf4Q&k z%;c%Cesg(mD)#j~%k7~z4Bn-_G1XuC(Z4cy9e+ctZCBUec#(35FE=oZy ztwv1?^WD5b#joUTS-yjOn6bH(qDyHv0+8=Px#v zAsha@ntb(bTjX%pd^%0Fs{bgiea974Ez#*|sy8Tfs?A!Oxpv?VO4JbbGOFD}ivG7MT)POpGg3DmUJOiWnCF-WT3mNrn_@8xT8zh|mmNmgPlrPVL za0OeeWudEEij_5-7_4Ul%65CD%J8GSP!@0`5J2Q|fc4_VVusda!*CYq+3 zmG<`Yxjak_JAb#nM+NkD8$pZ#BWGa|np;;uJArC)!P~iDTk-OrYuB|W$Bh)B#rqf% zf^EyK>~NJmZe4AU{2CtKsDiIn_FDnkf}h+K6`a2sOkP#q-6Bsxt*nLDEA5lq8iMw9 zxWcx=7?_cNIWosJpxO)#zqGo8$ale21nXRojAo%shUEk&x>(AOCb(b=!5V@na%zq+ 
zRix{oy7j<}!M)kM;F2u&rg=fya`#qctu!q=s%a--mHT$JOLzDgt#NN!G|95wWog$V z3cK8_nV^!&YsPq)dZPTYk26WBw8zyIH?^MN3)g{+!iiU z4f|Q)`B9|anwi7RYp9(nrw>#~TiTL3+Ds95pw80KB;J-JZgx$Xv!(Doi$U{7Ey;=0V=l z??83fAl#{G{^iPlMK_|4LpWT2)lvgR4>5*6TidR?h5P;6>}yo_BTlX`x;1}QW^l3> z)5xHjs>^nD?GnK@Z;f?_ot6F&flPGGKl57UqLP?pa>{30{PG+DfLZxo%ch3x-P$6LV^YE6sx&{kk&W+GKYHJfYJ@u@LqGLo-*Z?W*d-D5 z(Ybvsd(b|R!m8!_#H}vhEq?0NzUS=3=V=4{khmK`q;>s&IByoxNlCRLTf5l|IFGHP zfWbzbQ^>W?c;QFO3#Nd{Wqd<_7R9oyah9B{f)L%pEF)S|R}nPoENArGUF2dRx=smC zXAYqVyE{8~J4rVYQ}OkZz%tu(x+cJxm$VP+d?wgfwkLhuH6hd2k?HGX1_qyEOP!G+ zce=(#!scMi63KFBfX!)Pb1=4rO&cQe0^>m~j?`j5uS9t&<^!eJF#5$S`Q)&#vOyhB%EroGp(0jq{5d#AMlBb;k?#Q!NO*uOh2W$_jD2D zb?5a-v!_Lq>GvFM3xU>&6!vxGH}5GEl^yYOr2XF!sAa8|mQ;L-iMapX!F?w>YCTBB zzh`ste{b97!uPGTe^Y64)^rv_?F^@nN<2*gqmK~jt$RAy>D)W|BYI>{c(2xcnGbIh z)^qX0L$OCj6oD_IOl(1=TcxiqJ&E)~XzV{s77?F< z>>}b??NzpH1T*mY_x$R*jMwmM{QYIT+^_u+9KHNsfe71pC2cG||ASOT{M}w^am%N_ zdnZI(gz(+w%9G#@jwm|foQUA=C%LR78$5@|$*j-S0kOKe$SoR|c89{Uu&#X&wR)MN zpJlVX>Qo>)irk=XdqdEU??C_2?#zhIQ*oG%ip z5&9ahBEl$tm9$5P8VSHKVfQZewmdgEQ&iq2<;5?yxAt(Zh~e;* z2#*MkMSBsL*DHdW1n@zOSbUrcNZlBjNC)wTdjZ zoFw&3(F_iCH%u0Sgeyf@yw>kfStfYKkKCb9c;*DgW3?ZiUE>({&%$BCl!x=G%^)Pi zbH?YZ{NQ4!=b{!WHTsp%u3&}HtzfOuEVNWrQDV&@sr|D^C=AkaL3}-nvb@UJW*R>;B|>_ZGxK>(2UQB0Ty1 z6f>N}cdihd9oE0g3emFRyOhFAeu6Bh!qw0T+k!u(nH{dIR;Qftcs9g8fLmb@0)Pfe z1FzvooRf-uJy~#_fVbofmE!I{sl#yh&*T;ZQzuQ~;BweoDVf<(oF6>P1g>|h%r`F&s(4DQF?Q0XAW9z#% zH@ZbW8ku3^-FcTa*gNyVMh-uZZ04@?FQSNeO*Jz&s2?565E&#IOC`;ROJFw)A*JNQ zR;w#88adw&#&OhiStp-1bynkLF__so+F1e!-lc2m?=d(ciaodtjN(@=(3LAMaGKN3 z8d{+erS5IQ&;1a1HYQ_p9shT_it#s?~DSwX{&Z*~}*!fUTV z6Q**f92nLQIBn|Ujh1QmX3NcF?MIJ-tglLU>6&^7XW@!hw9VpZ@0H^Be#X(z^{DB} zIlqQ(lckwJWvS9Gv9x%QsG~3jH&lrra-z1jPfpm5_+QD)NIIaRi|_nW za+~m&EG~+@d@G2{8(06`u@HcIiywwv&qZg@5s6KtXxVjd4Yil9b!ErO_Kxj^>q`9A zA=Ku!9!bp07Grvs+{1!%HOFLy#w227cO)tbGmoq$zK(JgF*b|iSBNjWKPJ}cZubBW zn?)(OUj-9`k8s=(+4MMX-ebs8y0X#8BXYGH{S+6u&i?;+dl&Gis_XwdlM96F2`U;C zAzT#V4MM#T0ZAkSGcp5s0ck60tD>#f!VF*)2qe)Qrc-Gv{n?wPt+w>-E!GNXD<*&> 
z-VrSzsNnsKqlj7srIPpa-RGQ{NdVj5^MC(uo@Zyy*=JwYUVH7m*Is+AwU1+VH50L( zA)rf+V{-Gj(qFjK-i0VkT*z#9Aqo>0QhOiw>P}tX*~e4E!~yQoATV*iOO;hOh9}Pw z^rNiC1TvM|QN^@xFWIiAy?WZLr`=M>bfl2kra0DRSYUbT2qnME8X%=ifCsG4(Z}3H ze1u!kx${Ms?x>IHcTDZ$Qk1nHng+$@NWQM7e?3)-y=d|p+}Z0>(}?!SYiGIypO!3y z1zgMS_F;t6TeNX$lBoM69UX=rrDnynM+be8zBbdr{vsb<@{YcS>z?b9K3V1Er)C35 zs6aN2?uF-BlKt;$!O4-SI?~M%Ftje&_i5NM`f8Ohl`RjA@Ah!F=fd4uStDUo0wVa>2G5PV)G)d1PdqNqHX47G#Y>6KdHx_F3Wp1W0dooj_*c=Zn-B`3k zi2|01(aPY7JC|(Otu+ihQB(n(2$YUqQP@UaZy-%eiEe0dMKij5Ls6fQWItDZ!rozM zc%ruMt-)F|(YFR0MK!!HH@|9EGPDvdphQ(CL%X%>(qmn76)i61(cC6fffgRhv(uZ? zeZ{&AH&8)0qWM>mI2*4L0^3rNQ*b zsQnuIttrVf^Qz5(EvnO=eixU2ap6wXej!}z4GiVf zeP)1qO6v{6OEg)HQtg|D-lCPeW9aLagxfbtDLP#FPc8(Rn~IdbLyT8l>N8Uk!1xgw z-Qnlc;kq{{sMG!I+@oF+LLYYulVAw_+$l^ldhZ>UjjKJ4Nhu^f+bPn|?ZQn!DCg#p z1=ZOp%w*`bC8jWsnRoPaXOP~ghLU=s$%lS!a}_G`=;yu=zpMJwjrp;!XBBig{&y90 zpEnA+uFAX=bW5b5tE<{w1zla$?keb(prG47qo7-Yf^L6-<}2u$t>9%K6MUnQdy()> zS6Fv0k%-1zFN8YoFGN~V``K{$!R+iTeXrydMAM5eTT8#L40Zp;?B^p8Dt$CVD|9_d zx8~yWM1G{Qu+pmAqVM860$43a^1-C5N5(VDNliW|Z5Yi2LcF`2Oft&1To=KBk~S_j z08<%Y7ZCCv79mh?!JLaw$*r0j$*Sa5QA8u@$nS?rNS{v`p-u*<=%5-rS z_W@M`_hwfgmka61kNR`eF6_>n;6h^dA&lzoRE^F%@l~3(iQpK2r><@{D!H(!fW*~B zI~ibU_CyCTn`vagX&8z#TUhJVPSo@nnCt{=ltE(Dc+mq*eZX23%bmF8^1!?#62={uT z>&z2)ck53y80I>2LlIUS0z?*`+-l|g$dt!%lO0^{s@uAJ%^i13h=Z*F0wJyw18=0) zRYbEg_MtV$JLayTNM+l+Ni>7?|62?+xCyARI*B*1N)}V>!{vwmF3qj=t@rK&Wz)b;mWZgW`PgDtX6^dNxYVXBN1I z!Iz9d1^KiwM$z_zaQQxKX})I4F*h<6ES^|4g+*vm2!Xo;2QID>5jrSYIEHtF)@03? 
zVxC>ZJ~Y#0GB*lK9hKX@v2iS7@*Lvo+wM31EX;lEcr>u;w3i@v8Sg&{eFQusqRtb;%1^xs1wCoEe%;@iJn<@hwcuKnYdf` z6K&j$s%>Ze+f797Wte=K{w*rER!09eT=}mK`nSzGXavrynt-!FyOIV$$cjo*?duTL=B3`@2_PE9eZV*)80f4I+xr5JTd`S2Mcwd) zBx>eLLGdEpUB%G%u)RO>a&25w&aPYgF4YyyJ-QmTCnl>iVwZ)JNj;_egzFxirui!t z4#T1I1ec+ShAomj4xuHh7bY2c(zybWnw_fq3p1V;)>eG#{Ca{u z$7|UbE}xt%yc&gpO{$4?4<&+4;xF>_z!9!i*lU)jDd2A+Kxx< zvCneV<+IFvmpxlzWKzN95UG1hAzU8!QA_5_M1`llhk=+7 z-_Vy$tk?{-)t!3=Gfx(m9T9mft@jXV)&JVG|3jk_YE1lq&Bg8gA)wN{Z8e;xCr#Ru z&u71TtlrGb9c+X-0ehQen0LpYD9YGn-lqrm{?jVMjdE22QQX^cT9r{BP^c zjVu7H`qe}j=wbJamcdKT#}i#8h=#xM&43SBOa;4~hks2Iejvicfg;FYys_{6k>m&x z(k$a4XCGkI-%5m6_&(=*6q~x1nCi!vg0)3YowiqRIU9DgUs6Y#BqC~4=rB4c2fa>E z#T-SXwj}N*!v)>yL?cnT%Q4Z{YX zJZtJppGVxUa|(GD+@{N&`P7jf?Csjm#iS?QmmR6t8kaD2W%>}JEp7}8wlN;#Ups^@ zj^-4%`fmYd|IJ7}|0)cc-B)2%GKibEnAIG+=a6mhn>pLwUn&FFtFX(+53MMDdoJRpx{qOxWbWG}hcyV$1W^9UP@vte@)BAbsd=j4bt!YG9WHv?@?t0YD#=F9ka$RJx${XiX$njl zqjjoentTwEhy#ON&?4kK>nHPEgc-kM_0N#dFH}THYAt`xN>Y1zs*ii?xz;9XNl#K$ zU&^XnMP|dQm|M!HiRToXlDVb4*{LQ4Nbx7R*H2f?{v`JwXSV|7u75Lr4kO83FCH}u z1!sYn0c8X=rRu?s(wcovYn4RQA2@%%KVP$+#6jF{TT9zKdC;!dKNuJT4v_~9@Z>>v zP(b=c8~w=EUG42R?QUNp0A?!`FeYM8Sh&NSG?BV{ukcAAT;s)CemL{`ft9` z-hKjM9?xgJ)!u#=hqE)iUp*^K;$8f4XZ=e_JvjfqvR%yh(;?fO%!KVvI7hlClvu6L zBE8$;O;+kTxp1Y^7La;deAUO+%~-|NEnLY#wU~OGdpCbJIS5g||m_2ZG`PzZ*ORZm7{F|wM z0P8~RZgphBSB7whdtS^g~zF~N6eF(%)rYGIYEYKwZgsP0`6_aFxjNVfcn~?h!6_i-9 zz?26mJj%g0Y|l!O1B`f+g4%s?t*|MWx|YC3BZw+nP@0SOq$M557nLy zbPW``z}cr<{g#8SuTiw7X0-6}8)0T6wU@Q1yGiC$k^mBOPKUu+-f_Iwy%!2|^F*SH z#zqjnQp#*NmYz@+wkL-8Wm7z{ieIE9lMzKt8O{61)(NF7whTY*D)VMnw@$>(K01M^ za-z`)EZqysbD~*~C(MN6Xz7G${I2Df!X}3p5>33XYMmfqM;_6mLt`e4jC}_QFVd^Y zqq&e`zgE*Ls}qaMsuPzEWge%`RGY?~a_BsB?m?VPE!=Fa0T>rQ z>S`C8!L=?nb#NbT^066iohWs4WX-cnqV}TFsC`q}>}q?)aQ-biMdI@=sTcW<+k`Y- z1N}5?iB*IXi%Xd>7aLd#M#=lanC}*ctqC6{ra#U3)w1Z;Wx;K8Xc*#0SC80}t0MM

p@nH2bzoYV7N0+81BO9T#DV8zq3?cJjV75xlGsUgY%ALk!Xd z1{L644x!AVX_wQtvuN5vnzjf+xqKwe715N2?HOv)yy3Hh_QX@5kS=@jQLpIJB~-UD zltb*uS4E$py7x+?{E-FTK0h8DbGpx3Of_hFpP<$$#-Q z`1=hD3i8^ITUJ$q@>w5lHmw&ojy_A>#^SHf;(oVrkC+6tqXRHi$#E}s3*;dH7=JIN zCdb`tzHQ=ex#YP2$2{vES#gNJrWBXTCYPXkh><9Z!tW|H3lfnyI>hcCTUzp+@8+`$ zNAaQqe$MSiA=kvi_T-XHRV6BWfLC@1v*FPUs5}t@rn_RT@?vL@;@|Vt40S^pZ+pxe zHZk4eeK4za_S{JFjyLQrlcp3Q`##(!tmw{U-;_xS907lCO!UfUCylYW+W5f0U)QiT>Ee|D&eg>t5?McPq)=4ENk|o^Q)!?{pq_$C*OTt`gaBl_pO;MIU}94i@T*@5kI$F42oQz+T%PHIUHqPlQS zb$A2KOW(B?Em0+LzetEbsU$T|>m@3z5JT%Gp4&rK@WW5n>r#QeVf$>&a;UPqQn)&S{~Ci)sJTLjWGE>b=uTIc0tEwixV61fE&$;tKeX5t^2&s-0>0n z!>BzUG*4wVy_u=!iZW0>oPMXYyi|esrJ;JdnI{sPYM)_WeQvNIk5dWkv3W4ii-o_T z1HsSkNT1W9_A^nt)sW_kE}8uCn=T>Fz|BNB5zONI=?hcjJYR{Zh2<9^vl!X*``Uw!PGwmYGTItf)?utT3dF2?k}>Nx=jEncx0*}+YGyCTK}Gu-LRTh<5`u0ds3rLL*Lvxkvs|cd zj^&$$?I!u3O!och`H=DDwAmT`edc8E{%@t9l?vShVVL$BesIRcEVntoprl zkZ~Ekq2dW+C9yYbJuqdsRsWJf;cX+Gr~ZdJ5=C2MkXy^{0F_(yJ5|>WbMy1gM2~#a z$Yqm9qoLOzV_PE7-W#$Wm^?gwB{VyGIaAnfZZdbI#@HyvsE?cA;t+QV zR&KT$jCYE}ZB?N}g)_m_8eD0uo>pwFj;=ztxxpN*+QHq8QO5-}Vm&Z-c>H&1)&Q9X zjup8!hwd~QlPldg*YqmByLO?I7rQo`IL`@CGAmJ!I32xMcNmdwuT$M(Rtl5O>RX`T9wN!)!rji=fgCY2 zZ|beddGZ+E2D)!6ouBJlAOG8*^^LvJCeybJ;9btCSBoC^IRo$3pvcNUE-_<3KDg|E zgZ7QrBf5TzG@--4 z^uTN=W%M?krT&_g5alX37()i8uXFnSv1u7V`uc!8tFBK(w*y5;>;%6IHEm2oqxq%W z&`5sacm$=kZ6QFENWd!5vxOvH4P#gWW1c zSUIg6L4?Q#uLYF!{0SPf?aoQ8if-I;=lv5Q5ZAB4xaYqGu>1LFcGG6FUA?Jrs18^- zLJ`Q`5sGt~yPs(X}}bC-p`7;aL^9!qirAP4Ciq3K4=@M zsT;kRx|aPR`&h0gr+1YW{V5g(H-Rs&Lzzmfy_(rFTuqUI5I11E zC0eWVQg&O&PJa^dP8Z>JI z3-)o1mv+lp*^b;gws6ZYVA9jXx0#Uie)X*ICqnM}rGV&0QpZ4;0P? 
zp@&^op-O$X4ByTN@BrpDV#6B-#&nT`*R7ky|#VH&| zZfkD8>ZZadEPDBqbFcQ5{QnN>nDcYaiE%B5ox%*x9zV;U+gp|S6{nl9@i@ocTetfl zHzVZ7xfihdAaLxRJJc|9L!-$HKMy9Zz$UJ>OM1AqW)o!1#v5jGF0CO=@mw_CTJzJt zC*`M6H)Z!IT>L%2ABY!m`kzh)mkm>)!DaIU52N^(w^p5Ho!>Gc*?(wF-pAF}4o);j@Va}(z>#Ued+L;#%o8`G)}6RZOVW3&S^KnMbdELm$`RR@koACev8T=F@7a2t|G7>8Z)f@(YwQ5Dq*Cez7OB zR!=aNRYK=iI-s)|bXFPYHpnJZ%A(KbKcGvbKsOx5^YyNPyQzN0M`4rG`KFG+EBZ-@!n)j`G33wyPD+A&7 zXCn5=5&NcMOv6SS0eqBCSSz(%knI1cYDA!@h+WQtlRPe|C!`i;x`VCdRAennXtNTE z?UM#o1-3E^?=jVUgS>X|k4Cx{T`tPR7*FdZ2t1}x-RoLJ7Lv|dy~Ez&TD69|Z3BxX z#xM=l$j9-ITSN+&sFmSl1fxEk8rYnguMh3p5(%(zzqmNOq&*Kgo%O@uRt~YamE!K; zHsWMkQN2$8gVy=`!pZ3vfUURcB?gZsYYOwD_PY$)aPlHZ1og;^aTmLq8tkW_HuPEQ zpIFd0DoMaUM-5?%pV_afiPPG#uY-Fcc*>)N`cOQV7J00JzR`2yK$Gp3?IBUydNLgiL)VUr}9 zIt=D^*H1Zq5irgpgQ4b*a&Fb0k5e&z3QU$b=Tc2#=4H=m?m%8Za1i^MR) zF|z+TzEs=AUCxv{-S8vIUEbi_C%I#_Qy;A2#}%V zSK$F=FF!!D5<>odsaeEZYg)$T4@43@cHAI-tcG9FAjw!7ZZ0BMmMh_2Z@T9YF03^5 zEz)rWVOU{Vl0LzR##*);GnUJeaMf=I2MMg(5aCxn9|_$2P#2~MYxN8E3;wjc12ZI9 zHQ!2=X8vR9Y^E`R`1;h>DW5gEXR4RrQN^a6sMT(SOlT*pEcaG>?M(Db%~N!yA>gA; zE$hq^m)bAEaxo*jt6#;ZDOrDJ^HKhCH(v)U>o1@EHF_VA$X)en$dh12nylNv@-{9X`w_0W5VJ6p59be^>WVJiL1!7{hC zQmr9hCLp%~6;Hs5q)*B44>i7ngmAYlJ~3k7+!lmcvwx%tjE98hDJ|P}cS+wE1u>6R z*X~~&dw8v+)2SN)&_hLzO~}DP$ly z(2`l2WI)?7^N~l7&Y)633W+ez_95)au+@f3)$SoB@ofhHrRVX#s6)qHsn-Nowv}@h z{x1s9F6-V5qadq>b=x+{L9R6>m%jZKhEF=mT#kjbwM(6 zLLg%AfVY&JcTItO$}Sn5rhQi6yi_g!{uekMK=Sld7!8iH)%9>X%)y!7hiB7ffc@+% zW;5Uu$B}{1JO#NF;}Mq4D3?&LY3{Z+(oF@K4X6=UfnzK>RVA~13wB!4OmTHAX@%XBmfO|k;E(Y1x_`P$zCb%3H$LMNLWR9gHX<=s|am`Gr-+t zgzpo+Ae@1T%y`lqDf~6(`*W1-r@NQuH~3ZrKz>~A@7cs(M_5UCiQvoMGW%mrO*mlO z7q;QYQIpXjh?(foF|#LCg%JxXN&{W2R(*fwj&Opr=)_`7)UGYH*4#Bp>vZMQR^1-a zFC3jD%HL$5UXZANr^+mMFR?L<&5I-_z@Fhl;_a@n6U|{&Jv||suhoT_=2U5G98;g? 
zammwl&O~2c=70B+{r5izBIE0sC|?0ES^~TVZq8AGPOBOXI$qNfO>E#!PHE1^;HztN zr4CA8uXm00t$1R2DDjvJAHGnYjDHv|f10cv;Jed&|L*}ytW^dydx4MR(3H^YV-;Xx zx1mG?LyV&&P&GLBY*GD}frrCNgD;MaKkER`T`oNKfBo~YNm>A@<`$w$(u{z!GAYW9h;mPNcYO7KzczV z*WDu&ST&!x`wYoYoQsY<=o%ROj5p+3-xqCRl*nO+uFw0ewCm6h7KJt3`vt6IGBT7A zx*s5o3Ar!^IVp2ZGpA$y!_O41XwqdB{Joxn`@W^Br?loK)pLS>>+1oeDb;(qyY=F9i^;_h@d zJ0<_*$ef+d)!!v`&Ut3;c$|r%DyFwHU~Msw=P;7Cph?7K5y`U_z|(1d3a$z!t}Usq zU3?6jasq5c6IM8RA+BxCN9pxA5TT7Q=ehFoB>_hGs-EAExW2s@&j&gaQ z1sX{NN{^y1UtVK8;mAjwwR*1vjIJM?=BoNnE&ElfH0*Q65zZ5-6-r8eDt;MbH>8*L zK{Q>Qo{p5*)}p`k7%4d6xra_<3ypl0arP;yFYn@IRVm~KrT!Gdp2VBw>XLamc~-qP zerR!8_0vuMcal9Hr~154*%NrHpcAQdA}D0UzLFZp`j(|N4<58m4#@5260ag$Z-Enm zL1#$EBAU3=pu6FI$U40x_dXdf!&mojI}ABi$q8OmhCL**Y?OLvf$Mk0<6d>FC* z(j1W_^Kl}%FU7qj@H6dC0xebMqQDb}r~y}<`5=z5bWtFk%GtI23zg#>96uA5xu1mL zQl9S5JG{{XeNU9jEQqbDF}aK+%8fvw2Pe!%m~`XH4uHSMcu0rAYPH#GBc51=omJ{X zP!WK5~&NWt0G>^2=Kb@fwG*JfX6fX56^dQ^pGm z&}xRR55w?O{~hYcisqwfTDJZBp(rrKI`O7z{S95?sAcY(h z_a2DdLar-*CbyY6lbaz7W5}qKq#||tMRtt84=0X`qEu--56oEWRE)}B4JRiutAJ7+Ereml>MKa33ycDV8Zt5rC#R?p0-^jInd zD^JxJIVn>4P`6iusFm2-0GWE55p!_GT_Cq_j`h!rj+j<|0CnAB*y$Un5n|c~1(@crm)jwKZ z%rujp;YKPZ64;Y^ozlWf-jJk6$9MP*E5-B|LPFIqrJLHr_ISLwg;8%ypP=bx-_EFg zMRD3P6^hko7u~>GZEj)R?~|9( zrM+2Hz)gEA%zRpQaLzGa%RXauVY9U_Qod{MH$yfKGaL?0UmKli7}mz%*1B&qE#mGe zuVHA6+0`Yf6R9Klwf5QrUFToJ1r*lm1EJ)JmkLE!js1Hlu&ezYIe4mniYW>D9G?ER zo7lRuR-G7lip>!jCYlU|b>JDTI#{V&8P2B~!$7zT)~&ZY9p8%Yd@RKR-bZzfrL{+yGzR`yiggWFy>r(ZB}YP!NLtC_O; zX`hS#y1Bd8p?vvb*jqWZxxi#k6&z--N;jmD)M z^Jgd-Z**?k<<7cs_&-gZq?cohtQ`YQ&P%<;woG%c0PR5?%XhLr+w^IB`vCTD3kWX| z)@q+e!dk*#N%SEh0KgVOzkV3N^9H`j3K=7=b}1(7*AHWOew(sBA+*tNMbxEVKODpR zk@&^o*gyO|k4fw?{WBIl&i?M+`O^a0B25urHAN|uOm?opTVPCCvu8@Bq|k7wEc%rj ztULOU%N%eOo9xC>TfKNbPltpt^(rc_&I42oXSwl`uh5uEK(M+)^Ed?;Y4T1#IA@1eKt#OK}ai4QOf>BUX zfiJe}zzlW=9Tep8m{1SC1bLPC^Xhqq6&I#h2ws*MsOGOI&1Q>j(ed=Qwb zq(!ykhD?GPvKmB==)@KE;#CB^5Ye-MIZp(sx}fk8YNSLG&9MrDAnm)N@8RMnfh ziq5Qhc^%2^-0x6z>NiA^`m6Y?V+XmX)Zal~4G5WDJckz&U8f?0YMG0@D~BBsd-wlC 
zsXAcqe5DrlDzDaX1QUYxWy+-JUMl+C8>B172LrMUr(=CG0P8}o$GT`B?s6Wyo)WV9 zMAhiq;l@|m8kI|s52au4r@@|1IUhp2f!`$6ep zpnzi4H}R1Akbm;SWj@{Oe_CN4x}zm)K2HGi<|vhW8!$4v98oCH^+TrT<||=tOZ|?d z)H|+1aMcgsZQLmH_!ybSeMOHAbtZb%qYtR5<(CGz3*nhpK zcA*RC!K~dThF^YVT6iM`PGP_ZE>}!v<%*o7$h^HK+Fe9luK5BIj%0j$cuZ z&;5E-<|}yFO3gxg{6-Jqr)R#pV=~seZaGq{E7)Ej=-2-) zept_bxr=ze*Ne-I_rLqU{~(Q?Rkj}li(jsOeHaf{ub=MaoVxrt|M%Zt^36|^`ztv5 zQ}0c{F}PD7D4lq;-1K_(Vb6{Cn-D=dxV1e%OrO?CMVMYdXFoE6h!fmn&#{%dP*YP~L%g zEx`6X;WfeoguUMHk>J52N-tvym|a*dElwXVC^Uxn-4jjDC| z3)JxvVHe>U0`ejAEBR;V_Mbp~q0a3$G90%sRqVa0cJGnOy(a?))`R`&WG|H4Ti;_ag z3Fg3Du0f47hB+_aRXckDZ~zvyGgC_HZ1OE>ZQm_-S?y)koyWmqSZmPe@AWI1nIXe) z;+UK^t!6TfB=KU{zTV&+k8?((GZ5M1*K)*5D@Z5j|GP5cH97wu282ecv))%MdLnfR zDLdA2(c$aJ6`$9r^xXPRI&6J!p%mP?zBifVt83ZyH@wEFMK}STOph5s!1LgMLtvu~ zBGHyXA@%~PouqYhUC(KHr(8cfQm$)Ai%b?~m#x~{&QG4x4!@aY{z*Qtaci?yKW(pX zdAB6=cPI(z6zkWZ4JbkV!5{?zDNgo0b+UJu2SUjUjXIeN8lS8+m7((ebI(j|;%hjO z&47$!wVT1{Wo0rXl-^ouyz3-;V&F*@i7l+Eg+VV;@u{hl>>~OJFWZA& zwuf`GxpJGqE*OV&mnk&$JCni3iDdP^0m&8Z?*0{7a_!hk81X$S1YdHM`{U`<5>kj7 zqoN5TQhpTIWQl+ISBSFF{hTk5{W+Q!WclC(-rBvac|oYFGbGV{u5&?3mEgeNk%U*;^WXe>3_t--kL7AS*j2h)j{kc;v#_>2em(fVd8;Rl3GdMxOj*hiU&m<77u^uFtp8RDLX>i z!*_B&!8aB7(69eu5SLug-z-B-nbhk@;4VXmrv`nzjtcWq12PTrIh!V;|Kq^>A&Cy9 z3hC%}qKfn-x$A!nL#IRe3vWml)gH*7JND30R~%9*?gI|f%ju&vf>6{8Ytb`jMHO3g zO2mnJNveAX{A!4|Km&&v;wREHhPce_b7`0%uAMd+jxfZPFZC4va&w{Th&M;&@;En- z?XcFcDpQi(*7ABuxAiQn=~Hv_L63vod`F~5)tA09n@LUDLvJ3YNiv_p4-W2cvJ_8y zn%&e#XphOXrDCViS!?blK;bYkj_U@34 zhgKZ1<$^4A75r3gd%&0s@V_Kq$2su?6;eha)kgC@4Yo7It)ne0dOA${xmTtkE$lfB z)0p6bH`pmgR!X~Bp9MVoai0ZjReo5&b&TM=4)ta-O6wz2pR2BOGwwgx zD2I}G@IqPjzu{3Ildv#lt{Kds-$JVnPbM%VkfAsGrcM@Up0S=HCn{qfQPYvw@pZkG zq#qEOg#ty^)Jy6GU&{ww*H1U(s=9`|!R!MyC(Vg9)-2rEvnf9<%S!Yydzd*$DU-$0-Ve=`I<1%5O zKR|qFG+8lT(X0$3K4)GifU|XBsQfd|N^^Fz88}iG;U4$DYPVsmzWU`gT6N99Wjfeh z4z6&`AyEeCbbR(-RVnU0^)`~9PN~gpNthVpW~S3#>*IA&XiWX}&^(9ldKiejR6VIH zb{eut1+{tnk67#2d+1`gyMv*~b}p;E04|BlzMuqkjow)cktYqM 
zrZGZ2wxiWS#+Wzcy5sVs*?uY+#+-~wb!k|tykkNob4Nz5|L~Sb!N}^`Mc4FA%BkkD*oyc1C4`kCppBZxT3!7 zjOxW>Lob7J3y{+&$r$DI@5Ea+58{j2-wZvM!o+zzfkD( z%I5!TCV%69m0t%U&NJv`aZn`q=VbDS{zLv;`6lYe{|vWdqPm>obXG5iZwI}T-4C2u zsV{V~f>m$xS%=d)pT}9&=gwPGbUy!%&gVaOp5}#QvEw7SJ=%)#>q z&N^PQqs^BeoAw$1G9yvxjU?uC^m9cCw?f5dhHdojHd#hJ#Hcick+sW* zrO`F?U8O9h2akjE+TDxj(~Aw=U&l4Sm(ABvF(~Q31p3 zU>H74`XfogLAHTm9!|%0#zuHWzGFOxz2x+DzF<+n?PzoS6oY)7{_RGGBs~d%90+Af>huFjU<2J$3_U%1)%9||%j>Ny3Z`_Y|f z3o|Eu8apbS7%z3H?k0gVpRxtO^^uO~_2^&gl)Yca2cEpX?!78q9Ahus%jkI$eQhM> zQs~^apU%^@rRYcqv;1!6hYpRBI=Y*8$HXUP&D`qjdX}tp8>`GW-K$p_Vk;BQ8WJvzysExlZWH zvOVqUK^#)bBX7B#_vZ_I`{dBt`}%FYd-2ox%Vupb=PPS3JnoKr`nBz;_urS^wdfxu zPwzY4fB)IGzn%C{phwbupOs<6BP_pQMeuE!Tvx8BO zZE}A6j1Vd7Z&W^#lS^-mOrG`EUmH;GCxZtnx=9~8 z4JLi7^FG#c%$j&>WSCj?zZ2xHw_vOOr@Xl1t?Qd#v>QeBNOCUfgB!RCuBhbqlqkG= z&ARyM92DBK)U+#`Ox>ShBZ>d!W}3D=18^Y#p!%d_e`5*-n->%*8A;rJ+2y>(QGnVO z2F6qm52tkphR^Q@)g^nKr2vUq1z@d-YRj?qFyt zN@)#g>XOo0(bP4yR6oV(MNQq(-)-uip4ZePJ*TN>`lhB{>2EjnPS0+#(%)<`bEIpuUQ2O|$!Rce0hNO>b8k+9ibW*x|)5+;BO~capO~V^S1$`_3 zqVAv7Y>dLy=rf<9@yWi)e<>a`5%nvVyqwQWpPBiAdDSTG_&tcTfsVeOqp!Pzt$Gr^ zp1ZOc#J7L@4gAVacmwQ+6jom%>?XWJkn;W~1f5qG@Lhfedk`$b(S+j&ClCe_h7g7k zP9=;We1&ihVGLnBVFFAV|=0E#Z2?&4jsx`GngDiwSiE%?Hwt zs(bGie6fWR#ygLO^nngTI&oeoF^{QLelTw_3J^mu#R#?RDYN76f!NJLU^BGALfQoN zo`j+a18{998GO$j^hM`?EOLXZVtV)(tlPm_=Kn^j(Yhegp(nHw2Uqeg;jwzAR%5M| z)x(S;Ep1l)X#$OjmKDpYAIOV4iduFvib~A4MJRksG)}^_zIq^|5e&RkKT>I~>e&ejKa_vjOetMieN*NEBr4M3z^6L4-77Qt!GE&5AHPdln_7xK9y1ltODO z5sNZe82gHtnTdcEP{e3O81awHpNQzE2vcpn8}Z4{fg0E9`nZAThfh`o*SZnkRm5mT z%y1*7DZ)sPjYHC8(Qrjbzbkd7o1%vz4Ad~XD)hsj5ux*?)KPB4Rz(=;j^w7KIR0)T zOmFNZ84ZUkb5eC1St|+6ABBcD3`k5Q?^8Z7G z+2}5FBVPO|5fbgCZzug)y{SZ98B;VLk9r2A^p(8bp>NWdOJBs>ReDoLrE7S*KyT`l z^x3?foJl#Hw;p;EP-z@s7k%^-KV?tep4Xei1CT#MK%OYgP=Hk~3KyDz!!%Yj;DhYI z0b98PhY=Vw1INUB1LymXYv7ov%#2$2cTHs)MrJD05b~M6rLVYC*}U_`H}pmz^bf*G zgntrbkNYm+0HG)I+lhoT2KqZzO9>=^C9$ z2oh!xt|crW*o2=F{y_K#;TgiKg!c%a6N;I$jwK8wj3QJKrVuV8`248HpScX9+)H8u 
ztQ|n8e0!*J^PJ)4N}7Olh^1THpY}!qsE48>Tfx%)QHH}}djmhOG^_if5uG4Gx`X%g zTz=5_XEXN6+Czmng=YH#=x@hRg6vmFONVz(MxW{DlzAkzi4$Nvn$x2UzpOn{C@~rf4rXF) z75`KvbRRJB_RJFOic9dugNL%WV-ur^GdW_v9bq3*SNUTs-yW&lKbMPinC#UPpF5`= z!w4{C&MZm)S|4cn@M^mBKT8F_=&yQYAcc^5xs{geN-OD;YVy`0pM zUfvs@OD~5Z7W-(QST?!xA6L~Xr#`dZPD3XdO|M8|q?WjMRjM6_p_Omm;C9%|-7JH< zK`8At&UC@!gffu^zoq;(^IO92S}EWrCaX7^cwGg{H;>asm)o7b$|)D0mzcF1$Nat7 zd&WyKl<7qRZX9N}!J@v;`7xIY=^ht&;cEfJ-SbS#Fh6w*cjv`=DdCMIOtX1n79s5z zlXjOg;XL-218W6q2A^g;EO~F0fq&9)0IOvj0)N8-`iMuG;N5wGZ7P>pnpoCskyY|a zz1i;VHOZo<(Jba|v?5#0MJ-~Cmld@iUx4aJ>>DVE*Pxl+eq7z?Kpe*h8{naf)4iN4 zeyLK7M*X-*;w#<-5wiC_Bb-=N;>3YFlqjk_0nXU>s{B__{UJ91VdEYK?PRvVc+8TnR2NEI&GD7=dHGXBW90G!J8x43$lBzY6THMa*QuUVd9 zul`TLIcrMV&=pWYSI<8vx{5BR2L>b93?+-!yvt`R*h0ln;lfqUc{o7~*0;ycX30bJ z39Y^3oI;!!JQ|RvZ^whrgZ(gzXmDb^ou8M=J#)MWKVqc;(@9&$s*}b@38kA#p|>qNU5Ikvj|2yJNyjz5uC1*$QpkfHw%Y~{}9%tQ*6>g0LZ={%NFOAQ5_{x zo#q?k4^d*G%wS-S+Y&n;ucT*01~~|ou}3XN*DG_SMyRebFK$WEtFDD>IQGY}p5d0} zeb${{VW5S}55`dd;HjdWXRBT^ZQbS@4z#Byi{CK#`g-M;;p<`c%43IE4Vw*JfV1Hn zgfq_9wi$`}AJiF34(1iIk-?=5)Savj7V3U%-K}x88O_fY?&mUMZIe852 zdKb0JaPkV;g%eDsxxlmBMm)c-Y19Vh)(=CKpT`kEeaKA(+($jnvon zx-4&)SudEH?|qDsC~h*-%PCx`Ac<#uB68f}Jlaj9Y|4cwNp4 z4Ba;ptLbj^86x4L6go8_kH={>41h=d0#^xEHYforjpw+YMdNYh;g(p)+Nn6*zdy|I zi&ga2ZbU5($sJQdV&ZVJZ%5QorkOzpc>Ka1P6SE8X5T%Rc9YswCeIonu<__`eEc8v zNA`v-uqaOb6JiXT#TXX->vfkgY>t7lUCzRFx^tGMRvBhcmWXw_#KC;Ll8<5vYt9t= z*O|Zl{{Y@y0F#0Dn}0YQyfq!+edf_Gfwzf|g7w_n#$Y4v@TEpn)RLKP}n{q-A7F2dEsqGCD-s&yXY zbJQO8K!DkYeO3li7^J(G3bnmHd6T9ji>B<>+w|?`8Xjl=DZJUOB~rf5zE|@TAZFecay;0PST;{d)77v-|`c`UHurMMH~;dI7ju|>m};xCCbV0wx8jh z>c?2{nRGb@Oj#=lbW+?h-q-EZw__!j`+vO$c zO|Cy$w{I4gIQv;cxjn7SllRHy9|2BOeO5C}dEb98BCXTD; zh9A>-@o0-}AtfKbi5e1ff&5cusl;vWiJ9}_lR%%~W}N#?6K@;2uF5aoS?hkQa=zn! 
ztID>_`MJJzX!SHdU$)irO(N6k9C*OzXt%(UoXRGf%G{d@IUTqSTxUdTBMzxd($h{T z?w+lT^;h*d*G@o3HoKpebRGxIMc4IYQ)7HZ|A1oHuVa(LL(ak5Sv$_ke_c2hxQExB0=f#0K%7dq-=>kF@UIkhe2E;K4xp><9DHqt=Rmc^JH8{OtG* zH}Ex`PfZ}-I~)G`6JNA*BMV+~LUz&C9tDgYXr&Yd31*dB>okw_ediU<=!Xk}aVIfn zXlwr};XcADLY5uy^kaPg_n{^4HL5`gE&;x_c-f?qv0Y;4AQ2deL_h}^JeKq5iVCI< z;?c+n5>wfeU&?v5xjdIx?S*QVO&YyyYGtqxRy}(G{IllAuBm-J5WBLoKfKzp$By6vL>TtU7vmq{xeu2w>`;zP{ai&1%#+tK zMidtzuuXn643)nl-xX9ZtAN&?~M4sP=L(=)m?sQbw9QAF|IedZ^=rzX8{0>#N*2Z)y(k zM&9X_e^yTU@l$eP7whK3J}_IB3%l5{d+_hF5($q+LI?hG);-XNm@*Y%t)9N>;VuTK zv;Lu*0jOR<k!m852`FU~Lx0tY&py!_u9wg-Oi>@lxvdb%L7mY?J zu%fm0Kwy5gL})rluHP9yR}%jcyvo(yKD0zle?C@jtsdo(#kgzKTUVXAhPeYzehH~f zLl<4;t*TijYYfiS=!QNQLJX6LNyGM_wX~E>QZA1q3rBnYq;%J&^)F~P2QvM8eY2~AZ)vZ zbK67U!>HX`E&;eSpadq*907QGQM5mC5gp!fJf!}my`YeD=*RrjHa?nhke_<^Ry`X<YesTs0%Dyl^mp~9nxU0}>eT^L^G}JvmN*s%ME1%* zqa2eUKZsFZ!^KL!Iv*ek=4|GFQS3c0*4pw4b!{5cQ-hqEjc*xbHbP2n`E zFr`hCnQvXnl~%yZ04mDOy&*^IVeyDqKiyMO`D`4uwGXx4J`wNZQj)7yC*h^1>VuOo z?xohL~`8o^}eopK}oQ2TB+LF2Z`sA&wqBGxACkU?g;nwo}ASEiPD+*r-W}(u%2&b zfav+#gp~y83+Y*c^cM-c2q}X6u<5yg-xJLMaP=><^NV-h#`t4OsjOMHh-2D`W`$_) zf>@!J!nKQtbeo0TCpi)KePZSil1bhXZGV`1F_S_!YS%vkf6OMXnzsmHgj<;qmXXY zF5JfDCfLY5PLG)JyJ^C?%rwkQN%NX;&zO>rYU&Cla5RbPtv$WbIQ!Gh?>0BhCoV;T z2ltn_w2a?X{0`^0iC?xrT!WoJzM^&ZIcw<@wE=ZkHgZL=Ua7Odx8_gq83@`R1np07 z@nHJ(0E1WkddD|JMx6T^eAhn)A|fuR=|R~!Soxa*qK6G$zB{$MQJ2HCuH&&ST zO-1(ef2(~r(ICHlR>KTn@EX)0m9`Z7N|!7f&Hya0A@Ik#dHx=nCV;o`YXG+DrM&0? 
z{Zp(?{%-*-!tjBP4L6r7WKo-;cM-VkVj%E_-9Yd6)P+2oo;pV@oWPrU!~Gh6J2i?3 zWm(2A^jW(sAI1r`FNkB!g>CU}BrZtF86x8YVed_X#orb9!y>hj@71e^)!3g^Z`o5_ z`!;S!?iYQg2WETz36SNFH!~yXcvFbIOTYQ?r7jgbtKB739;PY(w-9=r+Uikx{E}P> zNS~a+ry1FIO6wH9#j3C6p`sZQZWvF)gc}T1v}mHVe(1@$`HnePLdW&d-e-;^<=D45f zp}f>=1LTD;pj!C8!^*1d$cOCKMir;2|FB~R2}CR|$5 zDCj338kvk}WD)_1Nn!$0lSxc(6A+|Ks^FK2Wzy)>Ym~&gcJ?zYVyV7njXR%bmbabm zTf}{PhPUPSeBR#vIHBi(_I5qLK?v|H|3>H18trHFe2Mp*9Ri2SdrSu-T`I0_3e=7t z5#z@7X7OWPc|CVTET1=f#Vvx)PD(#)Q$sEPyEHbe;%9bi!|(aYgXjH43lD$oQvERQ`b0WAVWH(p_$?Cd>V)v2WY{+vT@1f)AvJ$++M z;|i_nL2=5-cU5BYQPmwvKBOw0mXA%8Sv`^u-J+;J6IEvt6;ZmrSoKmI)6Mc`bY_dt zN9VklnKJ7CCb~aQ#f}r+Wr*7?Q%u&Dn<#|58maD8lw;c-lYj7F^56AR!@+A3qPDT zbM&z+_;h3NMK&M#@vc~hYgk9@`-PZ@{U>2^S#VpU*L9u(ky(N%Bw)%)cGHj1b!q71 zJ~r~PpZjr6HQ0T(xUx|*m`%b96eMI{R&Qe(1R7;Dv%H6W)BBm5w!T+)TbTj?D*$lt|L~CC^D)^2!s4hTI^_}N|zOo_kPgTInAd8 zcf1OpI=D>sI$sPx#->E~$}u%-I_QcdMX!k|C5qB)jpn{4R=zBoSll?N*xMBFZmpSE zJeyQTu5D6~RqwD*ZVpqHV%#OdFI_Q4hLovKF|IX=pC%WAV!ymvj^C^mr0wR%WNMI_ za~6J;*W-{FDNA=uBbDKfF%s@~jFD1z$C#2sVx-XCF^x3ZJH{M+NQ{)GJEoDcbH^Ad zLwAgkx^~By(nDgT!QC;9oCKAi$^DAo!$p^dLP{B zeMqPGp`G4O^4~Lbcd{Rop@Lz4j0eN;PVeQN-cRZDeyabTDe<&JVovvCTG?A}CF~{S zu_hlykiSEDtDi`iNti>ZC;XVOitq^GX~L@n>1^jS=^jHknJ}7g0pT*jO$4pRKO{)* zwVHr_X!{O=RPqiIN|`}5&yLniN|;Hwj-Y9O8R6%IKN9{yc$)AsVGqIQKXp`52;=4& zt|iB9hOLEBdH)ID9&{&~CRqk!dB^k|EGNhPf-J7>=wCXdHTB-)=b^#L)fbmgsy&@r~iRuU&_~a2JDu)*HFTZI!LPW<2QurcK;r2`ysUIr0VbIp2Mq z-9Q__U2~&B_23L}x1bDz@`-i>0Dso6Zq@f*@3K+kXvfj1kh5@407=UxQyMe#F6T5# z<4nXUJ4!ki8GBJ?Zf*%W>u*%?h~_{ihD8cn-2EY&6UyYmz0Su=h(KupThPqmFzr!) 
z%}QNBY1#F{wO=Z+(S_gpNZyk_9TVF3x>fs2L5cB0TPU!B`%-z6zD1&Flx!$CSRU(M zzEEbj@-hpRXRf^7$BFtTZfN+B@FZewH!83<9N0AHB3D5XEee^0`8qf>+*#iT^}mHq zB8sV#>pZ?2?I?O;Pkx@gpT+(+AoFk62uppFkJHnd_?#idiWv9k4JN)XNOVt351>1e zZE?84^oVTXX}&}%>i50Q zh7y_|-J4j5)dTlB>@Ev+Yoqy3zK>&d3>aj6MnfOG+BEcgq9%iY!D?tmpTxc2-j!cr zh_-BCwRa?WdhaWBEQMkm2Fq$6XOb(u6TJyPG;RtTeyeEv zA+gu!qL1cB2^-DHWGES|1qdVINfwoxdR%4j)&@;_O1UYxH4j1hir_y<4VwrYN%N6M zl%|O`x>~`l{I2DbsmKV`&1op`S8Nje4Oa?+;nuv^SHMoK zEQ9en5pUG#eJ`ICbbk8#FWQ>|hi-%=n~R@A`9Hx=kAbU`)B9Y+pqnDZIH&(WScxxZ zzgw%B6AcKrK2KZVbSi^ne;^&@BP8ig63r9`d-?Q$x{`*UC>X$y#^ZZ#m;7o0yPh{25SwM+G>mg zBrIqx6vIIsTr81XVSq{|)%EO}oeTL^-6%;$bEv(FWoJThs2w5M_&g4V=x;SWmoJyJ zw7nX9q*7%f2g#FaZb&t1YC&n(8t1;-{YvdM{$Fupd~q?-JR72+M#TxSU9Qu2}- zyV^ZLx@NQXM3<_sT$~Q01FpReGdgjI?5uRWJm3cHe`7thp9T2T>Fg6ocA1ii+4Pd_ zyWS-0UcD(uqGBatBdliDqxGei;q-Q%AN_2tua85^>2|Da?r`NesIitd%f)bKb`EjNsP3k1Hftl{yxqnRYB8Bh2X z$O44Dr;s^W>p0)avYUPP*FqvZiA~9Y>Sz-ve|#EGcvN2r{G6?H!)t&GRQn?%XQS8{ zZux=mX-#omvyGKcKR_eV!91@7ta!_o9C(${L5`D@tp2{*##1-YS9BLa$)6t5G}Jcp zxbQEQ(Y)9-!};BV+)ZTJ$~*p*qzCCWq6#gy>?)Y8h~IJi zO~w6FsN=p;{s}kU4$YLcFk*7ze#u#4B)nU2eLpu&DC(DbjNYFbiBAUO5-GItauX-bDb-no=;B zeES^3n;{TZ(BRWx1Blk`9G>^xT?`r&G&ygV*1HBXzQ1dm!t+WBdGkK`(fJ=ouEdcC za%@Ud`XzTMPqv93dflAxq3a7q{x85D1Lz%r+K6;NyFPfQ*bR_6Pu<{C5Dj z2Y#<8H|~Eje&@`AXEkos;maMuRy!Gb$R1Afw*SpfbLYvb&&2uX9%k(!0Vd zHZg9Kstf6p#cx^!qiqLDV7%=>Nyu+`+^g;47e-tMP4=-Mc3!HJ*AC?O-K(^WD2Wef z*mkXLYl_~~C$Z<@Gljz||8ML6;{W;jA1myC@rTuK{uTN2;V?Hzze3(2EFv`Ndp{!5 zUL36=a*S&g`($2d{p$2fp41}ZYi?sdv<9pv!Yjr7iQ&YJA9}u`>lPjF_|WqecNP_# z*XTRn{@*y?vfsb!{Qr8Quk%iS2ETIfqf*-0{-$FgmprDEf>Asv@iSX65EgvjfX%S) zJL`us|36tmpF&NUUqXHQtSRDoz1sf`6uL>lc2#?XibD{B!*B#frKC3ocsK3Zrwvo!Zkywp@e-j?#6B zGZ)$B^Zpt~bQsT=_4e9k_leR{$)NZ|FeK|q;cu)+N%)q}^iI?cJ3SbJ@c zjVQBws4mr@lhefb_DCF>LMga>v>_m|Z4j8+KA?Ve_zP@LmJ1k!H>@`Q5 zL6$jk#kk?Cg-A?&a8w_s(Z_Ia01Tz9p_{uX5q0xWVw!|fydU63AD+OsLIrYRf6mIE zdkISJ_`R^uDK7VVX7utI%77%=cV^UO)|o-%r>hLd!A!L1+PMpliv}fMxzh?aQ+Q>B z<1IFz{3=218R7VlUy;=v6>Sb*QMG<51ji3|IQ}T*BOGrgTXQ(O0cmM?AEMXt8g|VC 
z$NGq5}Mc2_30St!$vD|$1CA)_yCPz3y65a zaobV>E$bvixjg5aF6bl#!{`cHni;heDt=qTwz{(s)xwQCi;Fgv435T_+`Q))zlJ{8 z=J~fG@f_#(D*l918Rj7dAB9et$y0a~J#q2D*YK+UZjAfG7f^A$v90d=;elIBx9`iD zyu3J<0itJ~p`nkQ$qwgKG#9)ExPr;sPFMQyuRs-&#Mk?o-sW`NcN#qie~n3yENfhr z8kc4GfGjtYC3ivrkZywMrA&h>Q|8J%(_C2XS(K6Lrk#_E@!VlKXFPws!s`wIkCpik5OXhPN|?%!DYmRbWw8zDWhqByJrriQD;r zVlkZlufokbZ{O@+uUAG9Yo!pKiDpJ8oMq)HNux{Q(N+b&t%~D7^5GtxrBqDT%-`d^B34owTgL6G_5?g8iL$| zbMFKY3N0~~?qKrw9vS4WZMD!xUt~xdYqzp~s32-TD4D|@eNEO85n@XlVs^uALxX%VqGQ@`W!CGj*tFZ>GGBPvVd}aCm zFs|#+v}?e*%fsLQ(|Rnh6!iG^7yEh~-u6G~vB+UC`G$v~#OG171+0Ly36LwpD;608 zcV*w|&5=R*lgfXr<$o?b@on*<_3FIjqey=XzY(U+(_%R35bdrg6F&c(qld7DN9zko z`;%mo=Uvd{0)jDrzYKA9?B}(A+m zbG9Pxl@iJb|4K0Baanl zk!O($vTq5s-UVzw-6e}Awh_2PY(SqdMGFz8JlMQ%*0#)bd2bmEyCON-KrB+KOu+ywxQ0Lf@IY6GMIDnNGdD?qmLD?kF^JRr5i zS@92B z^4+l3!!hiEZ~l?O4LhXkw5b27yeu5_z-%*kf&?SGm|Lf!*BrTQmvf>6A!x>|A+_VAgpqoWdiXrL90KaA?;-`~!tkhLHp!a^#Kzh?D!RbNUMI+h6dw zfd%CU6=4lH92uy=n!e-g-@e6xu>K8!(8)JOzoYnd%c+1MRahs@3TpOFcWSIV8Eczd z3sqzFpYqVaUHHTrZ+&>inL76-Dx5x=Ja+*L_Q+}MeC(0Kexx@dlGz@46Hv#gz$mys zmI(E%3Cx?Zrf;ixT|Rg75`iE%{PI(U8yD0A+|7=xHDOD}2z|x!S`U}v+Gd*S-3T#S z&4|SA;kN*J=W_cQuqs?`A8QOqq5Kbx2BH`hgDewP1+&GsCy-kq)Z4@ne71}5L88a}+Rf+`a&x{@}B|KtI(29qD%0LI(`GsgrM zUvnNOvg*$6tHJOx97K1?dAo96Y;0f2jYg>cEe2htY}!39dI<;xC8f9Xz85?Ji3<>9 z5mTOC8NRnVzmGHK$8KPd-ds8YQOvBhsdEu5pGdPYLlxHKG~0Y!)Krw-p2H(+y@t7p zUeo#Q>*@UMcrq(%K-ND0BNt;sF=WIdmt66n<)6ol6bf3$hc^&xRxA1#YI}K-G7Z0q zAj+)hlmAO^#L%%7{pS~9X+IwCWm!vyfvPDusp99#!FH|y1!i{CJV4Yj+TqMSLetpu zs;Jt=D6Q@voa>MYJAA3pnHIO^zf-GU+tte+T-z0wXCE}}z8szula(T-WUGhG9gvi96>M33oL+{jPmi&{U`2UHlKJ4Is#jjLld>`USBT zyxM<48Sny=<5J13%LQgwfuQ7@R-an{?dG^-OQ>+|^?7fx=IY%7bNNkUS5optt4xfs z(G5v}1PzZCsfaX?)uvK_jAoZeH-&fq*)S-ipJDjz>%gGX&j$Vl{F&@m>}Lz%sN|2Z zUy@iscp#J$RLMVWY&^4ZEK|;*obFh|Q?!)yKWqL8E>2ezD=l*k{sgvOi|a6!)*ALOUc9_w0m9(!UPmoLbHoT3st^ zF^`ozmb%BWJeIk~2|Sj&$0l zXO+B<3G|9nP$~BURTSYx)9O*M#EB3HNVlYR8-BkQ$*C#!DFS(V{NAbcLl(>kNH?}mS#PQGFx)d z_cS&)KLvCUz3l}1p#3cJ<`VF;uN#!9vg84ALVHKjFPvnbY|6;b1TbaG2-z>x5KOW! 
zEA{^)0Q}_4LI7L{#oh}53O9m?({cfm`}P9o-+KQ%jVjIgePD9Zy8z8Zzwl;DuNz@* zt)fs8&!fo5GGmzEq0UUj8s5YV+0-%7d~{=tFY@L$ikNwglla%xbNa@V>Yx0~bWr++ zFW6eX}ASvmix+ya2y^C z5(J@d%6}f!uoDDV;N+CeTb^|{!TIn}B%Bjt!wwZ0bsTu}TJ-Ext z0|0|aezt;=bx zSFzRmvVGhNVe3Et7LHbX|gwea59{$Fi=Z5zrY8Z{r2o$}ejTl)P3=9xXVYpXNDtFb{1? ze9|TUhUa_sD=7HJm#pAT%JT=8=No!H$UUE-{O(kmYv_KL|C7q|OV^pX|K^@gQhs-Y z&2{V(F8_g~=g<_D>_Q69Nx|_v5BD5Y(4}2sRD+T`*;*sFKv%;{F8y}pcgOc!`Xes= zMx~n-CF$&s>m%bnA-0T*V{238BSH?q54|3~&ng=^; zlb_ZzT#F(^IYvL%(pzu>QhzrXO;!QT`7aSu;# z4}U{g>%;l`7=PpVo6KK=KLw(=g1>9|!;!Lg1%Eon{YU;D;_oT`Uf@sedx~{3jK9(R zi9+j4`Q`jI@b?q`?%?lt{N2SLzD>Qa@Ym16?s>jMaTKLZS@N2V7isVPcrfJZq<=|! znDpz~yW{-R8TV(mZ%tvw9`iI|%yBn*m@ zj;!BJUFRkI*$==P)jfbwVfLfQa&?d5C^Y*4B(vF%meqYlYAkq~PK-VeaQ=lG_`-yL zPIroX7souI=8)Jr!^QpWbzyH19?{p*EJz2$EMY=<$y+(#K#y*T=_fiN~gPhIjwX!3`A%im+`9 z2dBLE=_l7wbXrZnVIC4ye~V@Qz{AYoxJzaRqi>wy&wjmhhF`Otnzj(mc|K>|*3IxI zy*|Sqzdf8cuu4^R?N{i{>obC3ggKjqR4@mgtjvUBE+XL-#!XQa2zPKTRgR5&3;7`< z2hQVOwSLJ#I2i8?(u-5xt^wnYE!h{O$|m{8(ds_l0;>KJ#2y5ZgZj@N*U}$(kEO8L zJ{x|QlzwE~i#$cVdU#QIP_(ru0X&7I;c9mdllTX)FL!`YVArILDgTmt__e%LdqjSgv z9HwE7MZ8@A^vAzG)4$|(HM%nCU$H#l*Rmhfb|iyIQ<#d~bYM3hwEUEl%1^i@U+rCpqD<(&5*5@N4$QQpFe? 
zvxU|1MEI3~jnMt(Yey!%N{u7wP2i^m6NYMKvf>4dV1fOlK%Q*^m214(zr@{%3gSGV z0fI)1jAh;iB~#w(rhZ-W21GmMy=Ot62*Q>-G44GieKD++NkINug>{H~l7gAR`2Q~Oxl+6$jYi)4@IfjRu*fx7Q{??5EZWOt$U^`mXR2d_b)-Q)1zP@ z{D$0t)E)OQb9jdmCZ9;9!svkXiPv=DIW&Ju_A^T1P>GJ&*RHgGm&3lapqXoS@XIXv zOHr(06Z^uB+U1D8pBVx_;9pR$D|gyn9h3}4bDCqsnTgE%1Pr zi}(Q@I0iQLtz;F>MV&?Z@g5=((wZC%M7GB>j}=1{E}`j7G@aZ;;dd4c*A)RC0qP#k z9RU6HdQ#qtDG#EzDdjy6Ilnre(>3!>GB)*Dnt3N##T^l9hQQc=OU0%>p9-e;rh=(z zLZed?b!uYw0^+;U1Y>(J_whZ}KjTSyn^PXAOLxu)#L0Pp#rq2pXOuxVE=f#GwwJ}0bnuOhlJ-sNx4@^UIrFP5Q2zjH zJJAtGC+8^jZI~2tsscjDfw8{jjr+Df*~pc1SbHj%S$wK|pc{LuVY;=u?A$@!ZZ-3mcJ<&W5`ce=7dR3zyw)A*9M^L;GopG%iV zfq3MAf>6v~W|QWfp+`4WRxs|JOC1NQPp;>&C3g3R;uOtWs$@UVAStCGC%kj0IVQ$L z#F9%=mh6uVAkvizCb3rj1p$Scu1D0)w&3*(ozhJ%nT^Foy{W%s?vrHpkx~&F1(2;-rey^-rp4WSr0o`vt6Pn-aZ`){RLpyW_UeKX$RE zcva~FLQG$ z9$W`~R$cqy(zzha4Rz%q?0gpujxBkrls%*>jNL4$lkI&eh{A31<2&M>ljh+?xm>>E zzIv7}VURc!O0ZWPTdMC>3I@hbo3{&^ba#!TuSx#`*pEdkQ&qbpMivq65{vVj@tx14 zsygpc3ar~h;zMW%Z_yzMT4;+e?kz88ewGAi9h2UsGZSi1geS-pBts@5*_n=((6o)A=YkNJa$@}C|84`_&|}QT16W;jH8ajv~xVhqNo=e z{}a%Fi_h3`(f%b3_dwLBlt>fi-`+$p13kIjlAL*Ga4e&E*JR-s4e_xRUP=l|G?_(N z2{^*{z&8Jsw+_v9tl>}%CO8c{^1rGxFY7J{cwrk`EHjH@$-tH_n;9GDZ%6>2`a45? 
zj3_y}IMKU-ts@az{SX6<0J4Me&3;g{ZwGW^2je^cLH-%>AVp?TLc4BX?(@m$olg;e z)PwAG%6Nl{jM^a*{Ua{L;k@MqO?hkrlRF~^kye;XAUz9+m(mT@>)Qab9wMS*9A{qK z)d!NLHD0#1>G*in7hs^C>m?jobra#1zfkGk2JtwB6uog9&5%sn;OeFriF1|0u5ody zDwn>Oy+49&!=p6P50H6F3o5VP{ebN=3?NJPEQj}~V*9QLrb+K17SOoX{?ekywN2_n zPa^YpQ8l-YS2yj)YghEDPaP!b6=J~kk6&me*I<%cqU5Ca*MtYYZcMMrPj%yK5kz7Q zl>$lI%Y$bIHCGgkdtCF^gy?nt!PX$X4R^!j62)P2dV5qn`_Em4BV@@e_;?L5hR1ZR zEKW~Idhgh+%zgX2mQO|qSL1cojA>oTjpY|WU>7yV8d0LMmLJcG$K{$VtsX9{G+I@S zz*+Yc3nk488vhg+@D%fjKL+wu!oLd6=UQ@{+nw+xar@T!Pk|ZAj#!cK&ge1O%1?JB zysOA@Mwsy0lKvc6ix=WMA5Rv)+t-DLBhYybgm-LpHGPm2GnCPu?7Ww ziR_*KS6Zb~0J?5>|8OeZa6S)zbalmtjpyoe22{a-5Z*pwKw}f&?2>5!R1WmVI$K}i zR)ZT-qV4q|sbX{|PCxCuFcRJjfP()rn>LPCgf&Z!c{_F!8huf2`1~0_Jz@>_Pbh%m zo~$7)%rwGi@^FYg%%u-IT_Asq3FJ!s;}`Sn%1riYu?oGuc+rvJjSz< zw3gf`=mRc;I$YsOaIu~WgolzjRrLznE!{Ice^qL!+jbG>BbH2@RV;JFTj1%(ta9pq zJ*p@?=`KWh?jWF#1tN4j7pjs}Or*IOy?N4I{sn>ze- zOPql3y08xK$-5sMsBv-rmj&|vRz`%p|J%5`EJO-G*i+5xj%V0k}l5G?!JrYOC@&^FBxF(d$whQBXrode4W06oJ8Ygt#ECc~@%?C{OtO zKgbZ7RWm)q7K~2%b}@*PN0a^?f`4`eBkZ35Hjy5aPlw$|z|@CBj^RWZBYGFfrv}JA z`Lx*vy~+UTUj-4^i+pM&oP6r?t#ze7D4#~%OzeCrRlK`TOl_8!`c?KbIgmFr>Vf+e zGCoTFnZYE84^iTEh;+I?{=Mn`?DxRH4QPO2g*(GcJ815-&C>qHnm83vG(8xCqlJ+y zHyRi7)m!HJeB?Obw$6m8uNMy;1I!ldXxQuGUm!8oufV#{RcbZ_dn*>95`B~Aai zlcE*TFS~=R{jDGFSLob0e2hA(iEZ^~=9=TF_}B&$9$-*QBamMyoCz02)3**Qe_Z{fNyjwRCX#y6!?~ z*1#zDK9-!AZoc|+34akCo5RFxFg zL?3&UTU6NLDKPd!wj_xJ0>iZz0Zmuq^`f(B!^>t0wP`_9U9#$RQw|`m!@s;6-}zU# zi-hi<@7D{QnF3Gai)|lrx`8(#T1jskJQvRF2{Z!U3gKCN@q7C2lmgUjN&$G4^`)Gn zKq;_IN&(U#6g_~jgNts4DHg|OE)DJw*Fcd*XA!o|O%Zi*dQ?1YivZ+V&)Krs8&=3$ zAsO6R1oQx+xT>W0cA-p%&MF?v!m*4it(s(%z#;D}aP&z{Vq(k#(Ih6kP2p?sGB9=d zsZ7Rd!^F-f6K$`ZnkxPm=D%`efi*OCvCgS8Lp$ zNeqmbXe)rP)0wT`+0@EiX_Y3VZxB{fA~>%br!CPR#$@_)u$TRx@uV1!ZomM&u15lb zd40kSX+s__54Z8`EErE$qU|3HW*38jsB((I;~J>P(!YU!P#YgH7-&}>Ut|dw!_8w| z$uT|gs&(jPP-UQ(xl7`;IbPWWrM!;VYGV5R8|6$4WhYW1tKiIp_hxK0{w(9!Ja5=i zCJh7>Xwnw67h9MyQ(;VU+L?xtY7AA#QpBt?P-dWS8h>VN^@Dv1joFm-W!H=`Qlnie02RkKyhhw@WT3dX;bParY_UB9_^p~lQ 
zl4J51g%|Cr8PlV@S0I94WikOuE!G&4@e8p=>9?~#U?nAjlw`(QcqmD&(?Y=UQG)9N zsL}NZ#piYJO>jL6l#sD8-GNZk4M9kFXHg@nzqhRASFJz)ZF$_fQYez2Me7S_{d!ve zph!NA*7G9yS>%|uhe$rH^CL%wNPayz&fBX5B<4l(IC)Lc!N>llO=3iJ z%**5X32k^7Jm@FM+_#2)u+V#dzE6aJK3C1k9s^ zaOX@dHSOg>2P_rF0yrW3WG951Naj=th}DRUwJqUqO;om%2Tl}kA$fW3s60Q>Cy%Qz zyM+pd*Qn6MOY7O{k`wRLR-PBen3w9l`Er*a7x<%{ff2l4Lh_09QF1KY#z zScb{p9A$F-Eb=+I{t)VM^wZ8u7Rc+e6FYf53i7{9JcOC)*`_49vx8?R;6-MVd%gV& z-dzf3Wqe%!S~GY#dHr*^Y&rY&pG;W)pzZo|1-9#3S?}#zS@`WT?tWTbg54@GUH?aa zBdFou*DUqpL!bTHW3Sfzo7L^v_5K6yz0FL*a}+{9(3)&T$%LI z*sl?;&3^ry*{?^fChK!#br#?EV51)6@4bHWHL9pR%-0&Anix+LyB7>I#erF`hr@Ra zuvgcUY3B#c)p2L8<|i^&)9GQjFN~*V<$rU5JPrsBB4c%VKeVtX3Xc8uYVuRAYEN@D zz*#WT%+*IyPi}aB4UwMz`vh2x)`RTTsIMz#1hemiwqs`N&y79XuV#a|HjI^3cgOuN z^kD5|Gx|a|zk27}{4nla#oxJIu>&znc~iUa>VO@>d+6l2S0(Fprj5gn>$b}D@%ufj z)AbVjuVSI%bu4xh<&ejlnJ-_)kq?UZ3l_UR#r>g)iNo4sOBUMpJ_Hl=%nB$|sSw=ZT%?BE(x(m$&r>7PeT z-vvPEU_$!>nh|X&?~*nsNL?Go>+{>nv5eDzpTEsTj}A;INTPEBc49X=Ob~%disQj% zFSilLmh1;;v!CD}vzKEXr?3Id!!Ze_kp6J99Q`n8F#&=d*U-@=^Ei?b{dHVyH0A+D z^iw4M$5A0g7V{&1;l%%w`;F+w{?Lj4H%R>dB9-)Gzu+sb2fvrk-BomWS5NFON zyAU*?tBCT5nCM5uL_Z=9#q5#?FaOZykKJFGi?!+CY4?g42idHjF=aN-LXrs3< z<3F7zE}Nmmx&GXm?}wqivgc3TiU{TG`RfYp`CIrNTXMWj;uX#Ds<&e|AIMAGTf0@A z42H2-hIAk-x473Ig8?0p$iKs^`mfcF+g2S3{gEE3Ro2(s4AHk#eWU@!6q+~b!|-e} zc?QA#t-INuWYxzIJfuDq3~A-}R-sv9@vh2BS@rj|O>uW}3}KDB<99#$i2Kj1`n#2d z}C7uU|;AUQTF||#O7>LqI?@A zWe!Q=ah9H!8)54ot4TrXtYol46eZ>TJw*_$>O^n`_VkWS7p8D7M3thH&O8=Jg-k9E zYA3yQXhst3G$>mT7#lwhaqXWVaQ!WA6(kKYH93Zh?7EXyCR0(G4r7Sa+App~qi<4K*}32I2_Pq$F!AYv28cRrO2hOOppVmz31{|sEhDiU5@ zPlcYK!gW36dSWHi_264DdXwHj?7AMv&}c$$aV`7)ties$--wL>pFUvEUx0Lqs$|mY z*lKPpkZ7!q_NbYsS%0*kCoRGrHS@GYmL6=0Jv5Vint#SWP4~{EmUVd(|9E(lrQ9-??XIE4_N$J*>jj>?Mm{k zoyCflE-nqYt^{mlj6z-DykqtZh*cu<{H{IcaUT8v+iROH#oh0)MCR|k!MCuuFXBxO z0eY3ieU_5+n#gPzlG%tyKw3E#Dk;8rT{Cb@RXv|xMbpds5u@QP+LS*)U*&A+mDRg9 zXjjkX(GF~Bd|Vm(=w)ErNs5`4#%VR*;z2e38IM$FUNZlT1qX1p+~}eA4pGA{Pr%h> zUcjrs5+U4N*)C;G1N~CK_Cw-T7oih;s)(D3tD8Q{TZ?;RY!@|=qBrMF@ZF06cxD~` 
z1h?slvigSqP2A%?`E}_(CZpapz9g{XJ|eiby)+o_$zP1yq4ibWv=9CS@7NhAiYQ}8EpWt*c+gs>%U;nk9G6%7qYP{~6F`ZIm5RJf; z8N*0TiM=!Lnqt=CZKPx$!et1ZZl5xv9>HrX$okPM?FOwhMea_wr2LsZDO~0g{zV}i z31x5W0;xOE;B*Q>JK4);ce2kUsnR6#b9#h|utD8IMSfxD94f*dQTCjzsN3+&$g4i0 z5i}f0SNmy)@h_|#HhBmqi@z1xASJ=kBDWS`T5gFg%K8hU&q zJ!ZE3nY22Jo+Ml8@eQq7r}&O}s?wYmPN&DU^!Rjoj6KdfogQN`kWIl;+0(bGBfC*A z;KeXZAf%Dm9r+G*Hjgj8v^ z`I(*k@_l9pzjSD3E5CGT4&_vKW(y!pv6(ITD+DzAGITlVwdRq*P6(xf3pMG0&(sn? z5I)qr9$-Ro$Ksvd%I%>vVFoM#FU52x+bifj?$n53Wk@$L%4h--Ub$nL7Sh=(=_|Ea zSIO@Bnrw&ksI%t6sotjPDK;DkSbHMV3LnwhkDU$U=(F^N9P0QVbx^(eKnGDkSO?vu z!|EWN{r4RdIIZZgcJ+h_=2S^vZ{iWSrbOVH;J_8HO!%|Lf@lV>s5`R-FTAZ2J{!A# zzPhupGU3e~OJ62%8S)hR6aTjWE9egZo0Y2iYi!9gOfZmx1k|wk9%~%P04BldfJ%~s zR~bhmMY_j@*1q%Hh@4G5oeXd6psu(#wTwq&(fq(MY|}y2L9)EdVPi97gDnxkHaQxM zi3FtNXs{-dkdnmV&5KCGu&d7IHhahZlWeruui3yH&E6oOY!Fa3P}>H8qRYCI#Pap( zMuSJQM}tT;wBkf1K5fvm2^l=oWhaR7OrQ5)P*vTq4W@H4zYk0UgHu&+6gqt*7z#eX zc;j7L8F_q6mbD2#1jJycc5(_1Of)7XngVJ2>jJsiFVH%rJj`0K@rRiIhnzoD#4@fW zqczz|abqB>%mE~?Z>sO}$9;yJIgaD(l9DO5x%)MbI;-pT0)oh(G5QViE}Ys#OrD>d;JcVp6SMTpoZ5M8JwpbPyEL)gh9xF+B?+k0i$Cfx>KXP3haw(im&YF zKf)ZIzl-@j{}C44`Ma3zkU3_kY0~zg zoYSN|vVxLW?V!v}^5V`P_jOM8-sd$%Zqdn&%*jQ5BqjBE(Jc6-f+5v2X?^j_h}|^t z0PY?p36%|IrOZ1+>Q3UoHQ}DF6ICxid0Kh;8}VTHx0EG+Z)3tonLP2VNU3s`{P zs>mHgS{Z3&X#l=}N#n+wTWPSdH8;60`!V_K;WhRtJ%UdYC^bPLY8Q;9)Yx2yB5QLQ zSM@EYeeC3EALDd@y)N~c-h24#=CANs{Lh~H#Y#jSMx#p`>xL!T_hEe&*$EWg`05|g z9q%0+`_92kFE+3ir0o03O*ZNczli=C*P9N`V0TgmpkL4Nj5(dM%BQT)brXp~(^-@* z;aZu};{2@?w4oK2@J<8CYCCA4G}gGnDj+`L&s{hC(7_yx6%-n?t9DB z()#Q6DXKdXAM}P+Zj9%qvR963XpQTrZV}!G2%$AQb0?88HyV+`o?y6q=2p@`TVt0LQ z2)cHJni}s_u57vwC49Blp6TgLe<5LK%YgL&f90?jcMddq1X*c&u_B&%m|byEGl;h` z{37baNk^wYNnD$i;SL+iA`E_}mKU`|uYY07$t-G-ShXgNU^A~YA|Tm63PLiY68c%7 zTd0CS46t0pm|Ga{8dwX)enrRqW6A6S{P`0j2>;Z|>l6fAoJ9&U{9U>ag2ynU)Sazz=8Ho^A#12Dc49%HR;OCN@( zSKUb6+*M|i7U?nf;j>7Lg+k~%UR%)XCy8Z~zo@4r{K~P>)jX~>QQ5jiLA4h<$N#rq{;bcq0 z`9b1%GH=5(AD9HIFV-*-ssccprw2Ejj5eHuT>BzZ0`BFwUDPvuOYur(20J+B%%pL2 
z@hbqLGrn^xiLLaHt#!yL@!0Kc3u;qU>r&qORP6M>qH{SU={;xq+^XIMGtjV~EJb4G zJ~bCyR9qWdeOy(|$;UxAo~!#?d@%Z#;=kthp+4O0apRaCO!^NjWnA}sTUOSG{tYJS zJ>CC8{zO$zEJK_#4wh_6R`td*b=Jg?Ly7Mk%UsP1Jsee4ee$S%>bB5O2XN`k-B!Qo z)*`2SOZo;tKqVO6QVVBtL4EG9JbxK4){&^CzbY!iRMPk~fUWj6j4y5?A3;|?2{V25 zC|a0wWOxu81Y~|hmPo7p`iEHKUwBP2_y%Z#Iy3CZh6fjxdmY5Ei#04_IFNsJa_epyv;sxP}}x>GB2Du^sV}Ly5}EHQu4^bnXY}?x(;f4VV}(N zrw-jbR5jKepFNS<+Pd}`+O5}$>}Y!(HndaiHR@#i0QgNVDmoAw&~VaiUzuD_k77$o zfCX0&6%|d^r9iPI|6-YwwYcbRwpg@1&lF`90YR=|(}UTkf;y;Kw1lVDIBU(GRrh?I@AX!E9i&OY@ zpMsq}5~EuzBXt0M2w`Z?lq%Fm{kpj;$Om{8CJf-*9SbJ+Ry~M?wJ{Imi z#alSYi^cv~Hkga%h-H2*JpDh78ee9wyTb|Y^(tQ3AHp&Adc(eu{gD@t7M7EcD$XJ@ zen!Nx9|Cba<2;U(J(LK}F9B)ZTX5-&;QV9I7QVfJC9_|XoTk^wA4CpR9Wq&>QHZU+ zpmplWqmJUa1KVlcy4*gD5xq8${upCG{Z{qX{2ykI7Y!(WL)OvQ=@5>Q&9mu@e`|43 zOEI)4)?nLbF!E<4b1Ynp;y3un4tkNL0Mi}vUnbt^e=RGtho3cz9c4;vHD~Oe3%~ed zk@m7+elYX2kt@8ZO$Cl~1?IZ~ zUmj3knF+C%)f2b8{QrI))oHtN7TZw ze)mKDy8Nrjp|l^7R=7N!JP`hvWS`)+h5G^bSuFlXEAaBVVQFLMv=Y{MDS!2`F?Qnu zKO$)S%yK_d+>eAz5-a$D$JWn1^{S;kOwchL;hJvZH6E<69Q0{|`s+4)<~5x7N!?4> zf)!c3Bu;{BAec<{wz~f?{zJoPKT*uYhJI#3iGTbq%*0<4 zRzbM`Ne*rdUdmylit4px_IByY+@bv#hre|(H=RBz)?{8{@%s$o#(VS+=klMdaZWh< z!CYbn>a81LtQ@|B@iA=++OvYnyrtnAa6$v*0m%Xk2#yS}q~I=)3C4;cY^z{dETh&i zM;Yz1a3afD+;a=)Z9F*q?i!)M1)9IVzpJqgzu%%;m0WC$J@fZ*RsF(uSh5(kMqXw4 z#g~Vl2WYuZqIp!|Mpt-=E4=@J!go+OzkHj+Ptr4??EdsC$GIIX(&q3$^;gSSU7nyW zlO+@-?6bx?{DB@WR&1afr82xx?yUD4Vf&|*fsa}z_@!gvdJ1zIqU{Cf*h7OLnVI)` z(Go-i)AW}{FI8U=8}AzZ(^uVZ_N-2Klz(`2{q(n`Sfr!@pgH}C?zt*_xA@t!@bl-` zD{%W6cXsq1n!oPxUJBt^^$FS%OzoSy?(uu}?5=yPJ4Lz^0=5u4qT5~dcxxp4(<}Qp7b&L{S&;D62Gz%8mGIg{yx0LT`(<0RNM9$(Y|Lz z5bsX;pGl!UO8N83I3`o~X-r*1#%=4bP5i50b2zbab;q!O!{J4}o%lAF5pgKJT|&tB zkb{TR`4*`Q;Zj@j_0*re8nW%?vv*TVWu`ZE%|Swx_?Pij8GZ(Me#;PoD?>1` z+I5p}qo?%DhOwsIk|w~vu{69}T#sL};Yt>-yQn06>MhgC6#Y)3buz%uWevm7e>Xy1 zf2@p+ZRxQnj!MJDR{DB^Y`AK(NfN>%zsbmElr=mUYiy(rxa~E$&TuHHJhoSF+^0B) zBX!AGWa|NBG_L{+N*;Yd%eJ-)f)r8%D16dE$}PG3i?9mK3L8c*;xNWTqVj7MwCZxQ)9PD~<+WZ5pgqL`#Vo!Y_@vkyBmJLo{L8u(C2JcX(he&sFZ>F<% 
zaD8yJxqk0R?bK_E(`Unwx1uCTcks`p58BRt=01zN>Yrl zIyK#jq}K4_5Q-_pU0765>=zWHW8$r>C>E39SGuClqqny7SPgQzJ=z(&AE_eky5U`4 z?(g<_RuL)<`VCnOhJT7$=<{aS`8E7f1;5_1i>;-{V-06f4fokDD5&F5R|i~JRLAz` z`s!$n>fno+VZ@d<#MBkrkdG^{M5ov>JVV+P}XtOLsuF$0!=J z>idl9+gJ6?D5&p^88*PCzRK=W35UdsYg)f{!qFp><2boZ2 zFGp~Gpdd%R%h5~?Q&dA{Q8PJiFUaw%%dwIiahGEyITjb>*frV4ircpmeY^S`B)U?5 z9Hs;d7P`lJQtRC#a}qSU$E7?jwZ{m)fr21XPox*y{33aOD+3k$y15pwoS@krL(GlwQx8rv(Isu1D=eiLH9FA(Qxv;NSBHZW3c{RcN0A6!oa$= zoUUFN9d;YR&s|0D6w+ba5H^dp7#)A(f&l|r>Z<4`!xvEnqYx=CB*T{pAfi$?Qxl|o z<-lqZu9|)_T;5+z0U35N7*x#^b;zjsg#&BqJxybZwvJY}y*&I(zE3VpfrF1*UzlW7 zUpJ)3QT77{*7ti?-<8(?sJ`p_>$7xMpFI}PV})jS^!?uR2f*xnt5RDE^M`BZ@MEcu z*?$w5)kh?`UROvpd_h%!&qd+FfmM9WRl!Ems`=ll@xiTAI@3CyLrkt&_n~-=SaY_ zH~T8RqKJ4@rmWe1maWuJ6uiY>*5cl5NAQ=eac@297oHC8tw;d%mvy+eQuo&B-pUNv zF06|qviDD+QMZjHE23@eS};rW)ZGq1D9+Incvek%7Ivr&Jqu~nx}Ga|R=@NNewj;) zp38W4G}e&U2>IvTr0E?%Y3}gT(hys$(y1EZ zO82M%HoHfS&svSn{d?2P&AltQZi?H&FwU`gCB082z4@h}d`tbpg@d@Pf!6BR&Y~4K ziuZfeCOS!!kJ(pvDOkaeT!+f2qF_SqRYI&F$FE6r4UZs`#+ z1scqB+z#sKHtaPjFpq@X=akCo7jp&TpJ|AnB%e~kZX&$pPEo!pS6aJs__YHIak}m# zD|37vm;a(NN2$yx{`pfeYycMkIWTa?paNrzu?k=XDtM5}9g0?Qt1|rOFxZ#(;I8gaskU5&bY%^q zF-}9`TKBPI4qqe66Kvw$EC@fx*KH z0IE6s{rrR;b7(eOJHQuES)ORI~RQstB@!+aa_l%;8=qbWW;<($4 znYWbf;Q}_{4`!{nry|N)MOJ6+h&7BRue1r?3Q?JAKV8OnDzE8Ds(4~7#1rIv)5?eCGiVPX8 zn2`-~bISC5Ed9$2XJVEd!>5!4p+zm6EI}u>RQLXt-YeGE?qA@blE)2bRnw`gu{GuO z)D7p<%Ym^>{Ag0(=rix_6KgC7e_`fU`o*!k5<}u`&sX4y6kl`EMYC(?&gK)LG5rgc zI_pFtbHnJ$qV%Eq9a{tkNJs=T`@qm~2nU6`0le*nao0b zRMXE2Mt8x|^r3nl{b7Cn)*gMX`$qP5+EB?jGQ+ZuTi{0R46C*;@MKf%WWb6uh7UT_kpa@N3VKbsK5HkZ)e>OC@hSB9iFZcX*k- z%vxS$c#TI5^|;>3gC^d#eMsGY#tDQQIO;KOTaE9Q^q_IuqCibG{-q80@S@6G#!(xi z63H>$@v2Up%Umn^fz>(OaV9I;H*&(lCXA;MfoftENe3F@D46*AMmB#bFGj%%I+4el zUCb7~tX@QkXLuN=m*N@TMqWg-iz`Spri5qs1Zf**7wd-0?5)%)BvKDWNh)tJs?xcG z``@B*L}QS8HT^kSZ;4ku9J}e(3QeqoQrq)G;{LEwd9h*e7`J0-e9SQPsllm#Uv(xw6OG0#!`B;{ zz(&iI<~3?q=vp*L~rs@^ zZ(u>m?}%$npoEguRuZe4f6|YYQ8eu6kq{SVAywP~A;*a!?!#R9*Hv=S8be+;0q@Eo 
z2x+}_$LgSUrj7X+oGj)@S$lV~s`HwcV_b!YPM$G>~A(5puBLz-Q)^kEgd}-YcK~7?y?DT^9~Bcg&&O zMtquXYUqsJb=o2GyxJDoufu~O?AW7*wiH*_FDg2|4voDK#TMHg!h?;5BKtHouHj`~ zd*j{w6b;IcH&~^t;ZbKB-q>(6JteV*rPPZvI-xeX!aK7>Z*B>PtEZDyL%hNzI)b(_ zyzuD)0RNor9F#K<1#2BTI;?bz1OqM#zddMoZ}yX_MVgBBn&Du4Wz^0+nqBi!sa+br zv{36+yC;^G5nGdEUgfY|k1c@OrPG5ySGvqQ!!|M|y<=XLyN;WFs|TLYhWQhH_R~v~ zRma>T*PJYJ&hXcZAllpejZM-Ie`&@-xY7y6)E)lLv)d^Qo)VZDE3~R4D?j3%O zs=@m%HRfIB2-_m(4)X{&r6az_sS8tV%ab^pAu`gQJG{TQ_;bNr^>XYNfIUqZr>hT*Z+TXO{M7mi-O(tr>s;PLQc_C68EaN8`S!%gd09dBAc@AS<2e!DE*{ZXI0JNk#^+3ngHk0T!((B9E z9mrd~qaBXzcc?Or+-KI*tB0!!iWaacY@NmAM)vuYJS=JK>~5lKbd!hGFj zdKojzgMymPnaTZL7G1%>mb9tn7&6{N4=uupX9u>Gsylk*m za}nNllvQT)qAhGRl`lTU5$tknXqjdlY}ca6mrB6h-m;>c>y7K*V_Cd`B{Ci~PIz89 zY>w}iei{1#3(yZfMm~SpDE{@9EkIX*Er);Ofi0CN=hIMmGB>TbjAen^jtqWanH#@K ziWgkYzZ)CXWC0P@sN+FkBU$wVzN;&`fQtIm_$A`UuoGF1KJ`GX;XJlz_KYb6d0n6T z*eyRfL{pi4P$MNgoH?nEQx< zW50N4tf|hA!YA+fvGJkLV6*IuNck;+O%eth3N!^%_!p`ql+Vv{uu7l8L64FYu{3fk zXm|-C#vnA+CY`*&`#pBNCAdS-#)k-itM;&GK4jJjTAF6$BbBnuw4yAc){m3RRe&Md z(aZ2YmGoD{1xxMOEAx#|PE=4{L_bm+^TpKl?&C%OvmT zMl+XMEw6^NRZE__Zx<8S4FL0q-Eb39^}OoL`z2id#p5);=9u^H{>&w3oja~$8cwUn z^k&{n=>$h7ADJLHtFtQmAQ-5Tloy7>=L(391#wJD^&0x4hL>yW!!acoejIMh;fv=e zXCLD}+~!ywO@}cUK3H~tnW@lRufrBDFpK(|9+%&#;WIUt{aZ9mDJ6X2-S@%SQd%99 zmCbrje5f>h7%P0>nnaxh$d!td<|M(&zJXaF;_#J+)K|w*8)|%eu8>645-Z?Gda=2jZl}aL!X*FJ4sS2w|Hl@Ygy* zXxxL8CC0k4(@U&izmo(Kj{O`twZD8v6`=cI4~{Katn#q0^RLHq(O(Gn`tJNIp+Z&h zG4I~XH|-Q#GF2Z>Z_f0V&ws?PhJ8H)_VtB|_)tW`Y7U1b4uQtBjfJs-xOB!(tmO}< zh@ovdTx4TmndYTFcgi8W{cxzXk<3Yh(pTSWP>HOta2H3i*y|3X!oe^EQ$*~B+xDsj z@Ufx5GGiA)7fJcKR!VcAUZyer@*AQTivV1@CHx^T-HF~|qhpOzML_(u!ho=RGNRTv z5{$fa4?-#P4kr^2BHVfwV$Tj>Tf=_-nS;G$GpXAPbcI_~Cb-hjcvI5t42>bLexow@ zTzx=W!Llq#-t@uVpbzW}A#*PQjJVl9`P+bv;d*V_zPm>!vO;UZ%$MIbhU3aH$^KG^#^R(?QYFV4_m!CfH2?|W@JWDTha_o7ooPN# zotZfi>F}N%eRQIrnmi@IcQC4I-9(K<^u)HfmBv}DYF6t^9~a3v3der6pX=rsbW>tZ 
z{v;1h$FGNzNTeCja#MV(dcA~SruzD|My{g$36mrJqnp}B!xM2`$R2%GvOgxHt5vqzF}k2c(>M(B&gpm5UV+81n>^cVk%?|yW7`jlvhk*Vl1EKHJd05PZ~BoGJG99F2d9XFq_NDtLrJ{Gavq}& z-^gQjx|#_BO-$TJk&=L^4{Ygs#s@>>)M=zeW#Pv*_rQt6-gFrLsql}EE=J?~)Wbr# zRklkoQJ)4VMmCwKqimvboVT(=S9?s=_eua}ym$Xt6NgQ5*NMsf!4jz6ff9c z`wOuNcIYQ))ekl@feX(agIZ&UOYCrOo$jsEy=`@GTisiid+Xw@dPjrt`GeA>a@GFq zzZd{!6SxfPkJ1{xiWf}NPb?cqNk@F^PPNxC`flJ}?R|UnU-(UVe;(axzkeCs!LPb& z?O6vCYt9q*BF88Xg>NRw)>Z@M6-K#pQ*B*OKHFFO3J!GjlDbel=V zB@)u#6uapb)yhRs#W`+IIFAVV+Y$YqP9D3cV?HXsIUUpeN!<3C?hMbqjP>1rHS5AV zRa5%U*6^{VT9VTTC%f{RCyJ~j5?1(xEYA*)i)430@Gn=3A13D4Gd0z#31F~7d;$W$ zF|YZ}QH-({T{wJWp?u#cphfb1-SzW0KS(WtUUPZ~=P37OstTliNVr%8%0R+RFhax! zU}wSZNYprE|flaUTesSvp`ta|9C^M|2bKKm9oJ2j!6kv46-|^kkJQ z@SWnD1R&dvW|7~dp(n;;`I%6 zJ02D3Eh~CJG}=#}nB9K8Jv-E{=`c); zka~c){f_9+vR(Y^Et`mt=mkzNU#9i2r?}lg8W1F#A6Vwb2TAdQNBDPRqsCDnZVQa@ zz_jJzX~nT4m z@h@w|x)B8xL2gwdPAKkMt7~exd$Zoa^0+qx5!yfZX1xK>yfv-3($dh~GJYQ6kLvZ~ zfBMI#q7%-%KO{XZ-d=JGF9>u+_mxYm8+_6?c&GnjrA#dxevXdUuMK%0WG)Z3##>)| zzqfj=%0bVS0>W#^_Y|=cXbUc;6b5YL$f`bl6qTll!ZNvx2l2X7KC5MOc(FBhuhUM4 zk#;(i#MzF5H=zKZfZ5lAzMo01SQ^m)TA%Y|MF$0a7i9X$EZGUQ7gvu zp|K@0<#Xoa$1OHc6PN?1yQ4|YJ+`7uOmZcM{6NlLFW*|_VL$q*vtyXNjsk^5ShF?y z@q)Otg#M~lds{Wjy>+;^xq9Q44Et%E!cQcU&grC$c<+OpIXozKv-2k(sf&8zUmkV~ zckT5)Uq9jaSmVti_h`E)sP2VFY5Y!`2v^gY)PeXflZQcV{QZApP>a_U4C+Cr!P)De zjOg_Z%7~1=DjJj{GLc=t_8&%pa_?pVi8GWav_NL-zs1T7){2~2|G#MFG@G`*{@*Ul z4@&m=@ASW|t)Txy)&I;|O#}MT^QlJkfVbFPMU#&|9?f~<>6W<6=7u0dniS!Z&Ghc@ ztWQvc>CS#!OE?&L^kG_LSaaq8N~dZSE>deG5yTo_MPHj8@=xl4REbW&L4&z&q10Ze zDKHt&ALJv(8!h#WmZFR)gB+oCDmThE^-yf7M}qOQj7T}E>LcrUj+XjJfO%(f0$jMzDO`NZQG+zmFWl522x z6>Rd}JBF`zp}R33@VKq?#5O4Di;0THW&`1JsABH?!jq&IXctQAr2n}f>hz%ISlBSM zJ-WIR##_PWqH7@tC&1M+PLCB>ec|T&;cb1#86c3MAr0JyXP@n;TEif%k^G`u>#*6@ zCZ>E%JT?WJc%5!L1`}glg&1`C(O}nG`;MSOy26|}_%O=bWQG%yq8FrJoLy~W#F zXss=Md1EaYlt%(c04WBsf}-W6c#h)(d;kGte&4nCIWt2<+k5Z-|M^@$hB;@S{aAbL zwbx#I?X}m6E>KjoNH5AyNuRNpP`!(*-Z?{AmxQnc#l%^~2R}jF>@Uv|@*+$O{A#Qa zIb`Qt&t5iQwYw=FFE66VlQS=KrEOo&q2FvZ+uN14`L;p6$=#9ojF=s{l^bo_#03DG 
z9P5;Z)8tHlM!m59C$A$Px7s+)e}JR?aJ!s8$*rs{jb$PW8g*AfHqK$%OwW(=6(v2T znUnL-%jjd6`rSS{FY_(Kw(h}^Dc3%q+4u6|JTA15VrD;4 z`nERAY??~^HfHwdSwc)OvuhaB^axk#LV8+Sigjm<@U>ncZh$0H)+*aZh&e0NHTaZl zeG7rx!Earw7($EM0XQb~>~w0)09ZSUl+Z^VF zuSgx_x%%2KzExiv_?(@E=xep~+(i-dORk@IKoVX^z*DwLT82ZhtXT>l$=yosHb8 zI~4tWDMKH#V94K8^e>a%c#8gdUX+?tsiT!n_tJBLF|GDa>#4h`58t)mHth8l+>&D! z+{C$pm_#r2FIEzjEosFl^1R9`b7QKMPp`|7&g}e^4tqX9x)Zrlkk6tE&{^lVKBJ3118O5^e$f{sl?d|E?3F1 zVu#BkX345nn}XUke(9z}FJ{V}e3o*KkejMNj&fBmqw0hzE8NASLa0;Dtn5n_C~B?x zsg#1&g$|5ZRNk7#H{^z=Yxx`G@`dHPAu(Pmca>E{K$M&*gAG)}s-!a7K)_WW(}o*~ zaKgp8H#@%FSn}te9zkl*S{K(^4`V<-gQ~UYjU0DO+&dtBY$bx|_bIeoEV4bQ#dgmQ zk*e72Ak23{6AxmZhpPDt3C>arg!FsW02~d4pY_gqASQLz;~t`PROM@mq%8smaw8wT zJu5esovT$t_D7#U2MrG+>%NUN*oi$<=HL13&Eqm(VYxGcVO<}M(Pe9KwihmPl74i0EUw?9F<0EI3EqnRs`bQ>}tv~wmawFzr@sY)a% z7VbE+RSd|Dx@izZ(9Ekcr(pDdccJqm-piKLtPvApDqA6sN}vx$k6^fWHQ)X5p}8X6 zUB$DpHfUXwj}ff72ScTNT!e5g-@or~m;k4zd#~(|%vLqVZd0In5LmvO_B#i1JB2Y;*Y-uij%$j z9QcZm_D!kHda#Rbzst9@PxEf;H}af0!N!O47W5JMe1r2d9@9O{7+<-sfJdXL z^n|c}SdSwWbG|I+d+}>JnpH9JA)~hMo~xVY%6f2}6~D$ijK&QS=jwKKot6Ax{q!f?BAa#Z6j+>hrAXOekYE}xP2K52$50IGFll&_-lz$YfA7zxe zfTvVu^&mMt{Q0rh3|g1L&07M+TLadK!`UE!zI_~;Pa>b3l<+=;9$WYD?wRIqHD(Q? zVt>57GI%%_v|yQU)NKIbD(n6Lw)FwF24Bx-RvUwD-i%qV(9J4q!m+0AV)*iN=viNH zf~Z}4_Yo%uqBpSz&x>5(H(sjfl`GECdbMxbS%`gp)6OD4W^I9jwSgt$##P+tziy}q z=i$quGh%GoD*?l`rI!XScPETncSq{xFhp+4)}294m`nqNR}Myn?odJHVuDnw{Xz75 zLp+LoiTl)G2^S}tgl&V7Ve6TJ`U0rnre4a`t%C(h4NJ~ac_iAWdUqDV5tRQ6Okj;! 
ze^r>fAs^%u&*NQjy8;hv6c$b}DtorH#bsfcjgOHvSn{Uv*u$#K&_X}-M6K$O)kpo7 zI(co{%_vs%0Ao6uuBfmm}2`YsZI?YH*eeyqjh>Pwp*yevIfBt6Kb2Y;pq3+X{VV09HfCTfzBMK|%t<~Pz^ zugn~Jl3;H*N5n4eKZAS%4ubCFklBl|P{Dd$XaBCMA>vMW%d}dm`RU(IgtgL4z%DwrjE|aM|?&*cfA!-1;+OqVWmT5EM-fgh=FcjjT z$_)!dto-_4 zq8W;1lJYx(-f{U%ab8G;!g8E?7XR-=3VKtyt^VI60JR}4LR?WpbvnAF# zi)I6$ebPTgpyal*x+&s1q3VUBzA>&7s>*qOcW2w`oI`~)K^GmNx9rb+Jz(iO zkcI+ZhaiYAYT=ul(Q2m<#2;W{=%%{pxe_v`Gec4rV4AuDi<&v9D=$q&=?Ns&>CX3C z=V)DzYJ$1dSD{wH0F4Vwker%8u9qH7)wjzBsJcUncH6ZfN9!rc1K}?EFxpD~$B`kpKk5Cx3JobOv(D zbmtif@p+6S>{p?kCc*+aOEdo^Dcz5GCqH9#s+K?g*JCw^YYw>xM}K5D)b z-IYg7$Gqwhf#~jo+W*!Xb%#BNA81~F&@splstoyIM#>M$Zux1ie^{$>0yCSk^vZZJ z@`@14>BrP3pQ(@_0Fh5vc-mvg1YW5EhzR8d7f?Vax`+`p@M4}SeK+bhs$Q#$#lo&Q zn$&lASol5GRjbBG3-1^1lI_8wgYL-5^{u_?xy|*hb}FBCM4*EqQ1K+yu->h+9o28D zIEOj<&6dhsXD+To4jjZtqCRSzn^Y22_%^vprSd*?kNk*a6%})qL95 zVB!vF0|5qW1luZ%Nd6h*$A~>ex6~ksc=Xf`Sz?rX?qaq6%Z~5%Qg+2_g-p<(l;mQ@^iAX8>fXE;QNdRE6nDpli}dEz<)o_dGY1EF zR0C@c9?7F37jy7UJZ7ox48ENjAxaDqMEb@CEOif`*{Ov-38-RDyC-1YGO2ua^;GO2 zMAkpn4#Mk&J4@|P*9yP>x2bs?aiRVlYQ93{Q!Bd>tC1e))!m3qke9Sos1d`pgM0BJ zn?hQ7FH=!ZpHKgJB)LMCOJ}a2&uaIVz36H)RP|+eEKXpj-R{XYA9~3LBSW71*_EP4 zbrMu2+ZIcWp#4cR2yjKC7dFfDjM+{%A$V@+@8KU6%~f9RQKe?gw%4y#MH&IfzJ?;s z>&I4krCW{tYceV??Ow^blJIMt+j&8HC7W5k2CY~xHP9tnBZNfPlGgSD@a>~Yy%gT!URxaaR z6nQ2{VA4>@j>wY;znCd)=Yk~jN-p|-ZWpMcMqPy7GuK*FFx5%hZ^YDW7Zu6xn`L*HzFEt@21f6_PGFZ0^(lF`2S$0UxNuJ3|x z)_2nfy1t@s*7tWlss3E{aYlcnGXt|yS#weRDt4Vk1H_wap8 ze+RDo#`rCC@z3jnP~+2PJa2CP#&`;T#yW$q()6STpZAgO zyXU`_8PXG#CF|IbK$3N62+J5ZEMea#>g17ycAvM8Aa4&^K9ZTLm<+vlozOak@=X}j z@d|#iwM{{LCn>^r{sC)0hW_wPGw?yKEt)V6k^y?hOnpBQra-0fl2nkLxLaQ0C5c<*mjE;C zlzxS6-RU9-r;AfMwrwiL^+uq1C0rp%#r=}|SCyl@*%EE#XUG;>Npz#0Dr1GNyo0>l zDbIKTHt1BNlUutpueLSUc4H!>`AgbNpe3a%k5CljGqc zo_Z%oBEzgqFs1~E5~jVF3M7t1Z2yBffp};3s;Mzu^-sQi!rz$ydI5j({uRHOf0yzu z)?>5X?|*ZjEq;H_-|zX$1H zJjvA{6tJsPzU0LcH9C$LqB9zOf#^0cf*mZ$Ur4egt|i0+iR(KP zZI9k^!D$z=nNebkRY?Zz@6c-Z*ZoqP-UwOV&gmXVhm~=S{x*p>b*F4Dlh{9pTg1uL 
z5(=m^89vWg+#(z~U@a0!$zFY!f`sZ61w&ng>|&uSb3j_JtUtDJw=}=;*OvA|OL z-_$Rq>Wqv^d&7M0(#w1+x`zq9hvtI?T%Be{zcKX#$zJ8#gLh)Ha;Zn}RV7~<(Vaps zmRJajVY$E;AG;Krn4SWp0Qe>u2sD)b&c|%;<4fw}mSaACAnw$pLnX$l#-U2xLD_8p>cZ(KsXds7BLt@cv9^h^= z4(9EF1LR!&fYS4D*waV9ic!q@JVHEqz>w&|gGlyLRXERmiQ~m-!3Z*9%%;_5<&6eEI&)$W&H z$SlQOBtF8KRj|p*t^6=*?SAwt^~ok_uTJX!u;dmwssBa7&D`B3XUWGp{V^W;^8 zXb^4CSu3zw^wi8(gADO#2UsqA^y-8AOK=IrYK^i{YOAgLnQo_ zwKxu;Axu&Q3X5}_4=^xuZJB#uq<#Yy;UmqfN z6bZ%@2%5G06b7YVqquX4k%0(#14>UN6o3^v)dyUmlBViUsLr^*d4loTx(UXBtR}3l zPQvI4uSHv$zz5Gu8;zG*6xA{0Y%`)C!b8NR`JwXa2ZGi}X2zB&6U+nA4-I|%RxHXr z-5y?V?rOi6`gZWty*&?GdAPoPAx|wlh1wVC#vVvaFcKe=@;Sfz`1=X~j@A)4k0A|M z)7#_JXv^gKpjFO8^AwCcz~{cXKx^m2<)jK9#WLHS%`Sc{c@oE7Kp=^Wo%- zO0-BSc-0M3eWKlxBG3qYsj;+4aFaL%U{~^?n-h5E|7v$aO$9*|E2tO#hc)SP_p(~4}h-2G*Xvhon*_l{1(AFuzI4+2)8Z+4} zl1obXn!&MnRpZNw`?3gM2^ym_w)!L_Mdc3MsYrAn*q<9-A|A2fv5|K66LUa!oPbI< zpWkM`rTK9>%Has%%kdJ?hqHb7k=g;`fdp3|Z`-A>DLFcFjFI7E{Izjm=TJ zb-fm~-xD8?T#G~gb+h){CQ*yT<4a0nW@mj+gn-S7=Z{$*#JT}S9v{l)9-C3{nRRgtJbZP;Hn|>wxW>PSN0|$vz)!b$gbv1 zMdrv}=G5L5W}r_Zj~xn&m=dj2_@~VMkYDUWh)MGnW+B8#Q)|Po$n2k!S9)?elM5v+$L^-mBDsmq5mH_n<}15$462Nn1eU|E7@mQ~ z;64|6IzjY%VX3#6%nTKP=uSJd~8oJo38F1 z^byqhGZz~xFGLXbp@i%#0sNs3jLy=mAaqxTs*m(6z4qt zNsYi7PVqIr6Mpf{^?p!R_#E_gpk+T^81O$T!_Y}tm7SopFk_vcH6QWf&jg~65dByR zV11?wP_k`-@2djR9{CG({vt2GGFv;gflJc)MRQ2Srw8JLzFBVvmCu`hs(xen>u6s7 z@^i$hJ9D{#gY16F7xEVGa$I>`_VXfB!!eE66GBtMHQC!+xdp^Wf@|)DYqs;R$mJUj zI*-0Qi3c$cBOu}~Y*4SYu3FH%WQh5td4zFxjLicG|#$NT)y}5kpWI zykmb&65&UE;vb=ME1U`+wZcpZsH-Ea#fh<}*y)fu;R-Xh+8r41^nxh;Rs8h72Y(^$ zejEJa6Hy%P|2y!P{oC+Ik=GRbSD$rU_(%SG>p`Da|K)n%to~oF2cVLpu!Di-VS*;M z9K&P`W$SU!{oSj7^CX?~N}GM_-herRQa2Xyevs$hjAGZZtGw}1{1N-P-e^8xk)inI zLFFsV0PTVrdowh3AX2)2U~|B?S(IE8%z2VFJYMlZKJN{|i5mo%6FpcJMg+*{;f_Z14r-I@6AaG6 zDr9cVJQ=2LrQ- zn%kSYqd34XIq00BkHX2DvNB1zTyj7EVwXjHt%#CTTa#IbD1nudFZ@aFugk`E`} z7cws=1eynV8!S7|0|hz0eH_H~?Zm^ALcRkO&1`)_rKugwfs5!bc7`HQMq@MSH_N~ee)ykNW%zJY1uMXZP1e3kf}mJVxV;>FNiHqW(^|x1r zqXn&{z2sN+8}>!CoT)LCXs0Dx1JM@NswZEUY$5Z?3FaIb!$8T~HSPF1`5g~FcRC7~ 
zAcm`mv$Lv3^vx%9=r zLs3F-RG63|-(eG_TdT~ss){$JP&oski7HxHoYxW@V|Bi&NH~lKnhh(vE);(2I$v;N>!g6MSp%oMf>l}7*+I*njbvYQSwNc3{|D`2jj7K z^(Yp>&ecpLMF0qyMb>cN1ar2R81NFaGZII7i8*c}vP3SPDlx|XaO-a$=-;JZOD_=L8thvrQ2~iK zV`Z5X4@HE_Qp{PAms^Vto%NJZ3*NQ#qAAk6_U2_R6VodYnZb#QxnHW-t0^kuHlj_)Y!Jw~p5o(z%LIS(V=^4%GlwI6 z4k=2ndB;c`^u4O&Q|{1HrSZ~S#R{T_PB5Y)NwBV#VO41o)6fjHDs^#`lL@59Sl0$ks71#p~H^+FxBBPNp0%`PPK!d?dRsGdw=y1Nr!h$8M zji}r_;;^}*xHZ{SRg9zRojKw28Lh9SO`)CAw?gK{eye+z3Fds=F05-)WKYodrtJB- z^FLqJ3R>=@@DtRQOs6cW<$|{+A1G#`1 zneSXhD;W~flrBhDXDaEc?A_Tyc&BXGhHXu62F9KyALW)42Td#V$Nx%w94jB+CtK$D zOInMJsddU|$;oXEKsjK~J(Uk~I>(3W71RtD#T=_ip+^A zK+<1Ba&8Ke%@j$D<;zh0nf=9Nwh+~ca}($2M43_c^Pi!l5c<}p9gy(LD6=o2))x$fgjrn)2SwX$=$|FuP7LN8VF0yZndj_sZ3o9s5LYt8WLa zNP|6~liPP0#9!dGNSYo&)1H1_?KN)#O-%P;Bi-MC`&;h*4sd@3(UMPyOMZ*oU$x=k zz0iG^y(!=GG^Qx%LoY#a%#Fdzn4bh58V`qCfn(#zZvET;}*G`Y(BY&1VAk;xN_`OG5QZ-%@ zP1hNMQEg*1Ho3Fjw`%5|qilC4h>`r0jjO=s|LDJer`*4j=U+AM)#3gAtNh+~{DFMu zf0^|i@7YW9exSQh0#BMm-20mFuav#xKrA-e*l#YxxxUbJUp~Q- zP*JDsH+!sb?Hw!Dr0xGQiLk9cYqpxq65sk_463kq8CJ+7U!d+EEld>6h264uMY;cx zeb&=77r$?949Ph{fS&oH0Kk_`Xdq@GF^e}9NAkC7`#(&yAPf6#t1`bt&yKpBz~m;t zWhJgkcNhN2tvR3Z=slDn+w>kj`+}*`Un3TgYLXjJ-lWSvLV2?(y0a%q;U2!|oJ?*2r1{Hi>y0?0uDv=Z2nb)+lbJMQ&(H(;dS_;;dWo32Bid^O7P;=_M zv}tGif_>)N#QE-h?WUa_3)Y)0(Z&{xER0!W8J^5=DRaFK6?_ogTX$@ajaeVStZ9@0 z^Y+9k8vZv((&~{&y&IUj=;f@eh1g8ld}1Gp(xeAjIL>+_d>2{QV*4|B&MQ^pH&e4L&gOq`dA&T`o#)vEwMTZy1VJHxGt{+ya_WuVpy_m=Ur zxBOhgh{cQ)SQr>sxxlDn3!_J{D-ZtKLH8JSC-H<_mTk;>o^_~l&Rsoxbn}S)D2u-g z(oyLN_cnc%vG)yg0mD;a?yTL}(^$L-*Dt>4&O!d#BikomZ7iOY4W=2ef{mo?t-guR zIb8Q?+SOSGZs(waeFd9lw#o<@vB9*3O=hljZAq{pPK~ZBiZ;!UHg!*=_nV9lR^nTeV*fuUoi`3WqpL(X89ZN2<%I?o81~vh4*Km0^>g&22*%!C}IpId>{_RAgn1uyQXJ1#PR$Al|ve z?u#A*&S!zo4Ut9+BC|Dm=pya79Gen?46>KR=F(W)DfY&LuZeveG;^~rx_eOVq3x4P zjm7@#rrm1dZmm7Ew;Fh^&57>GYueMfX@{EUD+@lKc}SDX&0_dwJ(x?g5i4;KkHQQJ z{%L+LfT;5{k!c^yY1-4VU@x}0l6r?bo{D946Us07%6OCmyDXvslwM)~wniiJ9&nQ4 zfuGwwp46y%HrLDe5E;2aB=|EZ?JS@`20hB~XngwYk#3#hz^=-g;zM=BCRD}8L(LFl8G)4_8sXb{gRind 
zT?&{%YfLXiCDolk<5l+hYTdH{-$Eq`Bl=^i%Mzp<685F{Mep1YpY7^Jm<1 zCk1K=pX1nL6F z9hNmkZhsuX58f|n67^eynnoAn{K8-Rh%aK)_2NPHH|xS3Vs* z)M`9B7py7DkMf|%ergv)VV!sK+lk%o`r}RozmG*rro!);7TH4cy9X7llBw{$jH$p3 zVaa>Pn+oCY0Y$D`<)d5?i8d*k|H6DnCmN5+p}8YBdAa9DNOEL&b-Bilyr`F^6X{!^ z$u2URX70@58J4^i=F!Zk!R?jC_q!>R zBn4LqS>Z8m${mstx}KD)-IVJjWtgN4a8rh;JW|hjZpvk<9!bF&!Po{(6tM)f=X3z{ z6rtLE<}!oK?H=3{nPJZFfDnGVL&T6N*(_?ct=lJG!pn`ITT3Cet$VA1dGUzc=#IRm z9ZCvek=s?7e3dmGaurU`B?#QA z4YYF}(3qAG#?G(6WYdn01qdzFc81hmk0nhuch_gGo(;Ge=WxN~#$d@mBeKg+PFMT? zxvzPuf*-T}5)%gQ(~+hiQsxZYWZ7x^^5mXNyC?Xf68EFf!NTXG_9}VQhExHV+NbiR zg)g(8Q#4?eEmv)wc-QMH_joGdOhLYBB}FV(rH`f-cKTG{(0j6!9VVxJxo%^aloF3= z>UOcfbf5mvKHc~M^>L>?N`HA%L8~Ep2gYO%)akjYhA_gA^mBB2AMz(|rkfMxVI&V9 ztxq+oU1_woQ=m$EB99CuJh-T$9qdK-*>CAA26d>_HqnuM&N2D8spi7ba z2k_3D11#2hhtG5pP2Tb>QtaIw1SnA%BGqwbBc_1+L6|7qxlbc9e0{|nHrOZdhVexs zx#p2LS9_?yh|t$h$upW2oi4lbnaNseFzT49SxyctP~d6Hk60a#1hOSuk#df|XQ z{c*Sl*EWAoG2*AjD;J2S63Z(>v)#=3VD2*32w71Z8kS0nEGGV?0g@VOb$W|`c{iT0 z%IdeyzpNLJ{r11Y^m6TOSc5{vJN&Yf5lfPT=z+CWn^AF)*rwg8j3Gz&H~GTnR}rc* zwoGxj#W~n>S%LYrt=($6C3>fO!~Tau0R5%9&W9F@3#Ho~QzP-jQZZI=vAA!+h z;~PA1AmAfZ;jHtdIyp^&)9V1TGiS6UlZ9EvV<)(u&K2%oCd}W69iS=nLys7baVZaK zY{Al&jbOv%-o%Tuh9v`KwWJ%-pHVjcxKg^CkGn^H3{&HjcY-qy>MSw0`oU&QzP-%NXP?0?;fgw=ufrynb&?IIy47$Q?o72C#^->AnW> zE}&=v9^@Qhw!w@41oB^IzXis|E|hKe=@>>53IMsIl4AyCanPG|*1381*9c5}=tH^b zQVxizZ@c}0PG_-^eD#!DU~f`s;ZrJz)i!grO!N#cO$DVH6*%VrOvs9CkPUahdPX=w zJjblu0L`;Kw@*=m6yB*ht)VkVBD=&m{W-u)EC?h;$MkG7OuiOy?AEV=$vWkaUIIsO z_6MXXc_pPEP~M^>Kld&}q9?OX{Bu|spAs+qt|{k-is2oyD;G$eW7jes+hM?V<;{XG zwwqBLy`GJWCApj2Dx2M6Nh_Rkrm6r^jxZ1joT70Sm-;$+7y5^pt823~N|eG+{Hpo( z^?SqL0D&hHT|NJ5%CIxn{5()_C{)rKUTZ8aDUT(mo+iT8l$Pkx4%P2EYx$zGzrVr#qG7kpuE_deVvA22yY4%^+)U+{K?`MURM2%nh$(btQj z&GVzpawFrQc{G|VG-mad>aDUn7=Om8+Zu0m-g{WzXGyOQ7&?j9w)j1=LQ z9&-me(p${x%0nePYx+t(vbnlQRWhVaB`lEktFrj-5xcslj?x6T`HFXhe0w9itIP*5 zrkPTRO!JFO_Nn!`q4;UsuRwP-qL4+Q=FBQ__rb-i4&%5LF6!nH+`Zub4D#5jv8&|k zgOV@oUjn1xSSnrPgt@9%_+GN+cCaS3U@p|5y0eKnT 
zyE$&qjp2)o8#VqHeUrMQHZC_7ui-X8wZK-rfK+H$0pk67rIIWC#*?D$0?3qpO6KFJ zQ2a)EH>D6me}98eF}HDysCW?AtO*@Kkn>Kb;?tE@3 ztt;(j{HzJ2biycm2p)&rkD2zQZRQJB;#bKuk?8_NLgS~hjE6fh_B|t!hs9nDvvdCo zAP}prUxPq(+B=U2uJ`rKZ7D4<9%B&@6JW{7Zg>=2#Fm<>q^7_MIU`a0l>TI+i@6-|YIQmORU&V);Z2;YA*!|#eF&2yHJ&Uro+Pl1i{!aA zhz}m$r_}`^b8;bC!E6W_j0Ob9s@p!EL%PI6wq{vNazI3L=EO-5;(L z1?$;v-O~pm6s@wC-l6Me3_Ato`taUFk#gm57++*`^p{PFC#g-sPWx!9)K0AdUlU#+ z5k8b&LBh-e0Px(yZG}Kr;3@)$`$?t>zpg?y501T*8q%{%>-*>A1*{PqaW$9v0!C#k zpb#BM4yE8*je&t9@@w!&RtW{sG$7V6_otLztuk*w$Q1Td@Jrbc-iZ7(kaclKen5orLmKLk~OUyQ12IXOEo z7{39o&fT%UGGke%9=eYOibo+mb&15udyFZ=9r9rO;ry!jHIV39Bf3;_h}8tDhAe{8 z=8ZGz7V^k79!w#IcJ0>~Os?g|M@ggc=owUpqrjqM(~G{DtzYtHJjU)5?c~F~5lFc( zjJeVcMHrRX{IV*I-oY;z7`+t>ZfRC6TG;6L*R+Q=hIaLv*DxgIvHiwV@Sg)Xu*~iw zBQLAiXbvDxlGwt$axi%f+oq}={Bk~6W%FC)tOw41+Z6P*Jkp<(}UiSvY1I zVVp%6Nw{7ZJ2AyMd%B#ny>QOLxtetv(V3J|>{Hqa9@sUh81_jMFwnCC*0sK13Bv!T zNPXgD$-svsYO-w*R3aN3=@AGcX105OqKsadL<@^;gM1FMeh_o9F%aEYXg^$s#M4xd z7C6!YOt3)T2F%_#txDAtS)AZ1R!^57Yis&%j`wdsJ59xD@!0V`jb`+CPtFJVg~5`8 zM)a@r5H7AA(lOrc8Sh__cD(VHT}RvTmR*NC-o^iFyeBi>5;ox2@s|BypVR;SQC>}p zGRphI57UF12i*}?Qi8XhFuo`K55{+A+wtWF+c(EIah&nZ{h=D)-`;(^@%hE2(*K9&bC-M6c>itu@y7dk*v9{0yi@ul?Vkzmy)E3^ zZ+^*UZTa~Sz5P2BHEK(1)E(g+Z#In~>q0pPOmIbdzw;r9nm_nqa+~JRI;3#t0o6*m^H+_^H z{X8$Qgq&HNtYje^E&~{z}}xu^G=^+W+?VuH)2s6KUzyrmcTx#+SW1f?P5j z3d;Cme_8e}q@?CES2FE;IYZaVi;ECl!UHSf6Z=*be^7Hh8O!e^VxA*u&ohY9_wjdJg5uIcL>RCVKt>=2> zZgN}4Cc?B#s%1+3LBsxrtN4;erCRU3J#JTPA_ZVTbl8H6$e&QSSP%$UZUwbfxbt$1C8t=D$@ zPiLxmH%i+3YvRY5^tM0_>65F4-fE~#EL~!p;y`cjhqrjOYI>uOlkec5=0wd!mpFsT zM?V$xm-8(2rv#}q@S6UnqGL&RsQxzk!$pfre~3_;{+eC-Td(Nvq_*@|lREP0#6n`yTa|nRmg+lCB*9gdk8sHvZ>~@bQi!s zgwh^DsSF2$(a(;9?E`k}g9^i_>9^ZuJ=AkoupRkr8>*{*F|ibsB|50>oiLi?Ob-|B zM(kasoi59k*`=n(6RH^_wu%ojJz{hrLtl9Ps9J6X=Tj*|k+>Jm$uE3q#W-Q_s3LX# z6g?RpJ<&T~W%z1YQ@Bd3nM|h?lj*KIcdB%5WJlGx4#8xzP}AQM?LBADds$J^rkfho z>KIz@FGW7l@a6TxIm`Y;4O&eNUJ>B++m^<75MMgMfx`-GjG$cbh2AxMBSofx)NfQw zFb&`IsS2jKT`{KqY)iO={a2#a~8d0 
zfv^rIPJS*tdGu{f)BL8`nubm{*{$Xz*_VrRXifjtH1xU2Zn#a7cF=EP`8&L;+T_bs z@!H)avj%Qqqi)Q4ix(V9=~Wnwr|=?a`9GzM?AT*}BM0XlXg&VO6LKS>4d#GH?Yj~L z@tsjmILkFsiQK7<8;p0svw?V0Fe4n?a?B*gd&g`tvN-2E0UfOdf>FM5RbQuGRYqZT-oml%YS@mQ%+D%d03^@rHh=6_)yi?q}rWWv? z3}*4o9s|9YIy8kyTYn*+;-Qpwp5dip&Wsnl#{HEsR_}I0Z&kaQJvZ@#ZUP-7*S%cj z#7=e%u-Madjm~Y-tSHbKv=Kb7$?;Xpeh34bHBLFN$6N2QfS(GdP$Hl^^>HbiO9!p8 z4{iZ!A!g`(YMS1iiL~mE4=chB&>U7&_7z!P%&y&>ER!{M@+BSzk(I-gDo&>g`*JBH zM}9`#Ose;<$wQSg&gc9{Ud*q!^Nkyb5naVuSoCtzxc3cjV$_RyshHD?sIh*OT*JD` zb(H0`hfXsdGx&lL7_Marf!*UT?E&=8ERxZHB_TzmI2ftU4K*;N3l2=Iw2*t4dgfvNEOE7ucYQ@uxHkd#dgZKQL4xRyZ?ux zAzkg?zZx^OjxYpBxEB7)TNQ7q!7Cy^ZTA+E1D-5z$u8e&W zK06cgt8Sz;AF}L+DJ_t1NVkC(3ahM-hf^|7~_ z9c=ozNDlAOZ8mbEjry~)#oeF_1JZS|uEIX$itrgQ6+d8iv80>UQ;W>{24briSPup)DDr$=h5SjXVV_<0GuB6Cdqo zexN5*E-&hf)&Iwkg0ge4v((4PMXEk)My;LX3Y#1dyE(_Fa=vwsswTFKUeh;+ShvYw z(~RQdID2iVEhVXp-qI4ret9Jm7QQ6FOrBg%DXd$IkVz zD)as=YEio&`!2WWU!`dD$dGUk{v8#$)XNNfU5XKHzE77qi!!P< zaWH@t7X!P1Uxtx@F<2`dGpA;#T4eoyTd)7S)wsBAjMHs=ETp8`c))FAnaahWOP~*F zLj@1-Y5C< za9vkClQB?pYSJ<~dqu}%dY#35o~U|=A>#ulf}?09#~>C&P0 z(C~2o+Cw3tw~25OC=^k2U`iBKnis3=CHkdsq?7|=oyvQuPUoi$QQDxS80&28B&~=< zoYo-`#(00Y2$56{*R}j2M}*tOLy#_Y)5nZ8neGvl)1Bn;_(;0&1_A{W%~GU@(@DjAxhX?)#fBTR{XOeqWl*uLqskAyL8QL_U~^-g6S$+sL(Qi%YzOo&w2w>^b?Ze<-cV0IUP&`B|qx|}Z+ zP@maHcBRsPBpnVW9%X}9WY5z$>J9@R-2 zJ+0fdJj6yAAx@!4yetWd&fJ!PA`XTS_uo|W@E>%`Yglk{f{Z94a+V#xTN92l7!+4@ zd+e=9qbx_U1$=i-aG`n8_MnN)mh>-Z(yLLhXM{W~mr$b%_eKTRXZC(BL6f8eWr?QE z-C17*8C;XtK#KokxL5oThF-E5K9SLYfc!!#$5uDTBytD>kf^^ekIAYc=VmF@^cj-D zhe!r47kUPO_5Kpq2QnkJ1FS0}ruUaKsn&)2RBB25Fjf9*y_k)pux5=nXb58O^TBcW zr%jV#gb~@!o~fI|4bL~5>+JU8j}%wDsB-oUpYt?toB3M^+8g;J!kGH~DRe0B-}P4q z)bjU+`zQ7MfpTeeRNM2PnIAWRnxOgh93#Zu(p@d(8kYG~k>U}uZW^(LBQvDAZ(OH=>)ULoe<|>{XsI z@*8c`NvGY_8U?DJLv_C+(P_t=!B3u5wv~|+d%e`^gm+54I=TAt?+F8nCUB7Ek?|>t zJ~DNQZQZX!4x3(@W^15Q?I5*^n98A8fI-MO`ah}T_MSCK4qqkO82G! 
zOIs`%ISEJJF(miSG%B~N-03N}|4_zF$*{NHnZ|uT`lAe*c0ie#wDGVogM1Jwv{9T< zA}Osm*Y%}?d}UYv%m5w(mWsv|GM_7#H`kGWvvT6^X1hr~G%N4@RFM^E7{|Zz$0kO$ zDifMjkT99uGv?ZRz9()L=dw=xRI*@ct;FKEB@JW09Vp&NIeAgbla#?Q*-mbe-LDjT zhhlTa{?tQ3U$y)D19$=#!J#snk2RdB{C*lM${5KLE*P~F(irEB_|3`MWwLg|wnQUZ zt8P7U%=nzZL%1x#NTeVnrzaQjeNCNY_JxY~gmM&q6vn$K)F;CcPVNNBtMAYx_e<$7 z-2tUi-I4;{al24EV^daSA|}<&9~43emw^8J1a=wqSw?IT5PDq|R(p3P!>44x`>576 zAQe9>I9-j8uT>ipi9m>xqhc=DRnATHks6G; z2PjiJqXilmJC~gEA_XcmosiR-%1u5j4+%&h$iO$2G-brfq!p1}#Y$Vr4fJI@7_o1^ zUE^2=2!UN)96DrQO)7{Ljx~%APB-~#xe*Y@vlrX0U>?;w4f9C?ZzyNHY|!Q=&eM>- z%B&MefpTu7L#Sjt9@1VQO~54Y0yXRlRHjW?;omw>@hCl#DO)!Zs&c0B53t@0-jP2C zKMndX0MoY3p>O8?NZGeyuaD1YZ1j$|QR;yy6ynPY;=-G&vcx^3GS%~ZX?Xi499gJ+ z(=mZi2TJC4;V;}`y5&k* zRveU%nZ=(S;uYU~n=bwdmAwcLzly=W9@Oh&&(moJ-pa;eC`X+^ygJLq!e!kp6j4W= zux@3V7*4b9>29_EK_0OO`|~~x$fLYVBjG@TovC(C?Aj zJ+$mMUzI=|pH!GObK#~8?iIk9V*pQSa!Y|zv4za(mPAKyzF|qpxvi^b_)mAfJb@T8 zDe|~CQKV@c-sz*#VTxta{tX<7+n?O3I(>MBchZMr2LmN{*0z?PV#~GItKS$o%Y8{Ioqn`jwOu zn8PPmnqN5=OJj?D^4N<<_XJ4}KS>cO2qJMh@SCQnl>LauKh!Jf3;9+hcQaeLsZo06{gDgBd4`-OdD=|b=luh`s-D@`rd#@%RBkLLqEXU*hss>K zm@dic#Nz>Zk`s>!D(T2|ku%7l_!o3wvI$;8jpSB~D~>rim0-lysJwQ4x<>-6P%A^x?#@ZCaYaL27KbvHuH z4R-%^kQ#T-Fgd268jc#a8nRC{>f$7&23S8;HJx@Otc+VwE3bN;Q{%ZUGb{X9{tp{)1~Z|cU4#`4uNp=4|8l`Z%RN+S{ zENodqp*yI{GI}qqTq;;r;EzqWGM~IM34fT;N}M#M+u_oOd#K>kZGBbZRng6DYg{Ho zaxhFoe^2H5@_G$Xi9jU6l{lYpaoBrD3WweJtPe7$fy9)t5Byjg`-AVJ09I}5=|qb_>o6{BtboJEvd`kkK8} zIb;*0M3|Ir(iDDKT2at97{{n-(x(~kROFwQoWl7O;D_#6ob06?4P;C#6N^HhOe2m4;Cb-0l(MH z_5#_Y_UZ4qwJ(rzrJ#Sp+&h5uNBliN(*NYIFJp;Tf^jZa-V5`SxWTWis$>RA5B@#0N{0fi#M#RzeYf%vVgA<*M5C@OHs zvH#GiZjIrAj)-P%Cawsx(n&{zWaOUhSg+~G_ffi`Gm*pqi5%OGWUFicdANG^`)ix$ z`^8==#A4mNM4#I<)m#-cmZNb!aV&)Io;v*Sad|A|tnD59=tviih=fSSY#&*>_c5|| z*+U93r%ay%RBCtIVMV3^THEmnxVO;yxLZ9V#u2_?_N&WX5~qm_`!?qxKnPlsWr*tS zQqC%M17M8}$js@73G* zhG6u=qa&<7r5HhA2%-HK;U%-?ROKaeEw|+B)8Z!@gYEV`okS@U-FZ|yK#L?;#_&{k zb5(q@x|2C2WG2`R>bnq!uXcfn-08I-rWUl&Db)f!?LwWpnrl);8>J}w9=NIQxtyg9 
zzZASF{&?*OuVyhcXfMZO?)E4HpQ~`(YR#C3mazkPpLtkcqu5H{)twM)h`JGSihCoZ zG#2uG=w1j(Op6{K6F!6fHcV4AHH^oqAEa-8Tw(q*af814F+yNIpGxhHz$`17iXtyV zI1pIPRk~}nj_8)ntOB((9K1lCA?tPH(GqlTHke>&o z(eQ5PYxp5J%2u>-;_TV#zN8ZsN*O&*lk00NX4v)p3P5`BSM2@;GM>G(|Lw2%9Bf<1 z2+}h9bL@Qa&I1VP^fj~85aPC$iBi_zfLIR;7~NugO3zwrT%j6QBWD)a>C_s7F#+zuf|7!LfR6D6X7lQd=e5;Y!`4; zkLhC28bS&_K1dwB(3v3<$9hhN5};;my?3NGCv$3*IboQ|B-h z+hF-iLD@I3j0#*-nSx%hy)bAEmfyw^)@8&Zt2j+w>Z^Q2iUtXC4_mCWtd8sm|Dk}H zPo-&8Z0olUw{5ul%j`}$th5Wd%y(+e8*b&Et*jEo@u#sk2YYJJ#5sSmX~>ni=W$RO z-kkU)Z8U~Tw&L)fTyz_g6T-!sgdOzk%?WK;{R7p0f3Kp~aMqXfUzwZFMeFcCLpgtu zUO88*5f;jnC2@QU6qsW~p9D9`k6WEBA{yQn%Wk;_fa-oD+oOyHznJpE)j`HD0l z5&V$3WD>qmuEygOOuXN{aE@Y2a=pDh7deoQv@;6vGu4i*6D~|jc1QYf+s24}N`>51 zH=`&S9io-iuDnQctIHXGuB-86}$*S-&G$pZh;{R=xb?pk=4p%!) zjNHoK()#Ew?8F}O;d4aXiVZRr*JL++s9a|r6|Z7Ljm5*i)pO?U^j2KJxYa(9=qsM{ z#9!ti@s}xW)%0*=Mv|WPlPT$jqP4lv*1pyKqpepOv#ui-;l+(ukcR|^ef_6i?7FjJ z2H{Ra2EysjArda}_*Y)ln9RBtq{~S3A1CS=oSR@WVo!e)qs^VVD~)? zX!2c;nhe=>FRRA!>$mkRwT8rg43sWOydo5V<-dY}5n?HAAr|vBR`*LYqhk_wQCES6L$g~GHvn|QAD7=Xjfe(MzeG_*FITy zqD~k-9sctSIxN}J9lnFjkbn|hEd)bE_%43&|Cr@;rwFMzO&@kFcs-iLa(@=xcb(bP zMM0Z4pQ?YLMra%oE}ljy%UF63k6um%dWg~byF+{(Lmc@L9HVm1 zBRQlc?QumK@uOL5aPuHE=Z+(b(6Hff*bmVTDql9YzQXs0%b zD*-6cCq<0god2Sqx@fj4x~HdZc$Qm~N#G7Va_kC9|HUhfOnds2ix_$c8V>~K>zVAX z0)#UD7iElpB`uMajLwxjQq}jViWG$$=RDHZs8th?pHa)Exk#2x&N^@62b5;%Q%`v! z@t#!TB=^Z(pgFE7REjjCZZDPAs#V*kTA|TTE0ocS~4gwlBNLL&v&M0ADA)@Kt-8q;N(u!s^n!{-R*R zATWki6a&30*7)G$LZOuCEt{flDHB5X(SD$MQ+a5h=RQIrkJr_NA&Xo2@6phtJw0TW5nP)^LRtR!psN@ZNrmAt< zBeM?g6KkElFgfv!qas>In_Hqu9L(GUC*W+hN4jxkzmQyQJxB)p#QhdrU=A2Co6Lj8 zT#3GIJkb=R`im%ul}Fuq{8p8`Ys7l+z`W?#E4J>z<)*AKyb-YrG{_or6>s5QDQw9q zBQCT9%(M5}@x8|>LK%WC%;H3TeW z#iSk8Tt|;ck4nw(m5iEiqP!<>$$m&q#skRmr~n%5ZA2ZwOrjy13l7cVn+t#d1zt{)+`ws7*79qUH1$DHWmYc0XY@if8dkd*=c%N6iEScDL|#c( zG5ecU^lw{5b0z4eB}*{DPL9B+y6XOKNLFhG4dc^r*@fg`V*-#p<%GT)gf)q_U+zL- z>Cn3al+4yXaUxlA>wSq+$QHqaC;s8$qoDc&_7bvy&qltXq`%j7vMlV=? 
z1vT1L&mX(NQ}JIU2EEn4P@3&brm@c5)R=jH41Y#5Kb|Yy5w)_6R&i5}L{0CM<%m1z zK)fe$s~FeQ@Ewvtml}(2^;ts|!|xW|SQK3|Kf0zv1G6S+48iSlp%E1^ z)v8gR2fIaID~hg~A6?a9xz6!L^e8!=CP(9r-17W2QwNpLj(j|!x$+alhRV;tW8`B_ zXSagh9fDppnQ;5KK=5+b5HW&Uz}0rR0+60eHR=fmbdAO=SzLMAQ4>^NEOB2ZUfr%` z<}QR@>v4!Qmyw%?)-g|%ho>5gPh+u|y@h5%)(D^3;z%qhRbddjq9MFc9oNo8)xH~t0D@j{t4 zVF#l4IM=Pb@#rn|1V?Sd#J9X3q1#^|-sO#1RiuLhgz_76w7+J*b~9g5u(F`V8Ng>y zfls3af(oP@S*@LCWDScR0u6>-?YJV7B<-6sN+BP#)vZ8P}O2mc1>$&et;{uHXMBo62MwL{3Od zf5E37!!?k=1FV`dwH5bRLBE((r<_^CQtA5xF}5HYNjOmw_AVqr2%Cf>|Bt$JfsdlP z9)Ff3kU($(f})}X#Y(ERM4=`EnuP>rVHZ%rH)^Y;Dq4{20xFM%O-hFC)YRJ6K5VfM z%df31w&fwUx&cfAD1?V3NR{9NXI+H=3gKb)|2=nR9|5fN*MC3%&)+ue%$+-T&b{}X zd(S=385HcMfQ`rMFDqK_Efm=@zFBtp^Ziu$vr;~g*Q~oKx0KK)e>ru3dd!uzhdj@@~xS-bRoENDsKT#T#h^m`{)cZ_r z^|5=X!e&-+S*?|q*FqJemC6II&kI(Uf0kCgUG3koqFSqGUaM0r L7N_*NXadr09 zLwH~Q5V0al%#)%|mWauk3U@|+CsHq)ffLq)S22S zv_(CIS*b7-v3I=<*rgCMMvk+QM&x-rk`PN3=z99EIkV<8-0S zFeE7PJ--vgw>!Owch3~wt4;^+0vd4OT?JFs0p8fRlE4P;?AZSlyv9eq!gbohsY#GU zHcQsuOIQ|>C$6zxf8K6ZyobxBJ1TF+w{pD3w0zZmtS8ywC8Q${V&K)>My^?=JL<$b ze!rtUU-7XAL-Pt;fycbz$M8e@hVCml;jdus5sH3U56|dBKAyoG{@JbMSm_Plc&i>B zlkW}RUg`~xfTg{?Sn#uOH?JUQ9yXZYh}ZQl94G75JcPt(cR0xu5_x6rlJTbZQlyy# zZzjPqi4v3>Z1AybiA_UlI$2DkViZr+hO)&o2l&VtR#D3g0OkSe-l~4&^jCs))E>gwq_OYM#SVCc-Iem2Ql<)nmwRgH2Pb z>fG79L_ANess|VB7iHNQ9Jtq#kK5ShD*@Ohr1%6EBCt7`qT2}7sn$2~t$)B{_9dt3 zaV_)>df+vuyR;R3UG8i9Vv^A6f6JRG{MMU3qe54&+bLuOjOgP`YsdLPIUDgBGn|^0 zw^Knt4n(3NQc>@FjcssM43bvX3|e=5Ul%YI$y1_qmmJZ^MJ|Yr*jQ%p18z_cl(?^* zn4N)sfLN4utOzp1wS(T^hU6%Nam5VGbr#PU>Tjaaryi2*qN z-pKqL>>$$9@;!D=#n^3%X$bTrc=fd|frIZ0M#oBD6flMd`De!YO079uwz?T-@q#Pk zMYT1Db`t9@iW4_@Za2-fG%c^H(_qpoaxFSgPYGB^qpono#ER|KBubL|v78^1`QR=`S;=f^oCu&4;c}w4j-x4*sRcZ;i>Z*S9=*;Zbukz# zo#g&!9F*-((!Ck=lcVc-EvcV4dA;ewS;!lnM$IGE1S$$n$xbLS$G$>Pq`ys-QyGho z`OS#5FS=+jEOD4WXx-75-75ZiU@vFH2J2z&7@a<8HUKH)Ov0!3=j$a+;|Sde>|Iyh zK-k_-li`3hHdAvowEfWVrNzv9px&T@qPTM;(^Sft!ovV zguOhx*J4x*s^$9fW;Jpc4NdFoLsA9R`@@{rUv$)WaxU#k6t_Oi%WU^ggJvcoF; 
z#Yy97{g`&s_V;v$TQ%Nxd|kq2`ya);L3P>86a6HTo&&H7xqk=!SU*O-Ehjge(Q$kh z0u+=j7wh+oSPoYaX9$o5_#%o`6V)l39mmFI7nyDlYbYD?yExO@p?YBi1L#r@2eK_8 zR`#S8>w#xZf?xa#ZkhlJ3UIIp>sBd41DAM=yK>a7 zhg_a$e|V=1QBt9HE7z{#e+#4a9FC~nl2t{nepfyq0v;~8D(=5ZkQWJTi~T7{58~*+ z>>PdYb0==mPSysQG44=ARw6wj$z4(`=J=eOW-k1Cn zq4w;GT*Z(Ut&e?`+s^*(QW)qBOJy94^;PT1YIG6g5{aEfDE6av32G5XV3E&i3XM&N zOHH+%pa(Vnbh_AMpvVV56i@0^tm`Jdc5jydA{@cvx;jp?>u7raZ6t}$syGWL0HZ$k z7^3&@^C{@kcr)})CUf|TTEw*SV|w9nrbUvilKWgYCk&jT?I&8&{zSa)L)x?(TlnYp z)+yRU+oovS*KpHQ3i6!-l9#V3MYVGxAdefSXwz%=aZx)(*XEH1wXS1-^vt_KzWRLj zy1*eH*jylDB8F>rPDX%vKb}7$TX*fM$SvAs^Gufu>hx1YEouZAbD+wwR{acemt4u7Fm^`} zM^BHPp`oiHN~@3bfxhJZ+MfRO`1JRa7*a%)_EK`)2xg7+C#)(X)g@<2iTz~Gt>$dA z)RO1OYO5t8-`7}r5lJKEk%$xiYlHyO%v#Cnf|?Z&8`Vx*h{Uw5)2!a|t|PsD+nS(WpDYUc$l~mCfXZE6)~}Pc zGCPNBa+PQG6BVM<$WoY)Wx5mc#P9`U1BG7XU2R47PEH_+mp5|V@nto9%S8=tohX$N z3^0O+{g0r+u30aeN`Ow<+tn@7=V97i@o7O>X z(L;R1?x16uH%X6< zyw|P7EwQgUCw*1PIqAQj_QKx83m>@ELgfjM%JGEn$|w6DZ1D^Nqg@ATK>_axZrwpw zCF>tsM~8hw_#=6K^oMXDyd7)c{3q+Xtl31Rm%Jc4TWR=aR$hEI3~ln-RQ!vHH?h=` zDMGSD$P#pMa>4OjdxDSv>x7ifP8JUS5vP!sWGhx`AS9n}T5Dhtd3g6(Un0-{5eL21UuA$NM_0l{rtQb2K|<|41MrFY4Sh7hZEQ6rop75Y+Cyy@Ym0XA zo?C4wLNt8{rS%p61;Z{6T&Bl2*|GUZzYsf*q+eU3tjYl0?h15WE$0zspXMeJ?M$Eh zMQD~`N`YJ>&E1PA-5k>xB9~6D(1wE0nqP+2NIst}#+I^@trai2kG!*5P8QR@#z**| ztD&MH_T?g*U*`Qwv=v&Ywy})ZqGqx#Q9zx#|Aa^CDTz%aiA^Ndr6{7{^5#bL+(t%s z0z9r+j^J4-e=Zmc^diwRv+pNXESIZW>amwVja2uX z_yP`ps#Xwd^(eI^jwEz)f$|_TvoEthl+p^pmD(T5-mWPaTKJY_7fp&-KR#QXVY=Zw zoZv$}p*pr4A=yMx^aeg;>(f>(d}7{9`L7*0-@(uDJid(JE?mmyGvw@i5ThH7PmZ@Z z`nL|xJIixK4aw)WJ}zH%$E}nl=GY^lqZiHUr_5H_-M`0h>Gx(mp}X4REp|BbVH9D& z8o27^J84Q8e=A$nD5|-PBWq;w^Pi(kbfY4XpoD+5D%PHi!)siAjr~3{Ub?MS9iq%x zE{Ey$Sp&XGP5LpFeF~Q1Z~S=8?ys}utsY2C_o@P(^ZJ2d_Upfw@5|yWZFI^YK@)AP zu-mA!*^;x*Rc*Y+&+0F$HX{5>8_%mY0uRP!DD&Fb14i?uE%AOC!&{!~l;NLjh9dfB z@ladatQ&dIE)OnZfc#}k4i%xVaE({8|0Di64j$ zAJg;G_^h4_j<>qb%BbjSQyr$g0~|ZujiCd5YonNib4a>rcPkZBxzX8e%*+xu zR}Zo>a3NoguWM^QfbbrN4S&h`dn)ZZQTCty{^B%kN(`Tq=^?DG63qjrcUaF3DyTxb? 
zH4H-S)Kwc+ub;PfcFStDROou3qIbb5$U5ndN+%Qe^x!;wwLozeo5p_`91w zlfS3=Yv6Aie{b`5a{t(mWGxNSY(idsk8TRTOup+-JzeP@P^*k`x@%#jc@9!l>yI(F zm0z}3p}?5~d33>DY5`cn0dVTE)&vA)B* zvqz-PgScO&-!C3r&;kCF_-jOFQwqEax*-GeppZPkVRLlHZ|?I~mk_8(wbTKD^GP52 z(`W;$v@vw)=`kF*ukwYzN*xxh{qYHX^cQb9_d8FY*RZb=KhVHk7$u+d&H&!g9cIerGZ)M3}fodhg~}q5$t-$BVYKTaJ5JWf=p6= zxs>>-=|K&@PXoGnXX?WpXQRW16BFWm_=D~&9&44ALpmdxIG-wgk(&7c+rD7H4ol9L zY0pc+9odqGD-med}}WKo-|kH;NyaiWP3sG z)52+ptxR$UmN%DH#{Fl*3!re|h|hc3b@(X6x^VFd7qr=`7JFD{hOV4+oh2;L?015T z^2r)%{#=)aMNO_OES6C8sEyo6$3+PwXAZVhGy6sbzp|z~z0Qs%YN#8eBaYtAR@mKU zD&Q_oz|=(`NQbFX4h^2Ld!TM8qd_hUxD;C|E2vI`tcc8=|62@09(bObU5^*-`<&fy>JzitoQtPo{TONfsR4U1_z{xtvo`7d{?QvM@QWzZ7q`|MR>ObpDH-&w7BZ;6<-!-}^`? z*7dfh@GY%!qils7tb)ZE{(iQeP|ir^kOG9WayWxth01>?@0d43!TXTYV3|>I72F&# zdsTTB!V}sL4(roZt8A4iB6%BmllMh)d7k87eP-XYJm%vlaXx7C7Vh>JBiIo8EMk@` zdXh61W)xhom$sop|!q`0XY~lM0TLX_pSMpR!2{Ug5z%NY8FtbbBOO=P7x823Zw` zUDVY-T5>xVZqh>HTFUsyK-bCg8O5ZYdVyhbAhjLIl4BjX9>svRXo9>_DM2xn$rbOB z=IZsCb+mleE4s09{yMZk;oD$cB&q2>63%}Q{DI|_Jk}kRWK?`CS&_4rq*nVnm01US zV&!2iU!1mHq~qIcdWIusaDGmP|NOG*ik!NUxrBNU1FrONsz^A=pW+Q)?hXeAEgqR$ zdbmy$suAeFAy84YwNfZ#b7%0$970|#(e1QqPp#2I8#;Nya|?**G*Dq#;OB{3TM%Lw zE=m;g_Sbn^smmR#ykAIH<+$F*_(dp&ly03VZ-S zs5Ha3)Rq?cnz&v?c*Vi)#gj;i&c?t$zI?374Bu1Llb+TJ)jFmX+EHUoOooiPLAv-l zAyA>iYDbKN;7WyiSj!v&`)^U~vOF(U@LT*3LZvI6)3&Znmc_{*nLq&%k+U zO<7qq1?9_#8*53VmZOknpd%7YKi}RoAvixbWAW%5{|KPCn#XlYhpWKl%)vrmE0-Ou6E!1ZaSLg)huHhi``;W+~sx3U3( z(@ylLAKMxp;hAD#8j#6I)S`(EG?AEElu=#9a_r;i-3m+ju8pQ%X?5ZNrbC3UxU!lG zQ5)O@!3R?(4iHq$8o)mU#W%{A7{1?BPg)ADidBqOC_D~3vAu5(n4}PueS5h)1v&vu zkj}D^t_Gyz>5y~MAd7C|S&I8j-=ENTz}zjq8`)39kM6m>a@Sv7fw5qpm^1_m!Nnd7 zx{Zhzq~Xc7PANP&358xvd!$e(U!hP!(~;zF($}l|)7GnVlj~JzT{<6@wpf)9T0AzF zS-lcNAE2^VB6@wXd|FUVtrvk6im}`0j^9FJ#Jbq;h-%X>t#*$zK)D!l(@C=h^L>k&t@D^XmWae`-H-8M0&ZC%j-51N>U<))spFQU1+sBoLfWR& zi(2d*vnNgx4yD<`gF^X$kTt@C#QY0C`~QLQlNdKWFDBZKaZ`|9B+hH4tlyl%{$1my znK#M;;21X@U|49oG+i9$$XBkP9KD(^C(dL4-`I=EkUjQkYRF1cj9Yd*QQy~F$Yll4K^;Hm 
zoUhW?A6-Y*k$}8Bf*Kyk%WD+K_2o;{FNQ>%#a3bJBa(7Hh3^2;s|-Cnq7TcbR{01w zSSOK~KF2$?rC4R7iQD`!jEaQk@sr_}RO~wBTdH6R++wh8q-#Tm6@)XeCHX_hd0-PehsOl8HGvB-Gs+%hSVoIeIu( zkvmYpAi|pr8Eugn#+^5?)_ACQQ0?`B->Z4*H0iI3JTu2M+{X6<^IqtXWJt zB->DlrH#Z(;Srp9BK~`nxz_CkxWIT`;fegGdHUB=+pR)v`iv9M*VypHA!>kN5$pjP z8}<*eM<_6UOnA=K;6|WkM7-D4tkl=>J@)_C0u2(}j9u`k4IGo6*%iB_p^xS0+KAc; z5;;j@Y`ft>dU!%k{00_ZWscuT4E)jqD4Yf#)A6I&XS_}xWwJt$hy_{pSaPcngssjU z+Ur862HN+qjcB#91Ej28im}AxrT%M)cZUODLB#{)T2_G&UjjuaaRnY0o475}?{e=m z11@Rq<*zWB_M3q^vZiAgqE-G#Y{{Y`N6?KXm{{Dk ztz7XXH{QfQM_SmU6LCAH`j&ktzYx5&Wa^=K=ZbEk3`cp6UJ`2Ad&0h*qD|}m z!!|hsPV4>rcJ+JVa(>nRt*sn({uJ%oA578i+cQNg&Uj~vw(DKg;Vs(AIp=rcNj*<$ z<;fbJtd-I%O7~M5mC_F>jZxp??<0BI`IL8f@+x{zIK#RVfhPo)yt<3j>nLFbv|~qX z(0V$coiD$sHD%?nt4{~tI!O>O=fAg6;5Uc@zd;oE4c?MH5S44o7_;xivK|#34wk^E zJ?bjeo*}J$D=yl4$?kwfPBzIi5uUtMO;6R=*^TX>CEr})3x5kYr}uq@pU%8S7^a-S zgTBH~+~M)X!K0ZaL#Lq*WevphDozLiWvN3bIHPRuag**?>QE>}<&zzQw|+&9eo!`ENF$BE)vZ#}P#j z-wKagzN#!deK60{CZivwQLp*?08W{47y(H9rUDerkfIem#;0Czxy6K9Qm1~*t*t5E zt;U(LVB=Bhghus9lrbaO4(xGJE9eW1<8lS@;|1REy;!Q@p?%~ny?(7MHrR<0)IjB? 
zIv|x44DR^E0W3*t0bzIaZS}51F?l!w`yM-2x-Ncc(#=;rTSzGOFvfzf`iN@h3Ew?X z;zpE*=hMemlkyQvzm%<7nbA(?Xm(%h61U@b$tr~ETd9$3$noq{fcwvW!DTeC4qK$L%Q)O}jnD@d*XaA?Q*GjeAj|T+~rm z_Sq@c9Hu_rqSn&{i+^&wZt}G%=KWKTal$K7ZcGR{UPw|HqA04gMfMeH9kAR#pbrMQ z!!yKE8$p$v2oXQ0yNDvfyxXN0lAnGj7A8n_aJ0uhymALswH5s;cRl)c@OaA%ckp=Y zj1joP+~RWMR>5y4^)os%^lq2hcYC^P-^s(CvMhJ2xTZJeHV?P4@ClmX3t0qqFz@aZ z`w{__g2!5B3=SSsKDQ%XG$0`iFybVtLBgT5C9igG=iEBm z4Y$cbhaqAK)YDsXw4#SQIH!9?WX?Idu_2br)}4(WKL8W61?s@CWGj2MS1bRJzoRl< zM0;R-e3CBB6N(p9^p<(S2%dl@{n_=5XjB|$RDh#)t1J9g)W;US~sat?5{`@^a zJ^s8+V2!;AES61RwAb}vaufIzl9v6cy8~QreiQrCRi}gQ^VWa73;dU0={nY&IGh$v zzy3F|{ukEGED)Ijv^6YKD`P!pIpFW%2LBa=J|9G=*z3|n1d3MnF|AN}nl)&=1%qYy z!&pzTxRQ0I&a;JT7&FxQ!Ve87@F8UwSRTG_0IN(L@OVIR>h33RG)G8TU_Sn<3o~Yn zafttCC!Y4-NCj5L26l{yeb5YCk0`X!bL4GjF-+3G$IQa_yA4f3s!@AAd_M-4hxyXb z$&K2RwlX9Qa1NY7h{HZlDF08yOkockNd4sap45`B>>0#86l+sZHR zpwzGf1h?p6Bt}?~iBq7M-rD@TZfvq{{6w8&IQJ_WGy<#R+x1Sgvj0efU1FF%(IZUOzHN z21R>nB>9d-#+qCR)6Yj8zK^itDUdfIYU=iDMfr}v{%fHTD*f!^KD zl#DE3VwIj=w&Zvt7}Gi-BWSz^<}wrxpfIEX4F9foc}oe*BF7nG5ko;H@{6%N5YpQ8hno? z$K0FstnO++D}bzIMfq>^OeAWu{8o0MH_rRU25%we#i9jOj{kd2V$+=8|081D;v%AEH!o*_% z9a0t@w((ZH#g0$$vS{Yt%Qi0Yrd3Pt z4)bGo#cljkdQ*ncmSTtO9uQH*zne5>nSsYtteo^tpsY-E_do5#qY63)Zc$abS2}Rt zEjP*}E?oXMHgqyTmT`%LBNihX)qeg}$899@d=`zn72`0LO8a(**Gr1AWX;O|cU zaw)%`U$GaFfbW8*C-}4N*H}*v%K1{3nfmGQcifjT|#WDX!9) zM|Hm26%KH2K_0sUdBT%Mv*+HdMP65%!Z3wv@sLg{i@jE;WI0sw z7PQ=|3p|8;K%s{Da$C{uo8G`Au0@%w5 zM?3zp~E=UFQmSA*gX^UcSrU+cwY{CoD>V%2SsQm*U90 z72#HN#SvF+!m+sqp4RN+73bDQI|tWCs(mLM=UgS3k4;w8jOA}MFSx#ir$((*+;e?! 
zeX!d1nO!&O;~VW6T;J+c%1c)2@JhS=isR#}OdH`d>a2(g-65X5sU9l&!9LN6=U^Pd zHQ}tX@XSHDBDzu9`Y+%j)Q}^>43zvwIa?7=<7;c#gL-6!8nTG?;lAwZmw zYFp@mSk+m^u^^eBk9B?d7HViCT1zu_xT~iVzovnxHVBGf3pUT%F-4p2Yk4I|6>s5Y zeL;KyR|K^%d+)WJ;Fz4q)Z%EZ{0Ld!!15MAC^M*&=CMxwo!8<6EA3eOr*_=VSpO62 z#Z6eaBKe!A3=dG)lLxw_3B!Y9$}6V`_YFB667B#8Uae9@D9R~zb6}s>9HYz+a$*&7 zN&2DHN!yB-6Sft2>^FN}aXJ6_v_Btr_**@kb)C1UO)SulV0zH#?SDw`k2SjYx*Ts2 z>`nvb30~J0Z~s=ke}kUeoU%fA6#!?AWPmvl7jE&NTeMZi7x=Cd^Meft-t>|!f&G}e z7B<4KLBNDtFDklOt!` zzqib&FWg*uxV}qZSFC&T{6OdhY%caxU{*ftEWO*DvvRyuP5vltRIHT#V(O4F`+3Jf z`C#-)noUQzj8Th+b#qJplX#Z@3jWRXd-<36BVVGXZr~ z@G}HTn@-W783Uq!;5N8cll&r!FfAvHeOxnsr0QUJ9m4EF+C_TURPk_d*mbYWp9Y|r z09tXE06EVcn?m}w7WshxOR;;*$NOQWl~+MdxA-gbddJ2G-)#>b-xnB7yN`~bFoH>| zc^mC*tGG}anBPxKXN;ZE87zJ{NpTvQkdFwbi0z&DMX|GN{(^9RcCHFssmiKv%`Y>i z_oyp9H)BeDDe4a8aitp^K%C(5HouynCwu=j$3sthu#G{ed0^JpSTAyB4HB=b!p%q% za%K$hntio0iT*_l6%u@E8$Kt<}jBk0>39Es8++-qNnb%pzmxeI?V&?}ejKEb)8kIQ6@KeHzNF z?0YWf`9s>uJI@u_0nnu>VoloE^)UuuT${OmN|`n-`#4hUSZQ*7F4{WY>`NpE zl8kQAUyf(k|I_;)&}%=;(fc3NT^l^wpBuRw#8(J~hHO!2+Q7F991Utm*o5B0rv4ktZ?J_C_}g zsuE8S-C46>rWZb}c5k-VwO8Wk0`dNjeFhY{|Gg9eOsP3UpS}@+$(hkYAE$jpA0OH` z#jQEu?C8E;nlhQHR^sNpSB*}J4JhVH?=?G(L=|$_(U!a@z28_%0KV&&$M;? 
zBk)YyCKp?5z_g033cBqIx)utz$v55ayyL^HNdn8`KXlC>#((<*<=`KIG9c zufkI_b_L#&O@Z%53;DKt!TTpb#_?v>M-uqilW+#^=jp%JBWNf3U!ykq6qPXb^cuVL z(7P;$dCkOz{s?DYG@a-VXR`04?=MXHovp3>JAi?m7ULl+9uuzZXP2pXLq>9k{G;ZP zHQgT0y)uf-<@U9S;d~8fv-FZffj?0SI;A-Yb7tS=|MFOKa3g0QJzN3{#rPeEzl4^Y z@!PG&uZ^7{@D>3tPoi84%54?N6q~&LKVAECtzK(E!;NVwB-Kflci9G9b08O2(XLZ6GUQ7$)pYLj6}CH}N<3yca>Hn74~7A4gdIQ1o*?w}j+byI(LwAA z9^)#HaU*)}Zx97!L?7TG*7S%1=Xap`HEtY4W(AV5xZHIkuFZ(P!Fx)kf?prdyVW)O zoa#>OLfMZ!=A2AYhqe0(H-U?0-!2~HLnW~aoybEuUiuc2K<7v*^M<*L^ZL%%9JrAD zINlNz^R>jiY4B)bv|R7SU6ueWFV|qJ?y}p*?$g*A`(x_+adq4;I;;jUc8Md;HRoK# zXdm?!9z~XB_Ps_YAQNGHLO)?q1cvJr3>VojWC24~-&q?2h3Yg1_s|Lq5nuoud;P16 ztxge}(7XAVOMS2iF@00)8MiZq;D&RxHTI>{8;GL)T^Wx9zQQfCjS^BDr>FxyFmASP zX7v!4?P<;_6rE3D6HyHmgkgIVSEp0UTgdEN9O|Udd_=-cXJrBUuo+7N7ZKstTSB9a z4(RZhI}|6hCiVpSJsa4q$?swvnFQG$I`owTlKh5l-gy@A?(_gJa{nzN)--k|;4L~C zUIxjAcNZ8aax*ziTe<`98&b#_FbVH01#ca@K!>4bPr-J-E)8Bj?R+;-?J3zxESjx8 z!GH0*@fm3EU~di!E^C!Fg1P1>(EXsqicsiYO_`wk2T@i6C~2Qq!zA%PH9f3nE3Va6 z-0sq~KeX}JOw{F0axKx9IYVTrat)6a2`PS=6*211@ zw8b74eanYJBZp&O7vBQs>mSJX`r%zC-)rWtL4wUYzkIJ=zK0}IzQ^ce;8lQMM#>fj z`hsx!sY>Gg@MfTu?;twGc%!S5CL3cEMf;2g`v^5NS`61?1AWE_g7nTP_822dDc31x z0T_c$YzDT%H{RnlaFk2eYvQ5X+w)}+Kku_1y;Wwh_l?f&gZAwm+zO#m;a0n33yvs= ztEJ@T+tn*gs^o~Z(!Oor7P^uOL+p}Gb_FT9gc9gW?Pnr}MwM@NiY-oS0d?5b!J{qydBLL}YgG%V;tsYWxcDho(HHqAxORH*_@EgV zNB<}{wOWv%q>GktP0}voX(>C7(89K4xTTZ8WqOBp0=U$t1Ae-c={(Bp@CaKV7QWa` z?%-Yo8tqH`m&Qh$xAhAiSMrQ@k!M^J+(V{@eSxnMdFs(GQz=q0fc8G;Q{ttQ8T~Dd z${;|9xbyegrN=QhgLnn@{HvqbsU zp*oZ?{ChhTdQS&gUcQ7GvN}%^?jzyvC-OXW#E|b)w{hHE8_V&UgKuV=N2{W(xC`H) zx=uc`SHH62^8?rWN{(p@*U^X%99{G}7arGUku_Fw$!AKo`AL92!sRX=b}sTm{GFzy zR)i@vv!Hj-`*7gb&_6bz>-`hGE>8VT%UlW`4+1ohz6dSu?6ZlJTX^u%FU77-wLrE_ ze^F@f{Mx-aRD09~)72@rz3|{{a_@Lb)(2jRb+#Rv$dr;U(i}cRM$_!=moc~;K&?&_ zRl!&a)GDyUirlP9N>mBfjCIy^l%&2b{DPu)?6JkBhHxBLr}>w6Q={B{B0o^}!%%jb zf4TJo!3SkYF0bNNu3mNA%BMBUm%G2j53dH`6Vu$vty`0?8f__WasqRjPbhctqero^ zDj-UI%A;H!{#dz`A3LZ$xu#%E!3J}Y@+;qiU%B=quZm;&f}j3brhWi<3Fk3s~hvF<|qLDqaCtkJSj#lIGdUG;_$G-vsXNjIFY9fI23( 
zpl!17xoZZUd#;tvW#5FL6@^+0&M3{wdC+=sy(A5bW-KnZ{KJ(u&oyW!TTXH3+1^O= za1;VF6%fEniE zMp3N=y8Ok(Z4A!Q{bE;|cjWUDC6U)JvqMTyBXl%()};FccPyjp(@z z{s~3M-W$``gENEYntv0X>ra6v|0H-ylknVpI(WieI*!j88K2q@vm`kMyHNTFZBC64 zDI<)=SkX0I)^ALdQnaNj@}-3SZv6MPN{f_sWl{@WJ^P>(3?av(m1RFmhCGo#no zb*Hr8tS_neQ-kxx^bSAOd-ukZhi2L4w4r(NOTay>Yei1hpz5;jL0lru?#p;3*}Y47 z?lpPje(^FcJ%#IOG_XEJ9YyP{yV6_a@%JuF4EArvok|}v-uMU7n2fnZVey8~wHse> zET4~P3&Rw;|E|8bYV2*_Dc~89R#5d-N)}W>EE0-RH}r z`1)v+lGCAl+Ht6i8fQ+AF=At#LeU-GPoYTK0`s}_!vVhcYUZ;eJ%?XULlVgv$f#(G z_QG;RdrHp;;K@6T5vdEA@MQYn4{`-wpgHO{<3 zTX9-S%(j6LN^Ey1F_zmAr*|rx{%3=N)~=Yq>7D5~{cK$aP#3=@Q!B1oiS=FbnGE!B zh`SeQ3?hPNc6>JhVl};al|t18$8GFg#gyCloQ}M=13_{<@S=Pmhfo^+?k=p?R@Mv7 zH7op$^_aT$J}G0Y-GaY;Z<}J=+5L5{hCMpPnA3f;h?*80xAAt{tmhTpUa76zfR62f z^{S0%kEuuRD|GPoO{T)e+lkqmMX4yV36QqpTtV20*^RhF#rjFgNTzz$%GkTC1kb5= z@@|-7cYmbq|(>`#Hq! z440f9Ur_^?K3tdc=~NS^j@Qmp>Am&7!UNjM1G+ihp?9Nf_!~m&k4)jJ33HLtjaNrH zrwx>h(D{(xgAa_?%~Ont-J8;g+!?R#%~1DA>zhYaa~({ZQs)8X8Ip54bMlkUJKpd$ z?%;K3Hl7ESMU<9st}%y%tX6w5ipnA}`lZsEw;LIIg)qI4~Qae43IDt_2p&O?Dl ztL7De;-s^9vLy}%H`p%l$O*9I#s_|&9E!vReiU&Ha5kTZ6A%u^=8#5)DskkoUEq_C zR-`?0A`00jq9mmDm_3V}(GK0>9GT)RY3HLb{>N+6{Ey}SlD3pTa=44OLe8r{pmUB< zo){%M2SNJ_KZSKl%*siRelL&mc$Wz~=~1OT>UP>kcYXe&zeb$i&>%9Hne1EN=kJgF z)$=FzehK-6Ew9530Jq&DJ$wV2Fl-qpbXaS@gfQsN>EYL(I!fiN5wVe3;|#7;$T_=AMJN%kOGkrTGp zoKFMEQ}$2bjaKIZO7fJANUOXV1mjFSq>`tcvX>KLi%i&{oUcWVClV%W0tSuP(@VAc z?P_}!I#Na2lPX`KN^tH@&f?8!XYs~U&fDm)UJ@4~ z1Pr121)+FM4)L3EtGrOuHpNU8k8(RU1AM;odk4AL=BT#uT> z%zj>YMUgM)*m|6gpmuPE>tM;TS=~?;c}i*n2tgVKNM0DT_rJ{Lbe5<2NjTc^d3?K}nE9}=@Mi67bO!@I3P ztCINf7rWYC;8G|g?FoK#r^=b&$G}$r;uQSIlF~264-fv%xE$nXWX$Y~EJ7~ys{8Og zTM(0L=h>|HyoLW%<+^L#`n5Ioe~Nybc$%&qGyWU#-XA<3Ka+UxfBBWH?!b36@ zWqiG#06OO=4|GCWv9_pKZ#h(rNw|99PMJMo3YrrCrA|WT&V4|DZ*=+bdLn4I(V2`X z7@nmfZ%ZmH6|Vl!vkLrXYxQ;M{Ko$h{sKVxeE72wpr0B1Z;d%|hVWmVfdAO=e+~bS z0PPFMzt8F6ztA54OwxQK)<45OoOi~3n6|{3gRe;+QuO~{#qY;FGQ?k;&tU5>9sK^u zuz!i)XEeS8zi@nwGala;^)tm0&F(_8wPR?9@lEE3TJTXdfI(Ty*m0Db+mt?D*Pe=t 
zl+%KxJtIPe(K#ODmPBfjXaPJR`g1)@&VZZH?C;QP_kkZ-pVCkULE$h_lNb@$y3~m; zYzgi-zQ&o?u}`pdK>6%PZ8=)#57hP;Z%N?1wk+)jwU`E&w45Qy8v&?UKZ49t7@cU7 z)Fewp5%yQpvi}*9JrnC2|7Ed$@Ish@P<~&Y7-3<@GCz-9G4g^-Dv%8zqLB@N0M|QV z3y|<1BYEOsY2TceZ#~Fv<_#0lq`v%V%7~;=zC=C_yi4gsT&vp89d@^UGXYCh9R<@n zqO9P+YP_Yd{=opxlk$HSElkqYW-T^boj6eG+CSEZ4qWfJhw9s680vNC+fQi+ryK%q zXr9pCRt7U)yg6Kr+QRufA_71}fYvH&*=Amr^^v_3|G`@s6D%F9%(stI>6jfzT&sGP zT51(lGpC1_OFiosKcgjLs71dfP=}pdWZrXN*0RnyUB{Kd`{Y%zq6EJJgHYlz{R1$^ zKRBW~tABM_*L-oi$9aAL3aNkceXX*YJ&$mbIU|C@3W5?;eUDhFO(D3|77=>0hg$2ILc7cfo|HTuX7ed=L$tW9N$v(A%V>*nKhx~V$ryy`M7 zpR9wJZ~g*64q=?wS^gQvK#an^h>UM!d*hdTHNtlE;m(J&`SLd|pRjQ-$|~wuGF^nJ z;`BK&F_GLtX3|^%rhJHI#xoS~-%d=`QG>X&!{Irv;do<^rHvtv{(+fI9hb4sDI3u0 zxkSbD98s6mKVDaslTUz8+A#Qvis&?o@cojmPFj$3b##Ib@C&4!Jt{5SI54Dh^lZV= zaIP=>bv5>?WKHy=!HBJqCYZUmaqBt7}&nAc=c+Amo(&d8{S|HKMw#KXhkF zP|T^3VzHo{x}lh;a$T5_iFpjz#4gOo#5{&DjNF!2-f7`R^K z0V(Ukyp`qgPp>>IzFfW%0yDg-Rhsk(pTe(Z%qGeu-)h#UwifG|67K{ei1v__3$;Kb zSe2Cr(l(;229-$jc9~83T-y7i?mEz+BD(8*Dr5Hd62{$IF+bM(zlm*iuMDkPe7vxq zoR)<-Q4@>fOX#7us1RtnsZa|t0?XtSN3bV#sw4PQ;xsoJ6yOknwf;EMClHT@S7D#AkBcoF3UH?~p^XUad^lJv)!bqb)oW?;j_^>#%Y= z-$W@~m@1z8v^Co)yMi*|$2ba1z!ag49kNvqOITZE4`DYLOpIL>(3X>@ie@XZo(Sh5 zp=^C>>b*u5wiCTyZlQHkq6_<4y(Bkcp|KAB4j|LzA9j9aEE241ek(9ew$GgMF>`LS z^{d0j6ZO;f&-+0)J_dI;i2yXOVRyQ?i~eHoIb-I(_~E~>rf(ThZ|1(1;lFB1 z%~FIb&8*c%Em8I`0t>!#G`JzlXlT`zogH{5`U;Oy)XaJ*^_Nn(6c?V4xm=z~xP77}%A-E; zZz2IgA7^IYc0Lvd;Xz`(kz@7ElYPDp&G4u`Me7IFN(dOwwvQyUjfd=eA;|&g3t#D} zeXsL)bIzsgx5^r>b*%ORnFIYfm@Zhwv_ZQr+C}E&DXH^cuGdF^AdY!pt!TNjW-JP| zbP^|nDslakmqu#X^N8!`;>=-rT9ughfXt0`L1(iM1}TyMbmP|9dVR3QzRRd9o$d)ehHcEC@rECuP9Bkbdqe_i2#4@G^JT4t5hV2F^bCI=pE<8|N)2se z5NG`)QI3Bf)qcgT-~pjKl5=gj=CfJzu!4u+#019H8qYOWb&K&sJRI!9+NABssC-SijhXVAZlts?9QKvw}7Sx+5i25R_B!7Xql@s#P}cL_C&B zaiZY)Yg4^2xR!VS&b#T<2ZxwA58D1+3qG>xebOqJR;Sek5+>F5WCzI=ybWqV`r^g&})y-#YV&0t~*9Yh)>zlHdH?#i-A(=N^Ko~XX z^&uEtcsl5{Y=9zIn7w7NuM#{c2m5<16o*asg|8UE8NIX5^k0}HSq~(8p(5EMRu#=a zvg1VlCM3JA1IfafL$Z|#lC?MTBUVi}NOmf-<;Hj=Q>*$r6TY~7Glo=+)eD4d_4)-t 
ze&j!|@hDWgAW5}usP@rxs`Z5CTqsnFAQ;}X>_VGr|Hz$ryDM}I8|?3Mtq9lH3Edv+ z6d0)JHXQ*P36Cn#i?a z-VeLd_0il|-IhS%k8=elcOh#K2NVN_!b=BI6mAb-8igORZlVP;cZtK=hja|eYqZ`# zfhwyQOFkn(sseTAHK&)x&gZL`?=@P#>`=$zRjYYGyD(F$bV(t4P#VKj?5`MK$p&2& znZ3?!Ylycf`6`S~au#sV^a8T^k_(v@kQJv%MuP{;!@q_1Z$ocbuRf zIqFF(6Ck?i*u;=D50N36+qn4d>DE@RD|h+FdL&nscrpJ)W_9SKM*p{nU4yVWd?O!v z2cwYHJjl?>NR|8jpwyAdRdPY)z#F>xf|L$$#jZIOq~%Of0;K=3C(Qas7eM+sHy(2) z(WN==1Cbv7v9zKa=Xgryv8i!o>OO09fW*=(E7L~zYW+nd*uTo>(XbEkES^^fJmI>)kNonnG3 z9XF!C&ytNuvAXuoG$0M447sM&tFAu1yat^KOs41MB*u|Z#jE}Kh}ZneyGXoaoolSI zh_V%a+2;jccrGz?;{Kai(MQ`Hm~lw0=dg-~eQOx6tIv5z6!|k{G|ZlFiD!f=X1*g%OMw*x-|CBvI3ye3 zq_&6o<7hlvqDJ0|&thMad3wT6sL40F`by^X&G%3D7@sLm>G!Ct&TnD2yg>Tsu%T*J ztq;T~aBDxTv-%aZ$2~*K`u3T1V|0yrfyI>&imymb8HGoP-p=a>$XROENa0D}3)JvE z?792q>xPRk|JLjMoEB#WJm#H!IV|sHceG}*2XkV3V14NA3q~UGx$1%Sr6aVA!UooZ zqQk}->$kk88=0~4y3WacAUS?{4wY2?90(9jr-uQPkKUU2=&Z0hHk_??Dak`zAp1d` z%N09U?pVQ8@&ugR)CI9uJ;owg#)M;RhKEW_9c*&BO)EL}xBb@;$8tJ*%=}X8`&8tG zj(|jeDX({cE!wI!tNoU&@v&FH`9C9@6LkFmqX%cbRs51|KbtyFeeh#h#ZNj#M@F$x=$lKO#MZhpsNAht}B}kH=pw;A%Ci zo`S+f36b8AFxWP-DS2|*d2Jv|ui#X;A8ae&3d5RoJYjNcDRSv`KrVAAKlhM*}uU*P#Xq9c$bqH2;n#6P!OAC z>(htN%@s}%+ES;s^I!z0suIzoy)aes2uey$|3sfTYkFN(0c(FJnONdK%zuP#5RMzX ze^09<4Fi)6t<{&xaDf=|FM^Wa3A#i_^Vs_Sk3U1&GdZK&Xpg?nlC6ic&w5mbp-LDu z?%HOnyXp4bJNBKOZzB6O`_7h9Wv{UBWMksJCH9>h7`R((-|6<9-@Yrg?>5PuNnn11 zQ+_ymrF!TF>$87jUm~8!{=R*g&*eP(5;iG&s=9Q&$U%n#>OYpL4G6wsxio!9|E(^u z{_9Ic3`Ordi~`g{{^{j1dbTvLd=0;ZgDU?E^tPHUs4MrKrqrteCh%0<=tl@n{GL`N z+nmpsKigBkC@2l{pRp4sKW{FfOdMnij1_7ebEO=-t|Oz2$Gp2V#WX@blFj=(=W&2= zWpV2g?$k^mH~5Eu)k&+oNe)-Nz)3muB}<2kG{x?%wd*S~c>ibxCTk_v(E@q6V4a|R z+A+)Clk8^6PUXD(i?o*?lWWNOk5|~|AABdUi0BuT?%6EtpXCZ}**SABQMVXjZ6His zgOKjIfgQ0dwO*sl#yOo3wzGwS`mz}9rrT5L;lBt5>c)?P@)N#mwuHJOUnZfJZ=H=8 zANfQ02kIFNfxE`o5PAnfwLjv&R^(>F0$8yhIr1^F1$*bSWDU-nwb5CEUE(bvF1QnB zqoaVApo7clVBN^+jN0R+)?#ibwChEea3>nq)Ov<5@P9?$`r*1W(?^Y{67oUcntQQg z4k{V1)cI8GDT?u$w7p!vP~VpWPMk(xSs?m8LRsK_0^g2P;2Em?zNGqaCTiC?m8uub 
z*2w;{XC%+pHvDI-FUB|JbnrX+VkZ3z0e^Vynd;Kkh8(~b7o-ab67qq66aHeR?lj}C z;74zA#_&JC=1k$wOu)aP-@k_cM}YQ)_|xa~@c*y$#nIX`?Zfp8ojK?%eMs)_)=IKD zrt=@k{XX>3t%zTP3}>|w*e*c<#Nzmyzk*->0q+QBfq-pL>}(pd=_xpOJNxDVwtuv7 z%92=?vu;}DXo~Fm{`b`>;SK)H)GDV^E+N8NG9_wBzvw62r&hR{065DAel5vS z;^$&uMwoJ?8K^{RIheP2w!J zpd#IA*b|;bN?*kIFt_Anj$3P2!yQoI2_yAYS>Ac?IrSdOzpK2)Ls#W+97orb&x7Bl zHNL>E6e`Zps{Urf{B66aK7)lcPO9}zfy;{G*7PS)y08G7!^b(#oo&_73)GMz58+{v z{SfQ1R-NQ}-wWAoo^Kb&Ib#wD>1ql^|0#Qg;agHyzKq4Vd?`h(UhRcTv`X0}6en1| zggMhHf6X>WdpTN_c#^X)F0nEWBRUCl_llsWN`hehGjcqx>T5Dca7*0EZ(m zhtFV5VN8u;Jjv~W)GGfWCwS>H4yA|S5CjS zbbTCon{2Gns_NK|qtD5nY)q~Zr9r~6It=om!JX_mW5HASs>Amv} zAVI>IVa=CTMb`uH6+PPHv8&YG@P2Wux6aj>dN$!v1MEu9SZZo4ML4s>`sycwV7skL zsFFtC|G7TsIY2yFAM_5XO_T8sCvcK&Lz3I7(rqYx(})5_rbXo>5lhGL2+oKiqUnV2 zL^hlTZX>#pC;Fx-C$&$IJTy*0z99Pr@C(g(+GP|%#jXJ2yhu*0t9AEx=r8!xT^Hgb zrIzwZr0U=8xg1Ti%w_&2fqqik%cEqF7c0 zX>#LdCx3lUmHl;m1?dmUuY-Kwv92|T7uz7L+pa(`UsoVlHV9n+0dc*W>1NCLtu1PK z_+rOj>NN3`5s-rmfrg5&tN0O6L8lphU-&Xo0T5@TzCe`<|ELnj9V7z;$M|b}#;2ab zH$2*i9ik+7hW7QRt0G>l+#-Y6>_HL>DOBG_FPYpBsE1AFG=3;{5yzfE^dHUmC_Dwq zpSDG@gnuhnRS;=E6^Z^#)?cD~Bbh+7e-8QUIa8D`f$h(iTB-5nK!A0~(d@cv zZ;_g%;z5?IrDM&k`+Vl$diQcys(KBn=25P*G|#jUzO~926>h^mrqO!4zYV^wF*jAa zW)C5?1K(^wSnk%IBuYtLA7p=lm!qFSiOKZ`UA82Hd5Yw6(f4>D& zRU9J$81+ut!oRbcjy3a;j$)qtXdh-|833@}-B5sXZ9f2P(+gqKIjs>8S(GW(-wM>^ zER)H0MoTj)q#a%W{PHDCTea7m$>|USa%G4iMj7LvSY^n7 zS{b(Lg^+)Jsj8T24*hOkC-5JPYEUGD;vb3=<1P|LY^-RB{*7XmLb1ir5m>+5J!+ZBHw51@jz%=<^A#@qI^#T zYdw1S&ieenxb&fS*0;?lBc^(N3OHFWJS7{*z^XuFjlC7nYW1NLEY`D%_hMC@NJ^iAq4-3o$#lm(C9MR=q=iaE-(C3~YxXVmnpg1YiMzqJNfB>2Yq#8ht7KXQ zfk^@-c*x^%;jH~~$0MF&eL%%Ykue6(y)viaNvq^0XB-5=FhUROS;M{N*W10}vfcUP z&B6DL8(MY*JGB?294iKFyu9YH-M(-?9|rr37+;*cY@FHm$hdGX{=7%9usYX=$OSW$J$zG}ZeV*B9&2&k$~7P9W~Y@g z@36c>N45~_YerKn&uL7wsDzltzJmNYGG`v+4i<#7eTJltgrPj_DcmFry0&l_6bGdH z#|fSKN=6RULWLChjMseOZl3T>&H(zBdCZ&o_`+lPyQ#oubn^j(Cw#|1S|*ZdJ~U`uJ_HQ^-k68nh+efiaULSF;Z=}WYSUUQz%R<%Q0vtxRC@X?U0|W2+%}DIU8J% z6lnEAect-!8DE1Q$xK%7NK3^_#GEit;*kzzVjC;o6G}Rj-lSCMtw!24brP%{&_cg- 
z=joZsI#)dNUac^lqlEx2Zs` zsO)W$q;8w3?QO~@%i3nBV>8w7ly~xY+1EJ6kas|2xuDnNg5!d5u$Q7S1}}^;@`&6j z`&N|U^_aIprvsW;4SqIm=m3Iefep+?F9i(W7oypiZ~RmK913i1b*3p-q%A5H5D&<^pm3 z!eOsaoPp{4A;<(;+w2Qp;w!lqDt?CofR`a>`0Nq)MaG3`QSN}m$1*i(nI}9}_ZVXl zD~&DXm+fzCF~9lH`9Q7*aSigF?Y31#YL~u)ruoPOEp!7{jKPvIX5M-rTRP6XqLZSX z6hcp_@tNM<8G87(Hr==lM7gb%Unb!;i(eM%8=ARp;TmvmSjK*WBsEHSexhIvgfvvF zuKtMsXRM6ALoop{{?%Hre>2z#@gF%$u(Wpzx3V-?+$PqallR+!%zgv1XU3Jof?5@2 z#g|pm;G}y<--2cEMlW6rwmie;nuPZ@c3Qu@Rt|h?<1?c6din%!LWMI?MXj^s9Tc9a z!BFxLuQ`q3`?O-F(XdPC8QY(gWM@@}m5pjzGlNFCP`LuzAjsk4gn zOC=$!vAjf3QeosIfYy4eyA6>=E(J_3U9)O9A`DDlFyosUnB5#ugcaG`J)IRv%4;NE zl(?9w(fETb0|eCF5{>1tANfQWuwOIld)4bhdu^`xr7T*I0%XAH+K@y!6Cj{T zE&MWElbXF@`agkw!;~;}piqG~d|MxH_z}v_vpwo~VFJ74^nBtf_3;|&m~Nr7C}8A3 zHaCPM=Pk!37WPIr#1W6QakU*fq%Hgk9COseKlWh!Lb!LMHBBC+(o=osJ5o-#cwtRm z<=S3~$I}6osS|j_4R1Uh@PY7}ld+S}!au;`(}Rg{X~H_m0?w)=j6P8@ZsF%{dT|TA zptUfq!R*Tg-SEmQw8sUnsr{Z7)vI~+68C(fOwGhx2~1T%Y^Wf!rlKZ=P(=&Z^rSbO zw;RL$C%r@ScFX(_^^UM{g#P*xJtk_f7UDDsCFLeVqO9*mF2g7@ylC0D> zIrDTA-IRH{iEd7!njo+HB4|GFHG4U*#Pr7O?c!FJgPrzmSNkb;>sGy8(y~!1sK86h?UEI$ zq~3atTbuqJ`9Oh<4_{Km(dyY^#js}=|2q|7O_X<_gF+{-GYNXh!HP?TKG;pO6n$iQ zWZL0^vu^Yb z%UdA_&OZ}nzsTC`wYSqOdc&6LHDsR)jb~S`))NhzF#T#4!%FAkSVLw!SA*de)9nz5GnM8ytx2Qq@Ck|2U-Z@vg+V2RIb30 z3O4R0pb^}?GPQ=*_yjB8XM7>C&lx{eMe=4S4E~+M;FHHt3tcUplX3lTjU$k23w<(< zVlpS98s&cZ1GX30qi#aX2~s}9t(V&}`__(k!i3aWSJ@?F?Fv#-V3+h)CG}PhZf*US z$kWM(AZs>(9dT`Gz_rCwAK|-k&o;|mzI{X=3BB9yGjEC;t+g%Pi&`ZqM~|XfgB(%W zMGf5T>0U%s-IfAlL+yt>iZ&SAYTwN+BJ*gow?FYZS>feS>?t{@g~GCS7d8r09+LB) zS|1eu35#d1ayFmUD0~)UEDN2@hB3vyh&m^M{j3T8dKT|F_R8lC-=|Ojti4aLc1{yZ zgTINjVIs{si=A9FCYHa31PZNkxFj2ejArR!^B4AT3t_yAwqOm{v4@yZJ}b26^iY;} zdiXO)HJIRfS>SB0mmb<&uhT=t!A?%^;hoY$zx1##Vy`&@VPASUM|$X&9@3E0L)J}J zaC#_vlG+m#$0zT?q=wU!MkD`q-7THD3dM?R4 zB3O8-Y2*aB9{(T~qkjm26cW^zseDEeR-R#Z6h_lt$%XY~zhz%y@eqC~+>8C$FYGC{ zv~uJPm&sJB9Z79sLhgPADE9TL;+>o6D15w^73klZN+^dF-AU>I#r&lXcv;4KVw2}H z-i&4Xj13h3jA9mV>v&c=Pm`t}F8CbUIOy5(q0iW6)5h5Ulr|X9rZmhE{S27nVL(e0 
zl)#cG+opXwYC(q3?5vS_V22vCJg~zr*de6jZ2EEm&auHJw)p=X$1hETw5ts%w#)*l zjnNLIHb$pIDtN5$Q1DpoFA7q)HGmUb1yTvV_kRsnH-M`LE9TmGTJd%E!W(TsZH#39 zNMR)3mG3J+27`wRh{52YI$)~((dMI09nv&`dhapoY&`ot%bP?}2G6#DXIE1!3-%8v zcJ$*&>z6-*+@KrOgFf(={G!jfyR5Y?B zaSK~RefB@OkpCnUd581Lf7Rc%>znOobkK@Iw=nKE}-cc!qST zyww);iW`tPvWZ$o$yk{dab$xfO!zv&wu)ND%zhlENw>X$^ad`|yn&Ra=?{?o!6DmDLmvC76mxsw;1$!&rF#q8Ek(D6V!}PeCvVd7jPaWZz~t z$eY=9udDXd%Zy@cT{$7-#VKqu4Wxig&}-B;eyBa58U660=WKS~4+AQ7nmj~E0q z`{v}(Wj}7is}#h}a4k{YG39X1!JNPc&T!QT(SABV#T?BspsY6>y{95pl(421ku$0Nu4YOE& z7PS8#>b?UoifZfs+$r0#NtOKqD-PkIJi|;#5?s7M_jN#(@j+6Dfe+7hI6;9$#jW%q}y9Y`OkL3qW ze3{cz z@u%Vm1*E^uWPF`G{JCE5aXS~o+yz{3mgWq(83>*t=;qAZ>B#8hyExpW`YG$zFOVfa z@?tm};Pb0Fz;HryQj^Ac*@Jk!nFos%ylqN)tv~T@?)bCWj5qo#yix$Kmxdh~uJ!9A zIK$@a`%vO_PhA)V(4f1nwoRR$!~pv(@R~QYhPTMaK^k}#i^7?gV8`<4t!7UxT39Pbq zH@Wgm_TVmT#h1N!cu-Q&i)F{Y7~S_8IRLmP+x=-9;x)pj1S1Zp?AUT_?!P~o!`AHB zt5Dmewtlu}$R^mx_6)g9-mj4N26^8i?_1^l8hPI)@7K%w4f1}Iyf@1Gt@3`myx%GB zcgy>|@_xU(KPc}_^8T>A^LdXwLw3sh6Y~C)ygwuF&&hkUyuT>#FUk8W^8T8SKi~~-6!vz<-M!CcbE5`@}3~?edN8Lyc_a9P~Hd2`%rmL zl=nmAeWbi6%e!CR)8suv-ZSMrTi$czeT=-1llNSCA206{<$bcePm%X&@;+VOXUKcL zyw8^Rx$=ILyf2XVMe@Ex-j~XIp}ZH#dx^aBd8Iu=%5X2kc%t1rL6r4m>~zNXFxT#r z7(0@&TE$^#Mm#0R>lnN{)(}y7-LPfjAhX7w;6keF?0eddzZ1N82g#A zCdRr#-`IU8Vw1s~EeUv4&I9;V_g{=%9w|-EXIg2{-)z@{(eT; zY{ouitc)>!DNor7#$IFWRK}iT>{7-aW$aGI?q%!+#`yV=vKGd!VeAXW_&LS0-x$Ne z+dXBmPGJ1)wz9sAoy6Ek#%dWG%^2&ZWm6a{WNaQ|M=@5(*mTC$GR9x|F5AplHe>Rs z$YjReVj3Q|-Bb1*V+o9Pbpi7+mcp2Wv1yF)YcT>XE62=W7`;eo3W=Ddx^1+ z7<-B_2NsOGn;7fD*qw|eF?IuEV;S4R*eu2_W~_uUoO9h%7G`WMW2+fE9vCFW9+5FZ z*)1qthZdZxydtPXze7-szD3Z9`VE3k75cS;&JqiV{w4!Ybip8GoTNT=->x!Xm~uNvaJ&(~;;a-+cG=Z)%Aq94_s zKsRV9L`|CGUmESySUAae{4v$bMV}Wme6`j6p-Qic8^u30TFddCReJ(`Uq!qNG{-F( zZL;I}=qbc|N(}v8P^;)C)B2;dp#+8d#9=?Ev{kT8?lr~rZQM@xmu0CQ>EKgG?Z0$e=U!6l;_Wv>C#zZB3+D#7Yp|_I&BtRe$v#4 z;)ZNGM`;w>bb3%d0*_Q_J%Mi4`a&hu^fxqmM9U)jNpm62DS8gGE>QjQI*7jMql+PD zFIEQOBfR%15W{;eog}hp6ZqRC+>pc9iuj$X`k-(}RC-z**P_z>%5)s7RGrtTbgl}X 
zcB(Ffdrlq7ymaJgRHpK8LF4rI1s$R65){&#c&OYc=yajqC1``tZxt95i||~yP;eCO zD}fGx3+0KYxkPibKNobiI0eiw%||?|{VZHpDHs*$l6Ek8bnmaP6_n#c`qN^G=DR{v zkZtAfqX_CHqPVVB=yb&|^YWgm#9gS;1{O-e>G8B($$TuHo>aSj5l5e?&C1L1^q9l5EuPjouMu$E>hfF@PhYxTV8>$~ z@78$w#`B{J$7gY$aLHxbe^vHibpM12^CxxgJ^kqwE$goSbdhrf@CMg*0mp?N|AqbO zd5;(Po1Tx6-(%k1yZX^>abwT#Pd~(^Z0Jwtb@BqgsB_o*`qM+5z3=p+tzCTU`_t`R zEaI;WTKU;XH+UhX&h(dWIq5A>&36S@Guv3LCI{ph3K zUijYB$BWc<^ht&9w|!E9ztMN-HT@~luM^RRe(~pm!+z*dJNv~W=F9yq<(fbpPaqiV z{;>!m_*bF)!fm7frqG3o(yY+sivFg8`qp4&9`-y2ZiAKpbu66YXewE7_x@HW{W32EW z`l8zC(bsD&$m27|*#eFqop&S1Ep8w1C*7_KJ?b~^{=m=o?47`#ONWRa{f>D{)t?a= zPpInY%CiEFm(>BxkvOqEp8=vr_WjCBn)pWH53~QME(HFf*2m_ac^M#jCY`1J%PH1uQPgF!Ov6Xe67a)Nr`rXZ5$VR#Qlh2$^@567f0nekVIF)= z?uaf|3tC!X(b9(wdR3$2yR62HDRiI@ixHPKSJy5x zXxJ2s=NUvZTo{r^725u>goVJ87_6a|<+s!JEn6qL6yxGjRexTi22~z`lWG3d`W;Hk zh?aStQ1M*4XQxWHDmrwKyOak}s&7?&od(e-3vm!_=p*QP__^K@^nf@_^6Tn``bad} zwPPEPnprO%QRyAwyg;Qh6z5ecJ*lXmyr?+OQ|Tu~2fkX3->A|hstfpLHPf!Izgh=A z%)^A=FdT=yQ^ib%vibjuXTdLO8DupCe=$V4%{Gq_nzG^#6#b{^99X9 zic4L~sdPLuD*NodZT(kyPlA3%__@B;piPB{&R2$=s^hru>mZ-64FZ0*7XKuaMy>o{ z>uDg}2krYl&G)&Jp4NUs29G&>pE~JHXZ%_hee9fQm9IPTy7yYe_dZVrP=DxAbwZop zCirbqe6OjrO^v??eXY%}51k1=C$x5%CUHHmZ}52S&eT}={?mA{4=%N!Pdq8 zp?W(lS8#h80vUXD@=*KhJfEN=HMk zQj0QZol0v|EQU<~I9iN{^T(rwFBdZ{S|_>|b#tw{c(+EUXb`;?egAT$n5fuy3qIA? 
zCNq6`I*cS23LQdolf8}!=+MmWkmCMc0ecR3`>Xx5FP(vv`x(0XvRFD#{{{FtG46G- zbWzMDC?YPTJ%XTTOmeqsG-Evq@FtX8tYnnVZ)xbyC*jQ*stZbflZu&0hL5Fv%aLB- zLZpX;{*(ss;XW0(s-LYvmc;|#s_A!Vbe%Sa=zUH97SdkJGt=sJi18leaTlb0ofri4 z88M65V!k?`Av~`ubef`XRx#Z^Dk~TCSSdU8hXhrCJ2Pw?6%rMCF+)|HZHJ4aBBCNM zH25lB7g)S6BwDCr(YbUQlQ^IMD|2qPT$c{Pw$Pixb%~DEI5dn4mBT{yy3E*NWXL1jdhM-kp-~ zP|xFKdi-}PeWPNsHN$oqx=rADxMmTLCU{5W+H}ux@mRV<#BeLzFsyQUrCaf%i`Jog zxaf1IZ@Zg*b7GV|>l*sKi@tX8fJ6QTm=y@WtW%w|M}VoDm6NV^Vhp=L_=QLEZMqL= zvmSH0lit;t+=>Sy5L$2BJVJ7O)40kN81G-L(zSBz&vd!cRv06OnrGINKP(IP1DP67i z0s6ZBsK7Tywm98zG&y7L_RK~-_F8-o>r0hxQet7VxKDxVbe$UiCR7SFz$}{Wwax(^9q}6- zbb(`(r1hn>kOOOV*Lep%=~Jo8$r6uE(a(jh zr5{K0j<@sZs7J({FZ735ONFW>^PjF?AfTMV z$4ludSC`M1QiHq8r%UPOc=vTnXsfRd^bb14N0!jred8}#LZ9{hp%Wxi%CNJS(AoZ` zu0+pFjK5$By*cs9c&tI^=sz!}D~=j^XCXbx4`c5-YUug`I(vTi@0QZF^QVFQ-F!bN z*DOHHdl$q$el$I^Anv9m^u_{&{(iwA_+7bh0m7|alnDH$MO{u?LXRx!ylx4-x+wPc z#q`0V&Oa@p9~Qj=yP{c^h=lBk-&g#M?($Vt z{5CjNhrU3c!mGZng5$nb-YWoZKUH+O44FTEs`w`gw&@%ZvkA_~1`+cJ7`^_t-Zbjf509j02Ya_1N$(DcS(i`04T*VaCT$)XbK^{U zU}(%4GwH)D_m@XfYt{xd*y>#4{Uhm-+?anKNvGw-Jc796`)-^;mrZbg2g=0Q>!;(3 zOr5@%MmrBTF3zV@rkpL#&!_XJ9s;n1#YvxP-UcUK;c!3Uq$Y=VtCOB_@IDNO%OVKWnfiM= zJ**7Er;#Em<5#GIUV%cS`kv5ft%e!ld2R6xI^C?N!}DDoOH_{AclJM(jT)4mo>-d1$B%V}$6ux&gdP|9g0=-*dz2h#8DUWO8t@OM9BL*w+M?@@Uy)zUa@Jkfi z1{O<@Bk6HxVjTb>V&>P0)?x%=)4HD1QSXR$$B)QD==XKWZIj*}{zfDkC3A4lQPF7l1D=@o1!p2*Z?jx@8 z=fnUk8NLvQT48x7ZHMx(*sRfNjkN(uPo#e#?7zgIb5xjt2Yto1t3mH6knTMqY*Tk% zrqU)}$;$k&`9QA-=k+RLK-aUm^G3q{u!Zkmx0 zLi=$~qG!SHvxZcd9a07FO8;ms~)^x?_m{@B(J>A7<2-yGW&_IvMUs@&qX}?=C5Ls z4C5~C3Vq^u)?%dFhiw<6DUz^6zl}8^q?3qRBmBBP>cmDu4`?T6>jRsebQ2G}C-p%< zKV^##haE^*7mP-p@#$XA1g}`)7rIRHOogbo$I6bPlYRV$i3UXvCnm zc{cCy6J{HMZBh6qS-iV8Xt0tD!v0T_+8JHtHFeOhD*ddY1RFIs2v=&|m^G8PUr-0X zh(WMbU#URKA|0)_)6cLl`z`&_-cVV#VW{5kj(<0S-t~B|OQ4@UUf|cp=o@>}4Kd@1 zK94!g@nHge+coLl1bVuM>#PKNtXKSw1bU@c{13h8!(Q?5{kfO-j0CzNf#cqtP>*bp zhYwo8Z>Mtm>oU)$A$Ev_ON>CVtU#gf65|y!+joRxKHDLP~KTc?{zvE zyJel@A6iIXb$$@ZHFo_;d1nz_-vhNqS0}t8o}WY8`*vBgfFA6Neo9yL@BGw!`mBHV 
z-)7MUqtDB;=p`fP+}X5qK$n~5(dz^5MVRvkb-8*T-8IOyXD)3W+^XI+kFFo?y?7SA zaESN5S@f?%u0h&slDa%NpEf3Ogyv)~vUw}{JK!(+T@AD84L`rk_=1d0;*EuL(-7p8}&57RH7Ma_pnWnG;c-!zNrbJ48yOWyk$(%UteW%j32 zyPP?Tt~sJRC@&nb`0~Z{+4ScXIPRZ`wtRJFEGV1ur-JZNe(a<3>D&A@UaMVKOzL@I zZ+b(yLix2fwhMdQ1_Oe#$DZD_#?|TE1iHmD4)oK#*C|gW&}Z>+Xr5MI=f@K0ov!+p z-gH~{*b5S9V^8k`3G_^_yvuvjMG4-!6X@B5ZoofI@UHDm-z6jfukQ^z-RHesKY-A? z-0V}Xo=QF?n7U(kf)xnA_^krlLHYwwIao_K`qO-f^3?=62 z1niZ-Mo;y6{KE-!rH)wOrWnkAEA_Q^oV~ofj;livo(Rv>e=(11lra!{?P=w zzh_raUhdfmluvqc+&w*$uS%esd-c3Bfu8IYe@+6u(JO6p0&Pg}f^vC+DP)kZ>xkGz zv;ac`IBbpJ`%)2yBe>TIE`!OlkG=p?Uno=2#&@fj7Z7$NL?Gc8cbf*wDC^gv6VPvz zF<5Xnst^P3shxm+t1dIkz0hsSe@-)8Q=C#!UX)xDW*4qWE31RWwZRf2U=*#Y4TgY|Q@Q|Uy5{Dl$V$aB=7!SSpEq&FB$NLP z$$ysQKaKgHuq<;F_|FbaKCV0!{OA1@{?i6HyA4n5on-T$=^5&cOR@NmHTjR}z`xG? zJBF#L%)ge56|sW4cB8I4@ZVOZ1J?6_8pf){KMtbn->%+0hm1h=?uoA7J$Cz`dG>Ve z)}eU@$UKWiWtRktLnT3K!xEPVDn=TCit4gJQLuKnCn?F}nJB|>uMLEQNu#n2gsBTA zdpvnH%fdMx0~aVpqU_YebBwuVL92t;h72=&ME?F7OTC#F<(UO%#fs~cQ zJltBco@Be3*#6pW64#~O6y;T@KC1!$VAGi?|1US4$;4l4Ix}v2&30@9(+^FzcRRH; zUwe!0)pT~WVf!>odp1!G*fFK{fmTpl7A!t??`9im1g$BeV0o~{7;DT3RR#ARgHI+2! 
z^XE^h8-4iUGunD#QfkNH{wMp)By;=+{!jH8a{&E!gv$SoK4X@+O@jU5K4Zq)x6gE> zLw-dIqPD&>t8VV7tZ6fjKYsq)x~$6i=tz_@FN|T4Qd3t|6|6}~E)LWz3#HTqmz9TW zYgVPO!nzE@zpf~`I8>RG8Z0i(OdCDAq-b=oI6EyprFvB;J;|S(Ix0EU3R?l?Hc(L$ z43w-gD(h+kMHQ=zqEJmuXhpCjU`#9zS62j9@myLFtPEDw%JBv@mnUgfgR6`n!qymd zRd!2NtO9f4+TxUSe^GjA`l#ZJG=Jvkj6gW zw)xzw^7dKQc0;;CH6W-U!OCjZ<1wR_25XDUAhwHxQjd{(d=+Gfu_6#Q!ogZwYxtY1 zzIIi0Fq~2uDygdoa!EsV6(vShsMZKnR|hfP3ks?NmBE4nX0f0E^-xgoH*>J8{P!B) z)a>`Zn=7pjv?#(~s@FdiX+ofi>!>Cet_{@$q0r{9LKiSrl$BQmQF}F&fr|3uc{asF z`L~V~baQU3_N5@#0!#(+va8`w6~*pXiLBC@Dng!7972aL=1Gm0me6AJ=1iD66{=S zhB15MFsq{^7uA(l)RtF;*~*WHAFR6md%J20RHNm=g1XxBiZJ?XbtqhJF3^q*uEGMR z`f@AdKlrcHhzfhGa>OQCVJMj2R0-T2Yz|t4JbDxowvjYgw??Fhl>j z@ng?Dq2l7Yni>dmQwo$6;h7^i!S1B#D}^~QMIE~i7+R7H>t z?E)VhA4TC&*_gaDlKpa$G{+V=_!E<)#fa>sFb-gj39i7r(!tO*SvDKXgy70rmN}5M z$JLc{^;QOJ%R-p*YnIiqD$q94VL50spxA@;{~H5V+hA@hj;Sly>I^UA+QL|B1hK^a zQ++jm)f}nPM0fau>k!e}+}=YGZ5?fv%3n1ddg7nrygE=-UThjxW<#yR1XU9Jd%3P& zB?l=BkNRfCHKVR#Jj}sUtISsZQx*L8R%WQGI9N~|tYFQ~F2GEw z*G4PPMjg*?hX33G;qPt1y|r?CDfai~AqQNFDbHC&m|Rz7>YUv34Xb5} zLs;&?(lTS-bZLS~o){{Yg8zU4{!~}_-yiJu`1k{hOM3!O9G=Xp!rH*G*!z*zW}{ub zYMU9uat&!1%b?&J3K&fJ^U45O~LH2aUN!VxYh zo5D~zL(8ye;<~oGhrMRE-I1tE9>9U8&kFFT>M3t8eRu!rZeemkK}mTr6uZK|zgpXM zUQ??E_ci6%^~Az^KVqY*4)$6WrvD!fbl(0dEypISaro5fht~yaFqVpKEtuyu7^2xi zjm1SREK!)DYC^EulqCPXmA>~>u#a`l8khDIA^Q|_p1%rudKHZ1m@BK%xiMpwhoxzW zmqxrYV%dO^%|$2-)YXQP+SW`^2uh*a)?r**ae`H_38H8o&xBA_7}oh3$p4B8o{$S! 
zZOF+%9@T-GU_oh35Gw>07QvE&YRLp!>WYF8b1>DIHaCukF$Xig&1YWz)Nr!Jvl(65 zZ~m`y%++n%eQ@fa?Mg~YLdj;PbN#_gRaeQS2|y&X9&PF`(&A{6_tv${DaEXdWCJOA zta^dcjtwDRk6|+~SP@#0?Jqk_ZE~A-X{KsTZ|zi5o3zN% za40FVT8mJ@Tw|Dp&85AqKNTt4g}kD@xQ)y0rOm$#ewQTae3`3 zBLtpH(NzwvMZ>VDRxA$|C#5E5TZRYoM9jXH9=0VQcVpaGKK}A2w5x;T5VGf3bbx}2 zP;EgmBn?a?*~z1%cHhAZTJB7&jUW{jG;%{ED{FmzPZ|0gu8m?=%Uymnv{`z&_EsZ$R`NyySQf2+u%k|%` zM{Eg(>T213jfFGzUkgd8ku+K*_oQ|);2Unh{~X%nL&x09n8wHU+P zihabSK)ATPynP1;r3t`7&PoCFNGQl{rylHbwnuMg%i8|Iip+{sR99LGRje&*)Q4Gd za{mWAKBjrN#7#6}jhNohs2 zquA|;Ac6f>7>c~#uqD`Y;1eK7&fXp~3JbZ0%BxC(E3rbCS}w%H-&)i*yE_sdFI!=5 zC^UzsNm}-S`#8w1-_E@Hw+=+nyMvHa>px!X017wCp^5!?iEZDt^kbZ20i5>pXW2%O21>`IpL(M|O$Lo-;S> z3Et;9W_j|H53gP~vgW+aAMMz3L}|jd>a&h`_^MeIkN5iMycY&LE_r3$sJL&c8;rx&+ z`=>n*)_+owzxDg8etht&mB;P0_e6M)g!f7gvRxp2j!iNL0w&tgd#NXKQn2-Netx;* z)lpw(x*aYW)&;}W!OBn=tDCCevQ-t>0Imwugi3HKU{z5`u+|RW*-G5r54Q7~v+zh9 z=Pk(0Fy_phJ$KT?f`a6|NNsM*-;QTPThFzZc(yz!#nu`t>s643mZdZdtuI&>!g{pB zID;gqvUZcP6v0Xm23}}}yn|jEs>2GfYOYet_Q z%V3g3O`9ETsx@0$bZqcd7NH8tcm-b*?P;$VL;Ltx(ZaAR4>sS|Sy9NmudWm@e_E%%+Rwm*Q;KR~ zR7$A|EH7tUYg^AH$}u|RNjMnCmnqI9`bV`88Cc$;Te`HgJf*k*x@U)6&Cs@K(-NZC zBWY8^_&5%0AhvBj$Lv>a{+4dX8(wxQ?Hn+6aP4=1fnN!|E0hurF5`WBvpD~w`N++r zsA_|lM+@@lWHvv-Fchi@VHnm}C?1}{AciOj)vW5U|G_8WK<5dst}0wt%}4r!C825_ zYykK;5kjRILKF>ez5`4dg&l30D2E05F9&6*0L( z`aoeUmggVY^b0{zR2N=_Ni$SegP`n?R8CGxGIoVmVKi47$JK?<=xovoLX?GrL8Qu? 
zIE+bcm&2~WsqCN}E4;^wLg9FhB@DUG*m!wWHEIHO3vdL9i?sq+sS4KO+|IEkUuCe| zRs=g_Ul9sbb1^V~;h2WGk5^G%Szc=-29uX18{t(L(;)`cR&H!|IPO;tD-X1IDF~%a+%q(TY)?tDx(JbMjnh-pU>T;ZM zv`SSHT2aMwH*W*WbZza!CSrU+#quvlV+ zM$YJ!$Qhf>Gs8w=C~Tc)?RZM5UCNt{xS}j1`v)(;_8zS{~MLl_HID%IG^ zma9KZ8fIh!z-Z`@(0&q$o^H^16|kDN^8pikB}->(ICV&k&&85?-0a{*a~KOQmcr32 zhQcYOB`Ih~Y!pDdi1wDo@BKup4%C)$_>PxtH~a&q7VK!z*@ahC6{lF{wtdZ%7*n;u z0*oS@^$23B9%{^)n>Tk-!I61$jxffKHD=G7F=OhC!~eLoFke~?WmmFou4o(da&%x` z=i~T-DXh@srunfpnqztHVy}v@t6}zkKKWb<`B7n5r#I|Xx2zi!725`E+Lku%91jLj zFvpm~yT@3*K+DId#@Bqw5Oz=eBj7h_B7Dk`uw1W6HKt4UD^N>^LD$({ln z9tqbL6yY3tRYAB6BeI~)*YblTkM__2r+apYXzFbRM9x?PEHU*YTm1V@g(48F^K!#-Va~S}Peh%-kS47PSk-gzaBuFF0UZ zrxwCyv%I_}RK@B7&MH{-kG3hXo(JFu39U}$u~c1?Uu0sQ!i2`d2Ya~xo1@>5f||?Y z@wA0CwQ=dp$q7`f2%rgNxWwT&RLhcGMmRtgyrm{cguZ$rcCFscP@5FO8^+x7y(BG~baB$G0)UFDJ7 z+wmlvfDsp}oYzsUn#l^)%(~i{rAJCdYF^t@8IwY#Q`rt@s!Dm*nG-$@2;I$kihJq)K=PB`>)R1yc=fr zXP&lGV9&6W|9vOL-#ev^S1bP6eHNY#{=ySL?F`q}q(C`9hplQC!?HdCkn2^&ELNt5 z=5VpFbGHi89uItAB`+VvkCvq9gDF^+uS}787^iZu`y~g=LC#Dj$5=SZFb)5KvZOb_VjCZ(jgVN2;mf4tcFu2&d-~TxA6Ui zL{sF-|F1E`&*g;+kU)F)Z10Iy|2SyLR8?0QHOF$wf)WQMCJszYOzbyQBqsLp_D_t9 z?+Rz%-aUJDGGY=FW8rAKTyBq^=ui_Aor#IvmCgeaHD6+4LKn+_Fp=G{?0MMICx)4C z+99&-_cQ7$Cb5<<(dvkEd8v;16mott+smPBlpKK{pReJw^?2Q$yjY>;)6{kgNsXhx z>uIb?!piHvpcX zHXl>tUju&U;dd&2 zEAT7BZ!vz;@f(kyAHTu)arNRAmGX+QV4MZzi=}|OEq8P{J3L<2DKE=hPPRKb9h+`L zJ3ZTN$G7FCoxhbl<&HN)o?x3bVUei~tUU3rc8xZ@FjNggQ~QP?@;kVZBlT3+gJB_F zRw1_~lhe#6<12#80>x(U&VdOH2DDtFLx}KVI2<_|hj$JV4WBUbe~y9WesgFQmR91y zcR4K;;(7Ek)4+;tHN4)&+S6jg2<*wQnwWtn+s44c>bj~`JXxKPbfW){?uh)+a5+%0 zYI1V;+$o+lEzM4p56pKwSYH-c9yC(r=?pv#37&hB6*zz$t!z8%CgbwjCyt*7L~!(iSR zXXNCZP+Sqh5&dA!pv6ZUBaB1Yo`hWiw#o-egT_Fkv?>t~A{!?d#)+kYig56R9K*jd z8Alx}5AqQA?{uk-ZxWLwvGozVSkZ0+9v%1pI`#$gtZ?BR)Y5de_6JKQSJvj01h5r? 
zS0eC99~f)!dI+qzPmuetdsh@vZi_m}-kAIoGR_VPE%iUWnc<*EJsN)R)u3HZw*AD0 zw*4d;+WvMtTOYBZ?SG(nw!dxXvE4R|j%Vj%(+?ERPH#B6VqI!lb~%2mQ5A(k6|nG) z;U{>)W#yG@)5FrzFp|b`mmuCYucqZ_`Z3fD*D(jhdW5=h$FIP*sxhq#pgBB z>>c62iZ+ei)_2=Ag_^($JTcp*?Cx_I89W8pc*&a;yoiTg8XHyr>GdM}<^!Q9#iKOM z%A)dZf>jAKt)tI7cC>j(L_}RuH-oX0-zbC=W5#gPZja@gv@jI%elngk4Dti)Y@4!P zt6=P%>fSCE#4!AtNwX|nRs-w2JmwA=WSv8VCDwWaiHs~|C~Lk_VTbh@y1RUSvZ9<% zESpDESdTOc7pILh(ld`Pgo)5R!eiG>+Z@@xq)IloeK?Ep@|7-f7O~kiNk`Vzwo&ad z{RdQ>oOTngX;U^U&?z>QvS2s3P@jGw)+5XX~{?ydewAA#}jMP!7nWDd|Cqp~xzv$C_ZM~?=Jqmlh+Bt06@MuXI`74}E##e{D_HC`bC)|C7mk zU}333FNU6Xv^?up)WeU$#)4RwT?6oc1^s;NqvxK_DNxnA4u`bqg&|5f=--Q&J-<*KvR zZ}HDxaMs!Nef|{}f5h~ke@RX`Y{}6D@2*{c&W7`@yXD^d9^3ikQ*Xcb{vOggbsnCY zk(G1k*s0TwUV9EaZ@>4xoliaY{Cn?{78@sha}J#}dFr&KCBd~DF1`H8=bn%4G#sR< z^A|5!T2K;Pzu`JWdF;t|-h00_w$r4kCBewrTkn5x$IGv_ez|7dSzE7paK~ehKll6_ zZ%(=RfoFF<_x#iuGv_Z{T5!fW=iYMXT{|Ay`S`1yy7ySTG+Y_lg?(KKt`d5Ye z^es5?q?>L&`M&$R_vqJu^5HXPE?Bf==}9Nw@z_f*zt#HXw>9B&YwI=+O-{M?=DT)0 z{`{-&Y|6dpV*k1QUwrAgJu_x5TI_Vi`-Y`_{zX+NYwTg;C)ID5v#jokC!1e*?e!0T z+e1ddz*FAQPMPTHtvNcaz9BBMUGMK+-COPD5?YFup*dCIbT~VC^5Z)@=Q&lakH@XL zRHv$-FMBmzjd6&$Zu$&oZ|8ic;^-cmuT4;sRH1cp#CvnJzC#L(O6`~-ktg(18r1~H zDZi=n3lN>T^J4~UPPt(|gFub6J9U!bSxhdPzGp6WRD(s z$NSagYL8qy3_t6Hs`{^U}iCWwtk?S3UwZU4VD&{ej^aQ8ob>>gaN{>xGl|r2{9$cpR}^avYw_ zky?+)J!4Adc&E5MlPC3_;+hjXb+vP{r>}bW)GRg5<#CL5dRAxlirgjQ(_+_LT3Qzq zdGyTb#j$Hs&VAvO!#Ce^%4p{y+7ic5&ty-c-sP0W#la)A(auh}+y^fD!L|04L)=?F zT%DezcG6s{&ptytR*zNPPTzTjhr4UXM85NcUDe$tM>ci!&Ug2UoVNOKb=~;*Zfo=V zM|O>fyqu&aXv*r`{+)7kv9=}h{qX6UM^n~xnmB!Ix7puc6SfYOQyn6Qd~Jt7W4wiLQY&R?qczQ+3tp?yJV=4v)v-f>ez> zJ;bxtaX@KoN#z3hLbEnoxtyis&MmO~Kus7&h)socMsS&}=I4jPHfrx4v;Din4994j zn%hR72djmo%0tNob=3v6p&;A;3!rDTg<=E?xiy+W8}vdtx=RCf?qT%z8ioD87}+r5 z5Wg`pbWO`hWm{oVzn=?}={F^e?3&}>6_|I^yPJ<{{%F@x235^(6?^7W zH908>dWnKRF(t;|%_o8odx|2oL84#p#W6W89#inQ&bVdZXE$cSEW0I4MHPrmvE{crJpzyalMF{h~$wf znpS8|B}S&oEFn{c-F=kN;Lpybub3hnak8JH|0h>5cS+$chulQI8RW z-J(F~WOhC;miWlWd5$Q-(QFN615XB`v=V}w! 
z!1DQggjC)Y7wV*Z~V;)3+x8bv#CD4~1;z8rwPm3c0v9K&@1-=K;H)yXZC zZ_(=L4Wb^oX~OU%I%F$36g3%@O-@vXnB5(WAeG}C$N=??p2k@TlmjyziNmy6j3+BS z2xZoEmrHT>)9^XWEG^9?;zV~HUw3w5PU2K_^jI-gBPTu{Ul?h%N7HoqVt+mNT@$2O6C{45)acwvD)3qLV{6#mT$pk?p_D?)UN(fG&sEJT@=AJM51 z37zgBDgz<#iK*tYyAek2Z@z>ZFW(sXl}3eKh!y}6zDZIKSltt!O#_VdB6OJ#OK8Tn+M}-Sb+I>$Dj5M{71q<4A_v4bOD?3iGe2o z3p0@}LoB8S;|qTcuqqbKk|ZD%;=AbIBmH80w-K-j74Q*Yb0tyFACNALr_%u=D~Qf!xQgg@z}6G+ zrDVXu2tIHD*tnV~`$vt8bwqOk^G`*1!0OXcUcja^P!7Pxvrs>Px$9vI{|V&*%mA!E zhiD35{<-*WETFLgd;qFq9jD@O^5)=BE)4keX2sfW8-reqi_# z+T}O!^$O|%u=O?M3+Q_r`~v2`gLYwe3(5^xxC?v(=DttVXAk)P5d8*_cB7nt4WFQ& z0k*axU%;j>(VqY#U*XgA?Eekwk>L->mvr*~Of(UYenGzmG%%9t0dsYH)`Q(nL5~9Z zT!LO>=oa)T(>(%%PN!zTu7HJJe7+8_F;-9sU`w39ClBG@3Go1ZUGNb!z$QSif)8VL z6=VQ5cN3Hg*wO=EW(Lepz^5kJ-3Q-j1Ec}?QW;?L5J7`gof?J;$_8vc1YaNn%pZXd z+A=*!;3KL!)gOyf02<0wB`5>1888>HA%ye+sajAWU@KrXyK9gSyTkap9$>?A!JpNn zrj_^-8pGpJen9^}1ib~=e2Jis0F9>wZPO92uR?yOPSuAi)WUF{LM|8J0)-|5HU<e3ZuLMmjetln9uAlR{$v z8}3r*AMCyxpUMU-#D{t>1N7gkP$OXOKNV^MY`_;je*ui(qt0Fr>ho=d1_Sz96v_r{ z__sp&fYt9KJ;2Ke02{tm=p(@V?-k03LHT}E zs05Jkfu5Cseth8NbU@#);EU-(r6&MeJFC_Wk^$IqsY)eraNmTlodLEs;-iOv&9|uJibwn0 zs*;c4BM1-J+^o{YfQ>Jxv=y-7aED;gG*GNNq z*FUP!OQ_F!d^&9|%2D_p#+(n6G{lFO4pn||$(cuOAktegG%yiqet71q`Sit{^ZUdNP6Qyav6h3Zy5?%HI9%CNqg-0Qcqe#>*AjP)?egWhWB++~fxtlA(MNM7U1`-zSpNd=lcFOqxFeSrkD!5!AtI z#5si=)oVz}UyC%>g4a_?_n(F-=`_-e(@}$GkRx&?DGit#jC08qITvNA2WP+emHLhI-jX zTFZ4v=Q_~018ye=-AG!~ji`g0$w{}6yYUu6za$-Vi?j7MxNj%bkNG9{4$`Xcgx{UW z|6bB_?<05X11S3rj6Jbxxn;V*LpXfU85=OqGy@t? 
z&iygMQH^<|Ay#Njal&Q93*GM%juxMA=3_o7>?D*1%p>_-5x$quTJfvLywQ*VVbdG) zN^ha(WB!Qr5vtKoXbt^@XfiNA7((O@M*P9T*$QY35o+}ip;Tio@r@K};YgwQFpn5X z!r724RDTL^DDO?95OW>mci-bZ;ge!L`@>?p@mZd_A6o8jP;jAwdYJLFu283o5 zfiL{>i-kM4M2P$#@?Hi$mkDQXnV{-2;mR*Z*mCevE*uTV2q$6g@*gXlEr7lX)I+7v zTPlH92@weitp#(KuUa^(F?V79a~^7HUf_dLZ(6CnF1C`#jG%oS4-E65BkT?Ey8&t4s5qK2=lO0@bmL~l z+0ux#8x?2sEeLxD=GZ$C?rue?->Ha}$Km%3_1+&scD+3HyoiV)zejKo~~-X8EQ=a3{~;ZM3|#hcVsEjSg*SL=c=Oq za#b;|RMqM$F@FN)HXywQ>^yHmeTb7L|-Gs@{4P{I{wO<7%XLwF>#B3IiVo zEZnB5jet$tRA=jTs@{kXOSasks*#&j-PfqP@^4WU-)$=SZdY~VPQ<$t@g7yR`bSlK z$5VCZ?nL+}kl&LkTLd(O8&Q1rMU%Z>_WH?QI;=Mm+>X) z;b+xZ_$$J?H5Yj`B|lDc<#*Q7WNQAzfsHw&w(0>mBOo4vy*Ob;wP0h`MZk(kl zEr8A0np!;?lXec~^Bm35JVq1#!!$iN7h!TWS2JKMV16F-_z9ZYIsxfT)YQmi=;2c| zr5-v#(-BB(rlvRKYfhS_DTT8&y=Ath_~vR(|6HUqAGD>K-n3Lx8j7H&7i-Q)v8Fcz zQi(=JiKaA`ftNDyeyoP^tf^F~=|&autw!9H8nvu~-^qx3vZnf0qa3T@zXoyFYHIUZ z4c}GOoVn*9?gqp?Pov2B8pfDL4Hs&9WTWP7+Ni087lFr1G&Oe<%6q9M8aE^S7Pz;d z%-6tugQgVTj56K~_if<)cBFkf_`D1G-=&GB`vC9PwCV>mCGw=EHavqmcpf~ysHu%F zA+47IU*`1*;=Q6#8^UG)%^yguh&q3dV) zN@-|=dtK}|2EY8@@m~V}+~4v42mJlN<9`|ajRW`}i0{FO(+9r=gUmN!W9e;Pd2zl| zl?kzq+E{0t%O?K)kagFNmk)X-U-tRA1mLJ%8eq|TvnQ@Gmb;tjB*SuBPO?*-Wzp>- zunpsnTz340R(z)w>CeV5jEc{;lRt9V`7eu#&o-Aoa@p~#{$lyBN-*1Du9ew3G(sxy z7VChcU5#z^>Nf*6Td?&M(>=Mjna&YbIt6Z`IN%L@>u_-!^Ob)&WC5-N`8Vd9{%ot_ z%aC`1f^J^C#^$RYwxy1joj!lpnXl?J6Si=naP{kB@_DrBq!vKQrxo6?=+XI%x9Dsm zk)wSFEmWL9WP+H?fBJ}+vcmta-*8%l{?9f zX+k?*<3!VKSg?MA>0W8Yx9edx$UNqv>FYr2vL8B3Rj{?}8@>s&{iSmo=$-KUz{;0p zAYU*gQ8s>T!{f`4_kG*>#sOyijIjKp>*F=c-%hs_;W)p-fo8m;ta#r68vCKoLxu5> ziw+N!Se`YGrbFG8XN;ri6`-@Mil#&5mG3&vFNSA_3uJj!k2kTz!6v_E???ZQYWG&F zJhq%#xy}r4r(-8=yMM64+jJZLXwhx?dni1(YiABJ)3fEuM^-qy-`VkPm}G^w`{553 zKXMYa0|EY7eu@yso&!20Ltp<7<@XOOzyDPJ7^{6?THo*Wi^ZQMKIw;3_pVPn(x0u5 z3lPz6D?1H_JuH6y?)+}I@@rUa=H@d&oBVA(*(BsfOc*^+_$;~|-|jy)%wJ>1YqVg~ zTGM@;RUUgB+w#F!XZjl^Xye142O7>YJ?!*t*l?yvZ{`ZbHEf949=7~_8MG<*9Vk9* zHS+RbWWtD*Pjq>kFSWv5X2OOB6W(d%x6iz2rDKL_A0KV=qU9f5@3~i7>0ADGIc-Rr zP5(j*R&O!g^;enD4rfFEHGBK-lb>p3j%GZRhvT#LRD^x?qk 
z@3aUOFlzx0*cA9WkbkqKx7hx6J=@TTHB;JT<-1g-Msyb-OrI1@zZW#-Cz}530m3(1 z;f+Ut;EJyA$WD{qV!?(6(|y%&vmT=RX|9!y8EW5tYWqjGyWLNXL(FvT_ya7rJwHVI z53>C2`u=c{8lS`-T}(@K&T`v& z<@KO<#xI)Q0vhYRxv<^x72SXQ4Q31So*`d`yzi?Q!d5G<==i)>$QSPg@^$%0vtQWt z_^{=+%khlmw&xwzS7Y&urn7jL^6pz8xT42b<8LP22iq`T4Gj{|*-2*p(c{bJ-Ox4}#?qL?Gi&ew)bmb|aG{PU7c{%tu4)3nsD4eXKf z742`YBctihA$SJD+xg1KxS6S79bn1N1NqzX^*2?zn0#B)%RYSW zw!%&Ho8e|y>78e}ZMl(a$?fR&CaWIRR5ShpEBrI7LeHc~%yF5mWDWP7r(w%Sl`;mPn z@MwA-=*jz`pMYdoE>J&{@928>rJD47WB>m7!%TXk1y`h-^}CGI#5D)rJpf%9Cf$}t z{{pQOekB&&)??ON^?RE|k4`6=j-vxI{ah>mX#N{5z7s~7`A6r|V$tn<(vY5UZ?;I=rG~j$aWV!8m_C3qZ zV~?*NEq9{nqygC`e=OVi%Cg)%#`s!pxmic!Ym4Q!=h5w!d%WpQpIL5O9&7`h`%Tkm zv;Fs}cdMQ%N1J@u`CMYT53$mH#B$3%2wJn{He=!ZlI6DR*|uv$(`~y(G~KpqMAL1% zMl`(=#o@BlTlNdP-EG)xm8<$tlf%VUIdTBG4AJzw1JHSS$aNhZ{xr~dyhPJ^dDI)f z0b|X4Z9RKCXl&|>rY}S}hJ${x6@DRbzP`hc^Nps*AUw|n(R7~QSnfyDUjW^|AG#Og zm-&nikE1eD{v12b#O|IW)`tzXo*$@4GDC9gJMu-0xbe01X z^Gv?&^4|^Gu>H^vLwR^C7*@LimDfF1d{l$HqW!m9{)jHGCd-ZN<@KxOX5RT)kA%3c zhK)D*N|IEZd$ZgFEO%E6uCeGr%e@7V`@tts^dBvHD-S?iHorEkx8^n5KU%IBH=u%X z+5TuAdD*aeyXj9jBgPl^X};|7+ki7zd_~vKo=IjoW27%^QL?H( zbOll&7?A`B)pBAdF(kHdoJJK8AaqeDbkRc>Juno}Lmk0TLKi~vMl+?`NNV&MlH1>0JBgmS-dZ^m(<{&n5^e#rKzwU4`%G{r2yr zeM9koh;i)iYyHD0a=$x4N9xZvKC^~`&z^@&eHX`N1l#R= zDoGoz=Oo+he0WWU0Uy6zK7tqChx~T=NM3+@mEYb=PGjWYx3Ahs`=!#pQ3w8KN&BW9 zv|rds`|;8q?ZE$pPU7F$N&7>c#DARacKu3MVbQZsc5%a&Vs^W3t&Z`&5xk{-4{-kN zx*RTn28iBozf0P2{`4K8$JgF}_Q2N`@i5oh}4{6rIzuV!nuqYPK>Zmu5Dr(o;@12(SAFg};y&}LHA}PK- z^|sFc{mdJ4hT^{t6==t6D@u;{nP^DudE+y|C>i+-EXs$}jSDFV-s1hh#_!+1XN}b3 zx&!_1Aa6;=HHPce#%uFrb(Y8J6YB8VjG7 zd~i(WvArKeQhdK*(gr`jePwBc#>OwZVSz77?m)dhf`hb9uuifr-lg-Gzgug3_Bzzf z)ChLJw&Ps5MeXrhwJzPJb@Fzt1J>DlmGK#Fl%->I{w&Vx9Iw5OE+#YEKU@c&&~AT^ z@#g`5SHOE>zkRs0hp$DQyq}49*SUY4maftKGx8aElxX;~{kp?v!^46XA}Mwp{5S>L zqw?8tlNH!_b1!&D)My$pRq?Vzd@c~fqwW5KFVUo$KUu+a2PNKf4p-?>3rDnZ{~djuNf$% zUDa;mea*X{Sw5Dp)e*ivbY_q9zV){njc@1I)zatkvia?Ab<+N{w8uK|U)V|emog6> z#a}_D4ZpYe{jM(Uy*g+=UfOXD`Tp{0y6}CG-`-o=@jlIOUrpL^9~rTq&V&7(XI*(; 
zhu2iU{cLgDu!Ht_(vJ7zU8HT_4%&~BwvOU7Nqa|eni*#l^TBH%$`NGC`#5Ps-hO*p+VGls7X9t_>iOiT z*ih!khOztY7f2gkKmGO#r46qsYwoY}gky)YD|sWu@OSJmFSgt3vhRoc4E)ZZANoYW z{Nw%a!)&+rSH9n0#Rs;3(hv7y6yFbj$HIFVza95MoD<(tZ?BiW-$e1jG4R`4r44F&9_Se^=Q@*-P5-8vi!i2Zycl`=iu&k28R; zZ~L%)Pr07ZZ)f+N67M_lv6`>VS6nn`$;)?sQ0zWtb(HOaeSVU4XZ{iTN9bp@-|rIX z3-{WI%m?#^GFZlqaD0N5em`|JszW-s;j`@Fs zagwr+GECaHk$UJs8sGkobfVn<5Z`Ym!@rj`Ovn_T4&Z$8~_enfP&VzryQ>-=2~-++Y0m7HPw^ z)=~S24%(+l8|KgV$MuKn!*4%9+B)j@WNF8_8 zD9dFPmfdN`T%sh&xCfy4{oy{)(Rdz{cHEDA|Mlg1?SROye*0u;!+9{Da2X91jXW8yVZqV)E?PY>k{h&57&9xkMSOm z4mXv3zun$<`tA0<({H!;ogKArDDf}je9z^1IfwNOwqM11w&!;y`E1t5dG?FQb6BSt z_c+$Ap1<|~H@2V7`U=)Zd;aH>Phvftb&m1vebOsryALI0%>8BCzgFkluCo}~bUo#K zhsQj&+wc1aNIQH|L-qK{ZdmO6u=C68cHFJb;)NEahUWq6SxL=jSG5NBl5wv_St8dN zJpRFUNh2(S$i!faI*nub_?%&X=5K!TU8k^VM^8V9tgq(s@6BF+yU-qGz(dKnmQkjX zaW11+|27|1=daTKZ>2w88&RI};=MuctLETeWXtcr)i0sm=D{D|$F$pgTmE)FSpKPT zJf3x07g#T)KFWOYdbgo$emj0g#++Gvi)(d`^Aj7d$Jfq_2GQX6%Fc8#`F1=CH1bApl2YRJ6HyF&`+WMN50ppA zcx^^`iM)kk@Ke3!ZNC?6CWv#Pqju!rx9=kUm}|d%ZFxV7*K5ChL?`Vd*}ijw=7ZN# zlq+PzJ>_t=FONnklgTzt|GYbacD#o{S(De99xZ7g+dTSuzrV8?&#t>D^>#kpO}h*u zEPlKNwA*?1gLq+#ud{s7Ie^8R(vY?iuKwf$kaTo`EjSK;cV$hxRt>#jL+# zy^M9`SL)Z7^%|_BthZo2koE4YN3b5v`Y_fLS*KVZ!+H+ub6L-0eJ$(TSl`b&&-!`R zud#lg^-|W~vi_BI&#!en5!P$7?#Fr?)`M8@$$Av)16e0oAIJJM)*059vA&-59jq6y zeuDLjtlwZ=Wc?ZI?^*wcb?=hqzY6PhS#QcZ#(HPgLs-|a9>;n->nW^fuuii+ll4Wc zuVOu)_1&x=X8jE70_(R~FJ}D}>t(Dfu|B1gzO2_^9c8@*>w&CyXFY=TXx4|Zp2#}I z`WV)8Sf9&!9_wpa-^TiW)_K;?vwn^B`>dC;{n|7r98N8SF{>HfEQ{Ya+nra${j?QT)AMaJ%# zij3Y9|6woHdvwU@YLSfHGu<;XdT^?Dx_4NKd`(EgTKA()8(N&!bX6Ev-T1mF-W4y3}Loo+Es(YlsL}Od!~D6DACA&&Je?3W7Mu4 zN=tgV&}a1EbX9K*chB_75=lm!?h($Kuedw$SwS!w*TN=ktZ*3Ub z($d`0-VTk}8&a)}%}wpi`^{)*nK-3+rsq9+dQ)rTR6oX;+0$AZrh2Z(z-v0NZpw7G z4KB6A8=4wg8Yc{%GNpL}`j>obYnq!H#l8wA#VbBuHb^q#zTt^wt>)+lRF@ zPgTp{rus3B$27np;y75_nqk{s!=^V)!1UDCG|U`23(nEdFr-mtTH4Uk-))&Cc?T`A 
z_NFlnts`ZM&^jn=m6@p>*3dd(GMeITO~V^!G&Hpxj(zLIpOTXH${fi768g|ZT5uBMVMH!yh(>Nxvg<@!?f1smT*#1+Bln)Fb+=&|&zBmxqKa9D>G)jS~B)!2qvtxi^RUGwO=#;acg(d;1inVbqBE*g%-RV$VES^F>EWMt!{j?&fZecgtn8T6ylWTX$}2q z_ST6xxO(>I$bTY`YF?9hxh2YpouLFVK$i%rpf)S-^iN%h78?dAR^2| zs|+5EtF^9&fbrU8_w$qxY$ZK&e>@Y28GofX?oPf2B zJgf3*YCGFQY?wM>a+r6)aB3=$IExHZVUI=P)aDssH%rOW8p4??g*de?lr9TldJ{3- zLy~Qln{yyx=Niatxs#AVd#ZU__Whvso6nmZ>tcv)nH8f3!Br%NTlG_(zkx4><~^!XHi3 zo6NZA55_uq@o+mWcI;stFYVYYN5<~7CD)^6CVH-Iska1{bzp5zFWplP3|_Cpqh}yT zueSX7(PAgpJn%Gm&rx4HyRl)4p4nkrYfE_f*M=wEd^Szdvm*zb+0xjmr*a$NN(d@y zgC0R;Vm974)Z)ah-+oZARm*gF?QfV|n`&s8)+`6Vv2}JZpeo3Z3#xP#j6%T}j*qMj zct$eM^e9y1Fq7HJWK=BTT$!sVR7FbSD&0xYxHL8cDeTs|rzF6>t7nU-M5 zcoy17M7ooyDpVv(6&aZb@sAZNVyRq3G8T`=q)%xmRVHJ}SS(tqh{iBH_rF{?*y7ry z%6#v__`(ep=`p2V*&gvyr5pm86q)2mx~F8EOjo8dmBp%Db*5Kwl}xYPYNe`7kK78G zid4Fy&@WRJmr|@KRz{LC!ALC?44Vo&_gDxx8yBDii<^b)rnkXzB-zfV^$?Ikd|{HFQ-g0l}MtGa6cI-RLI{A zFxwd%hhCy8R7x)rqSPxbr+T3xTbW)lRp^8irv#j4`EnW}`Gor!2#!bZcRRH#bGWJe;o9+6l=P7|De z`KnS?R`STpsTGN#lKo41xonk8rwlNe?Gcxgx+pr?WW3p) zS(&43MX^U|rBY>X`E=h>k3w}ilC8@1%&eV?$e}4NCz&N>3gtpXLP^Oif?g&0NF*uQ z%1t1eiYDdkl3WXwX*s2$c^PU%_M{XmqY25hI#-piNEXGMljAMBn9g`4DU-5Xve--X z5gA!5nprMd6mdx=QI5Zyk%c}o^HoKWrR5BhGuHi2lCH)C55b;l?YH4u7q{ntAhmtwVt!H)~m^4*l1r^qxHjew4bE)pwqO@o}u-^ zv$bA!tJWj#)Owsez$GGaX5Fvc`$?@Ayr6ae547&PnC-FvIQk9xR%rh#YJL5n3VAMV zbujmWis0;u>R_vND}$$`?kf)%OiFu19+&|8vO_C_sMP7hwXSZe4CVLGjJXV1d-J6{~{lrG9N#RhZv=dC=`B;Y6+~^uPX}RY6hu8}vg}aGbOc z>RlaR_?dBQ~oJ?ej-#{qC4*S>Ig7Jzf1Sk_Q0Cg`2dEMBrk9V3%W~LOwMv)_?09N^q=Pf_`}HYXy_gs zFARNtBjr13FZ5IXf}Fs^nWStk}0*lj{;?Z*mNa-AlPx82Kj1 zOK4A$*OB7@eUcA|oJF2! 
zJd@nP{BZMq#N!P&E4QEO@n;^7?632g#skx&+(6EfE95?n{?ccuy|+B51)L@C zPtK7q6UOl=%KH+O6}D84oTGdaIZh6?Vm$JGP@(#ktC%8uW9CDKUF*$Xu z+IQPd^#$@39{=L;5d+Kp|1OOFGS_Lm-)PU0_ugLp6W6Q#LUQB=0lTPwjC>b4LtbH5 zwHL@&k&EPQcT;X3&^PjYF}%x`e(_L$+-vBeiu3QkaESI%#XakFvgSryV{?mJ@<(6&*bEz%75KU z^=WdB9D7XdpOUkAgdsekfA<$cJ}#maNY@g>U7kb_T@|6~5-z4q03k)>*Xm>hhnyjP9d zv!5&9NKSsCyzYK#Pm?E+Gvxa`u1ctX;!D-k<1@f&2Xn&dS)!ueKl5u_DdMNy$_Rr~G zBJXsd+DkvGeLgw%lk&y~sXb4=gPdKa_8)~Yp4czShmBMJ;8$gPz0Und*f;s4w~({seupq0a+;j3RR8#)YR{0bCMT-Y{+`*Zm3Kaj{q;~jh8*do zd@DIh?sd5OM|x9lyqxk>aXk(1;-j?nl~Jh)Ygy+4eR?Rdt? zbLpQXzfDe)x31OxbL4ATv%B-NOF9uDsMy1lau7cYHGiToF*?M7gksM zx^?XDFUpsYqiZX_Ku(bDd`YdNb~|4R%C!)$b)T=#7K2;d;(&RU3&ym+}PQy?>$ldvzscU7oZ4RbG_&uh z{DCm~&&%IYQFfiG`p8bow+e@L^5?V{$Qv}Ne_|K)KbBl1zdlTv+rg7Ti^+(hMB!qDd?E8jtT=}6_s(aiTK<+;M}&yW|BV^h=~ zZ&7=Jdeto|F#Wc-wJ zfm|Y=I*axeweLDxxin4rM{>MX`QBq_C!chza&)@dk2sF;W+*otuN))i$XW7#$cdTi zUvq-`=g3)dk^C(=I!pccIZ^$Sh#`Z)O}a&AAhe@`xuw>?|^gM`{!$=T7$50R5&lq=5B{$hCWwv?^ON%EoO6nT#E zIQ74WoI6DMLtz|`JbB%7m80_SQz(ZSAEtacIYTazv-0m)DBGT=`gE=G3FKUz@}uN< zgYq(RR{s49W&e!oV=c;;3S)kg$0gf$~OIXuRM-<-d`WET5SO z$G6svYES%2c``XgzD+os|BuxEmz&r>c^o;pMD5QAqyH4S?|kL>Cu)xyFI8?e{#1EB zIrvQZZE}*__hyZkC+|*oF@N= z9RE_|9dMiaXUVsbgRj(Hb-UV=fJ__Xdv>>2t$pnM3qNIsvOeNgSs3FCaq z{9XAk&nm|rR^CS#SA5)&Ykn=$sqwE9M!e!a%CFO2 z8l}8Ok^S$hJdKs8Rn1$q{noU+SMCk0NKtZwg~RvgF-AQqGgl5k|fR z@?+#^Li^ifG4ml$5f0~<{4BX({!7%qL_S6s{xSKSFv=I?$XMmgKURB`d@ealE|7EN zRX$<<wKpANUhor7RG#M$d}NbPpbVDkC*?P{nI{N82U(^`X5U!kZ&i)#;d*h3yz%ZIZIyuOSR|7v&fNp)o01EiOO#ahxr|)ywO*xPmxEH6U}PRdHk{Y z)4uN4sxOd7lZ)i@$R+ZN_EO(Lhscan?p zxp$PG$@y8zW4~2>ZnpA80_B{5i_63*-1?&sARj2elW@QyyuYQNE3wK419lb-S`Z0 zj`mMI9=S~Y;}@#_7IJ~S{LgAnUZnOWkN@HEw*M~ozmQy_{(FxP`bGWC{*cG^@0aO| zwf}W~)%h%4qI|3{uBYgw%I}kN|^xvNya)a>K|K}8s^Kt6_?k?(JyJWbA#BNg-~A1WN)-?Qrf5IJ&<@-nlN zhg7P6;###|M~+>md_OrwURgf(f%uW@)jq}JXUPTHH>y_u%nj;)JUMZr@*;AHyq$b* z1o3k>sr@8!@($$%+x1d?mV7L^_?Y@X zBaGu+%qwpwpIZT^A6Gt(9DhRjO>*$0^0xB17Wn7M=LloG>8I4bfSe(h=$~4s_RZvT 
zFVGjrM+k?<`#H7WK+cdC(LegU+SgiM_1Tw{4QvKUDsToF|W5LG{UhsU4rQ3Fnvmj&L|XAE|vcF@iJX5#%g+2DwDO zo19#%`p?M)@_H+2yvP!@?@!K<|3(fzR{J~T6nVXf>Wk!Ba_kfJzfgEZ+5P?-qCzJOhXUXHqFOsK|tH0NH zbIH4sFDGZnx04r=ZG7*NAEUkB4;s((N%AYSx020&F4^YqDYErfB!5Kx_hgG-{bPB5 ztC1~!ifqSgzQ?nEQvDB%pC_AsDcSUEE>r(rtLX6=MYep-CR@CF$!pPnk>~%b=RfFY zjc@&rCENLPHhD|N%aV5|ze~1!N@Sbg{{Pnac0LUx+xatsZ0EyaZ0A$(tM+H( z?MwcY{jEv1cw@;nzh{%}dd!mTdb^kWE8{IB+k6$tE6L{uaXt3>*FYl7=ezS~h_nX!Jqw(#2GmvcO$5^uEGnZ`hmnYkNyhgTsz93utb${3X z?EbVX+2%h%-k#%~Ot$`yBir~hWIMjslkNIjK(_fPkw-B8YC(_i{nw%7A>=0VIPyv4 z4B75aw~;OWQ)HX(MP!@rFUU3@L522j=f`T~i`ZX+Z0C19`6k-0Alv*cB0og?_hgIL zw^IAF`Hhn8dXAG{qJAvdu7@e)BJFd?Hvboq+s+S<`&DWGcD@WE+wmPmws;Sd?RYOF zTl|t|uddekcE9aUw%5CXWShS@+2(63+0Kt7+0Ks?+0KtS96Ur)CCSB~6M zJ_n5J{VB4|=ey*!XkSX+iafuEj&Bcgte5g=a*^CX?%iAMv&loq=a46puOZJO-$$NH zevX_azeiq3{)W7m++%ee-!k%AoFES%A4eWazKDD(z(@IMPA`AI-a@Y4arxKwGxBEwtY_A7jknR4^ zdnNU^*Q*HGUjL$Gdwrirw)acdlkNWfFxl=O1#*n}1rhCUi17Nd8zoOA?@zvjd@%Vj za+3Thc@lZmwKQH6c{lRXb+o^U#HS@`vPO$$jK|2goPc<ZkVW$p@2PBG-|BBp*%Q zU|sF6NFG7{n0z#OvrRPq<>V8|Pm}K_e@(8~RQ=alPvfsZ-izEyod@Z^6HmZMtJed4F`C{_=8)^L8$s@?G#8lrxt{R|x1^H0&GvrOS zReOoN3wiC0HGT_u2>B`URPyWOOUSA1HQr<79C;}@zO&j_=_mJ7*)8s-ycRh+SUE~w zbBgjT@;vg>!s{;=1oP5r|B5`HyyhlqpMSF22a^|(k0h7K7m_2VsQ<&{apWcBg>%%t z;-(sZ+^Nbtl6%ipPLlhR&mhkt-%ZYt-yts|_t;G1uX%>X`zv__`CxLAdDzTNa^ zs{RdfjQl$}LGIsQ`%95WlQZPw$T{+jfc|Q3_ z@>24Jv?(k{N9tws|c?zJc4{KIZJ+l-1{l@|JMA; zvA=4(1bGrUL%xuqY+LnTMm}8__QWk0U=yPLWq0sQz=w6TvG4L6-a) zdBj5PZ;$QOUL^kwyrO(>^*Ob_O^%W`+Cl9J^2y{B`5)xD<|`|VN@)Q5k$(I3qopDaAFO8!BC1$%Nzw|yTlwrfQoFHvoLw(uNtmij%9 zd+b(jPk4NV$DewP{`<+6l??@4BR{eLDPZJh-%lL4yXI${5q9%i;xY7HiErO`Tz(IY zZ)|!wL}39VKbMi8eLr#%^~PP*m!`lo2%eD@^f6EGGW7QS z)BW~Ry|L-Dp5A5X3r+A0g0HDJ&J5{zJcZD^eSTa|dP|gQAHmu`(N}+dqJ}x6(iTdM3k9>^L3-XC*cj5I}Bl|9g`Lw9Rw!@hZ+4Ln( z?{e7xGD~Tnq|s&Q?fc5Bj8eVv;ll3y;24jy9$SB@Fq`)C z#bxwo--mvR{TZ*aZ+U;ad2I1>UVN7k-@adcj_7ed8k;`p>0O52zK{JL^~R=;d3u+j zx9@NNwMP3lHhtXFy9~X3-}@x$jo%h_k4IPI+4seNW;|olyZMXDAI=Z^e)+-s>3EF0 
zs?X1YXAnF>z41ngj>i*K_qO|m%iD;}zRx~E^f_Wk;6sW&!#GK}7S{kREq!kpZB8P*!0oR+CG0SBmXS* zJCD`=jZI(j^e%_a!=SzL9$4uIg>Qi3g}R?yBC_r|5C8=5O3py{%_4hI(Vu=fb1ge*L%{p8u!9 zQs^N4*30;sdSlaPJiW`%+xi+CAEM(oZV`6R|5rTT;?Q#YX&%4k z@u0)X{V(zO3y()0UhaRn$G>=d@Db(y^F6MtEw|hJB;{`;DBv3T;WFmO)*D$%^mu(V z-p`A7rN@Ye*H8EL3h`Y=d|SWdOU5_eBw3#C2_C=g@eXz6{?>oIT9S|gM*l9Oe_Jo* zBGF@f#&3A>HW^d;t@m@U=#j6n z>7x<@1q^)@AO#G)tq)XjlJQU=7n`1J`jkjeOz$%E zwqDl8N9*{FO&`IIqjwp4TVHE3^~RdX7#_d6 z^5d6r&Yv3bcNuzH?`s(K#@2sQ{tez8zsu0u`e145jo%XP=z4Yg?EPNLrUPGL8nXFRs}CG1E+h5TGbd|Tga3F8~DD1Ya4;}7r{@%qV@ z@s6j$lsCO;|o2GXIwTEix&?`8S!jAx;oK^=kt~2*Qe``d|Za!)~~yOdSmq0mHu<= z?@8*7yQ(iy|2_4_rsr_m#_w`CpVUvCspI$cMG6oP<98W)TYv98>W!`cBzB~rg5G85 zZN0vz{M|L2|7`jEM?!VG-eu@*eZM;DjnNkSU1-guJmoX}H$z#s8)Yy%^n%ZO*|6&}xc#!qz; zFXF{>8SzS7@361@9U1duEW2Sou0Qf|8G2hEaSZjwUDexqikDGu+*Q4;zxW~b#ycO| z@%*~^cQhVbzwtoPhvzHTbF_R4-s`!`;e2qt$JeR1^&i{x)p|g{=)d4F^tL`E{tl1+ zjpqw@biSlCT-*8QGUD0#l20+7@i6(jvm5Vfk438IBX*>qLcTeN5x;bsUe7;>K0JTO zaO)@ApUtaR{h)x6UlAY$jQngp&Yo$FZ#+=gjlaLg&?Empd%tGudp0t@@ugn82R*jTy<1+NNp6TV(8+TQ2>z}?ry|L+&=v)dI{U-rZ!06xBOO4Cl?J<61*fIWsY$#yp zU54J)SG|vVW78)>b-UhW=xsgLEl$z?jZGi*^e#hh>$jdty>VCd`G@p=@jms&rjKAp z3M!1>W%O_B!|pLh^EW&Z#_-4SaQ%^w%h20;w7*kt zZ2FAEKmkMVGW527?Jn}~82ucEKl)F}h60A(<*@%pH2;~@+xoXR3wJahkuX|&JeLvA z*2{f@@r?i7NxY&L&t=53^>#;`#`BHq@17v+=68w57%#>jt=0~}7{AM5zFfbz$oRIN zujxxFZ<{}tp||yZH#lAMGd6tzJ5o@gf0v=R^?@f-Zw!CrpY-x~8G2h!coFr+rgwk8 zME@>BZ|e^qHdpgEzCd`6^l!!D74&pzACJR`XX_W=!+6G~Phdw1D&*rb^tRq{k$PkJ zV?H9@e7Fp~t&e=@89E+g)5jzR3K;#n485(VJdb+guIg?5<=3eY*(A)aXmr!qP`nace8G2g}y5cPD-}tY>?)fm>V~Zd0;=7Fa zw!ZXa#y9?;lXyihp3C8UaeeBFvpHYn!-U;@Ek9RZBaJR2o~>_v1LGNQa87x=Q65|T zh;uz4zRQT8;riI8iyrrDW78+R@wyDXt*^b9dgH<8mgi^jV%U*_3i-H&tpdlD(GE?-qt4{L%lKlyVAd{XZ|?##=Xw( zcs>dUECm(vaT)p8dgud0kK=0$f8-MiSwZh|sOS3W4W=h|SFEu^c^m*wL1&sdl z04ZSfZ|k$aM7=Rg$Ui9?3K;q%KnfUoThIM>>WyLViayWv-v?Zv^Jm;uy{#8NfqG-p zr;w}^F!Fa9`P=&P4^eMy`b4O1*Sie8tw+D|g_^&y>64z`W$5Eve||Fc#-@*WdY7TM z_3j^|-ngrJTOWVTi!^`ZuIf|F|7hxsyQ)u9|2*}^rq6rhcR8FtuFt>m#k~H=rcZi$ 
zm!Y@y{g0vExT|`59>6Qq8+TQ2&kyK-3Fn{B3+Sreo;PqJ^~R=m&zBl$bQ$Bf=MxmD zH(udyo!k$-`7Agb&L^K|u&L;AKg^OXpQz)Hd|Za!o`(>l-gvi8@=1B|Tt+;5o_|b?k=~vk@hJ7irq5M{@;1H8=--|<@g4QXrgz`Jh5bA4hoHCTQw*EO z>x0jyFnvP1Z|mP>=PV52I;3={FdlT9BL2?`kcC_oAr`Y8REQEv=; zcs;;b3K;r7%qR6NS8zVP{tKbHUGH+3Kc8Q60`&pe`j28qiuLbum_MI)@)-5@{1ej$ zy~_12hk8B_W$i09e|uhv=~F6i>)&PQ?RhFQsW&!#!aM#hLvPPtd5wDG<*w@Zd~(NM zBYKw+&z|2hPV{)aF~%;AX9_z~P(kl9^!B`$`P3W3ANn+Qq@aS{W$5ksFuzf641ehH z+*lOTy9|ACBb~5quGajGO`r4hE<G`bAE zJ>TYY>WxkB-cO--8G3sjPF+^VZ+waH98Ep^Y4M`Y>jUDsjCl4uo%M*Z$hS>0>Hy)4L44Jx}O4>W$m<*pY&&Mt;N`4#&^u4fVN><0r!n*F!`$ z6w|v5y*;mJC+dw&pTv%%cNuznzEO&L&P`#0{Y-k$$-C-ugr zFM8v58T}{tyr`e3H#U92)4L3Pl+TMAd86iUZ2G9DcNuzn9@WXz8=KypuNrA|In1B^ zFQ7g_Hhs+N-(~32^k4oa&EMGcaqLJzh5lWJ-ky(jF!jdp$MqJM4W&)57&7$sJgu9k zH(q^y$LDj=(IX$15zn5-H9_<^pN-KK@+o-VM{yZ?dw$m~)Ek>VjU6ed(7(&j+w;CY zquzMyo6E=3?D0a6x4fm?{}PYC^Z4Lf%l+^0xX*3n_Te65JmLIogVD8*ui!AoXU{L2 zBl_@u!RML1>c!jq_VWHLei1uTP+|UDMtpl7+G(N>j~}0p_L>)Or#s5y&+{1ZF`lHX zxq@Q-xs3kod28Q`9{m{)xwAaJ#mivFiRUun+4I_N5k2A=_s(@3FQMVu@^cyS?0Iiv zMUQyKvKx*s>gin$$H(WrT}iz?FV6H4Pwz7H_B^?dsW&#gn{N&Lop{jO^XLA0m*#KW zRlPm0ZWi^%rgz7S{#{1@_I$g?sW!~+3eZ=eEW#~(M{@&Nr8=F4r>0J){&sT>YJKv-E+w%`hpY`-ELvPPVxR`ol z)8{?C%h22N6iU<^cU5oCU)bkf&EL4IdV5~Ojno@=Rd3ICsJKu2H{MEk4#$ql;5Ja)vo1q#&yzTjdgJ>$$tTGi5g+-ujCl4uit77ye8%t(@27vF zkMVgGyHIc3RlPm$;$-TLO`q}lcNzKH^D*wD-q`eMZ$4dy-kzuNBlX6nPkMTnp||I6 z?DzoBPd<;s^zP>wYNXL+=i3e z{*wSHFNpu9a%;5p!`~y@`r#$Atsh?Xp!(bT;cJj>{qW7nwto1oWLrOcKeDYKKAvpr zhfgQl`r&iQwto2KWLrP{4zjHu{y5pz4}Xnp>xX|rw)MmRP43-W*Lz#>AswHsAHETJ zGVME%ZT;}Q$#ZExf^6%Dw~!aoej3@<55J6T>xbV#w)MlGAlv%kZ;*FgPV@bYd>r{d zWLrOcmA~tFuAqHWvaKJ!GuhS;uOZv|;p54_Qa^)i>xZ97w)MlWBHQ}mcav@X@Mp-j ze)!wui~8tzz9QTD;gt_-{5pGY1{K89@Tho4Kf z^~0|v+xp@6lWqO*=gGEy`1@pAKm0qgtsma|5gm`MAHE*h)(_v7Z0m;)C)@hrhmmdl z@T1ALe)wr*TR;4AvaKJUBis7nPlIJ@Y(3l;$+jNuKf$uTrLBkA=TT)_4|6El*26qm zI2ztr^v6B4+j^L*KBjtG4|6El*2BC|*wudqyR)95Xp>s4;}gs}YXWb09$M7H%PUnbjnlv_Wk{TUhW1liV~yhGTXzqdV>TYpfGA1oU^Q@+0B 
z4394m?r42Ud;ayI&+6-)@dLvDllO!6{OmUw&lsuV_1k^_h}SQd@%m@a+um-W=3{Jn z_x%#|E<&-32sIqlzgvamZJ=Xh-K3)qo@3h`Y=e)fFudl=vN zxlZCG!f5UBTt+;5zWAq%XN>+a-$hUFGW7O5^53X8Hhs*~y9~WOzkJy9I$y@7k9c~Q zp||IqUr)VpSM^aoFTL^w?cdn+cX;o&jQ=A3c)w)NTiBdz&s*4qY|mTRmu$~lKz#T5 zONzaoV@36#$Nv!S==!tgt)KRy=5PFnuzS93@KQOS=dq1fZku62RU1I9{^#@17s%-{ozP5B-0q-nc<7 zGc%I)Jlw*Is78i@i%|1P6{d;a=8qR0Fhukvbn{FKLthxtuc%MU4F#CI9- z?RoAmh#v8c;SYTlJ5o@E`8f=|J^y{xf2!UXZqP>@f9PF?-kujfhI(Vu7bOM?82TbW z3K)8OzWfE$8^ezNi?X4BJJQ?p=wGAW*z`$=rI!mxQy|n`Mmp^7|+=BamKWKT!ubR{Rh+=n?C93U4}mTmHvXc>FYWkW7C(gBLx-a z(`D!*`~XQK^~UfIj|X<7pn|^WF!ZHwwEx?vH-;PZZhsy1U;Iw>UsG>vdbhuh`p^HM z`aRyz@f(}IBsrjf!~R23M*g`URevt^#-{(X&rju-ss2UlpKSU+`+Gp~7uEN9ljkGZ z^jRlgN=@XvbW$1%-RKJLNW77w{ z#YYOb20t7Q^&6{xt+zG*fNc6KcBGizW$3g0R9{QI@omC$RNAKRCwnnHUEAWhjCh63 zG+vK)G#}#~g*%ED@&3KkWyC8E)OeRLo-xLQd{SX{?c;M9`o#9Ce~Efy(?>kL%g`sO z@AEmAiE<+#LRrR+}Z*2Oar*}EbpZcozwf_>?^hrH#U9L)4L3PiuxO= zH(v9Dj?YI|Un6>#5ii4d6Gb1sza+okNxX=6{kx2K**$c8e=TY}V=;y2H*e2v$Jgah zznAK-Ha*$&35kOO#&}$YzC{0})Ek>V3b7Q^M?+GEJ~Bl6AMl~(Z*2OsVCdG3vjk-uUQGI-XBA zf6S-L7*Cw>t`I%u)A)l<;w8QLaT)QljJNz!jc2^QuzUX0cr4QJ_!Y4u1r_pj8S&F2 z_4pmh_{Q@)iI?>5-!3CwaUYHMG~*eg|9-MXupc zkOGE2vajj~eX8?i3_J8G*-*gHy9|Ar`irSIHhmOgDW;Ex)ZTxV`Yk`>@gkc(>G-36 zm(hPvqxqjqy|L-t^9_2Jp^s4i4)w;Scl+z8|Kh*2|7}0l{EbbYl@w9H&}RWs!012y zk?PN(-WYbwznd@g#?Od9{=R9?cYTX&&v%8s9zR&V5PQ9W)Pue#=TX8Py&j}EKfj3` z*QfDLUzT4_2Y8IGk$+S+6fp9S0;GU3{tV+EEqcT^hW-CEe$!{E|CM^<315}xXYryE z7X{o=K6%DFN%Y9axX?*FJ02F#u(W3D2PxmBn-Ct411KM!X#3Eo407 zjlbzQUX*AWRrEN%#eU|z&sW*P{f0$2>@xEa^WAxXR@#LxB?t2}NaaZ*P>Ss`I+*N&%`iH4E zHhmI1Qcz(&UB-OI57gt^?+4A__(0(~p;CW9kNJoyw#9Q9@sfUgj{3cLn1a!yozNWkWH2J|t!66Njt*bLx#v zpLG1uzsu0)sNZ6lj>ou7j~ywfLcPP#M{2eINz@y|4f&^KLot0ir1tzX)Zak8vFT&j zkz#t6(SJUv{r`)4W79`Nb-UhWSHJGhygx9W=_zmP-(~2F^goDtW7EgH{#_2|uTJy7 zkopMO^m$M3GV}%dKTEx_>64z`W$0t$wf~=~H#U95)4L3PochGSd4D3CKJV#WhCY~} z{ohQzvFVeZ-eu^M)Ca$4|Hh_|dwQ3l&r%D_#xcNzLf zy^jAW>Wxhw_3lS5Lm!){`Y)(A-t^b<``r+ar+R#e#}9j4^f>sfJpQ^KTmD5&ukHGA 
zIUN5{>adIGaX*caP4CWENAs7W|0&cP-_uz>-oH;29Y#KhX3eL-c*d4bG|&P@J}$fZ zp8wJD_N= zZ2G9zzst}Us6Qa6)ZbsorqAf~w4FaLL!X?k4p&fbYq2hrXyRp8Nh6`M8XH3g>CO^BB+A z^kE>zgWly(&l}21rYD;|hh(IHp?4Yj^!eIt-*{`OAv?7nUOTt@#n_P;`J?ccbo{hR;P zPV{bn$lnTf(SQ6x9sf(z8+Wz;0`=Q0r}-OqRiC^_`#+C*W7E6&c9g&QfBV1GAJ9kh zw|Lh7pB;bmzrPdxpXFcT`2VuJ_HS(ccU3>@f2n`xf2rTSZ^!xn+5Dw1*5iLQ^~S#n z&#}QvZS#@B4jJcz%Q&A)muS3oR$xA4)4Q*?(7OzM^itIyOTDq_|7<)t>ffi{*z|Gi zIQhFA=6{*?zvqg~pKN;f{vYP=#0&GkLiIONA0?Y!dvCv=yX@-wuB83@`akPGai#X3 zqTbm0kFrBB@^=~ir>K95dgB!$9iI;|FP_VYmu0-IMUVFj#W$wPoM?y9~>eS&&p(<99s%+T-OUc{l$xJl@3PfgbPYaf8RlczmA6*Li%u$MSX5vi#oi zSU$c|W)I|mNmJYLkhc$IypG3Pc)Y8}H69<~amwQpJwD&#n?1&3qTGBJdi=V_AA9`0 z$35il+HSnHJ>JseT|CCJjjn#O$7zqR^!QGXANKe~WyxC02Oi^VI?{ieJtEhx%NuyS zt;hJ-h3k)hCUE%_k1zN5R*xU@7|YDK@m}@#9gn~B7|S@g`prDv+2i3JALQ{79#8am zhR11-Pxts7kFW6fdXI1O_#uy9_V`~Of9CPG9{=0pO8MT7JAcc2ysF1*dAy;=n|VCQ z;}ISo;qf$&&+_;xkMHyNIgj7-_&bkR>{UMgO+6m$@i>p0J)Z9IT#wK9_$rTY@%V0! zAN2Tfk1vq#`CKU5MY3Hi+a6L*-ZHGEd8)PzZmJ*Cc#Pw0wAZldO%qxho11ED8fFfi1?`YV2`%L8&uVRh zF%7LFTN|c=hqkoT4r^$gFxg;utZV;b5- zW=Y6M!cNgi2$~#lbi=gP=9X|ULub@YsU17HrJ=5V8fLFCj8-2;nKhwyaI4J7`01?; zB5Q83R?J{|hvK^5jE0tpQ<`U%d5)gm)Y>?;Ts>y?G#P)H8;(O+!+~{Ernk2YA317F zYh7zY?a-!a(>3q5rqK;k{vYz*Y`cvlOV`YY(M6w&gxq}gyH;q3&`E`)mK2$*>ITaP za|tIvfCYddv7Y{X+XjbEAV`qPv$}g#R#2kc&28MahkyTD(;eyByA|vG;`&-|O#jVa z-7>t}Zoaa<@D}_}rqO0q-In1AnG|`) z{pORt?*Enb9X;TFbKG3+MSda8e7U)%FSP5oh5d!=x&2FT)3{%_%_uXsG~4uA*QPc3 z#jkp!cdPovdcE!c^6+L|el`2ayv8R#ueWBCn7_Z>f4->>?nwNxS=HM{Gik3k@`vWw z{o_CV@#RfE{r&8zS-u_aF9yDRrAymG`OEw5M$^_!O^23$Y`(8H&GMJB{`0UaYZk=+ z>J{tFcJycD_z#TcRe31c53)T6zkmC2^w+nWc02ghyK?jOru?cuGmEQsRZB~Q2R`Yq zoJe-zuGc+2`9=RYOg{OkTn!G0?>p!2MmCrKJFEKZr@>!MPnXqy>z(v(sqLv-Hn;0$ z`Bfj5t1CLW{Kje3?49xNH)iXOfBV-}*)+T5e!Xhs+omnsPQB%L@~gx8uO$cJ$IAbu z+HMbfCjRbN`C6iU*@NXg>BoEh~%rX{>FFq+ckn_DgWSpCw~-azFe90LeiQ2 ze5J4IZ+uaHUeQJQc`fbetE=r_^0L0#YdU>(B_p`H*_fYO1jFB!@?ZLU?A!HrzxpAs zAo;%X!1cjM!)3i?12#y?-Ilfozuog-xgPyeI@^3NWex`q4}W#V5PwXYwZC2=fRAt6 
z-!bAp$G^MT%>1@QDxGN9{L@V1=J(8$J@dZTyYJHaa@Pz$;(qPFEjI>NEdOOTm%aJq zS9Y29TwcxccBR)m$TXVGyky<@^IDeT`)+k$wakyd{$}>&{%}(*D~`U|-v9YNF>PhL z^84~HU8~|#`Td`|e)jImvoFtHK6~-(*Dudhy3>2T`%5p|?QVH}v;U^DZ*iL@s?4Gy zNRud6@oRSZ<*j8w6X_xh%d{?&I4x&eQ!O*R|F-39zNL`E#?dX!F#<Ov{rFD-5q}2bR~a z2V4vPZy#QLJ`O6$!<-{r z6m^jmN$if3-?!@jorBVCl@>u(MPZsZSy=slVx__|&Kp&=xh{1b*G}sCl`5(vZ?tHp zoX4ujb&YgUZL6~?OornRgXK~r%2KY8{%es)!Z6WE*ra6;IEm=LFn&!NJ(#cYjXI9g zstu|lD#FRR=svOJ2CV;-{sn2JlekoxFP6z@(}nELY5%&H^CFB|29}g5Kl0J%#xL7m zb}z?ekfvGOW@(|)G#>2?KUC>r9!6Ev)NvMU#Z% z=2c+ZufLV@fhZ26SVj<}xlY^2e!yx2+v|PaIwwN-X2VWjuX#lWDvuK6RF+n;%H5SY zeIyosZh-SM_uZ+2$d%M3tXT@+qnP9RiUDbKr24z{L98kCPacp+hvrlhO zG<#j`?W9+$J*wNSbRt4&qp3EEO3r?nBeR-XJC&$Y`QmsoHMY50uh)ykFI$W~>pgJ; z(}`AX9db{_sY)WhY0SV(_Y=`dmMS=0Gacp>(2KGva}>R>X7j{l;ieI{PEsd-V_8ELU}8Gqr_wK5y8(1#y+bPdzB+tcVtunKu%qN~A*iytK!GW;EQ@uJ zIXQT7bMzrC@;0lApk`_OoL<^HwqDevc@WnssySA6iHd<3#f)1lIvbAJ>K`vB$;~r+ zTrqePwpU9eHl~cYX5@yzOJ=583_vs8GUY}0Ad{q`JPO(>53qI8EO$rx-`f3`{8KNj z^rVSUr&-w4$o{BI+R}?kX`+zZV=00x3~GF&P7qKb5+#as z)(-Y@=qATJ8)nY#d)tX6xB9Gl7`y3{58J)6w;oJBp>u{6hXFggsLM`eu%_8SsLKEr zi{285vH67+u_w{U1J(Oy8n2SNC{R#K&SRJ>w?fI&G+wtXR2F7&UdCZs`^k~ZLR<&& z^okXsgvDqv35z%`%Ot2oFY}HNE5+>dVffm-cb60k+d@^Tu3N-*TXjANBMWUS zO4*+*+F{EC(RXyVs*RdZm#TJmp~(8;y2Q2oCmw}}>xM_U+7M?E{+c2PtIX}p*+-+M zY0IPx^AguXo;bUE_R&~0**aHYo|l0xoCj$3(G-=dMc0bjpl<5aXifw-B5GV5 zCRFA;kTF_QilSe}DSEZN4s3+j>`eT__HRy+?*87nv)#X&rr1uEvoyIG#-DvJ;xVG#sD69;Tw?>T>( zo>xgzWnsY94FlYIPdhDbm8mAfEyRh7lgz9Ca+)OIfGI&VqBAK(8GS8+B;1Oco&=>1 z32s$UP_>=elv*#h6HPvXiH936{f9TNW%ryoxUY0|^HrL6mZq+&Cf9kSqa;O7b0bf! z(K^@uhtCgbzQp0yM0HH_7zTdqX=-fZx#qiY>^=J*V;v-Qnv{voaBw(ze#iV^raoi< zVHo8_+g5RwqDs|Hw%#!S(e@tlfuu;wtkoLD0aK%N+Ql6o5LCfKJ`m?LAJ*8KIesJW zFiNwNOW;ry#BlGq25pIs8Hj_ZRW%M|E1mE7f%}T61u2 z8~t*Fn{)S&PefUau~%d$$~o53`9U0kCHI`BA}t~nra@Z9DryVAl}-=h-FExu&GkbD zfg@Yt&dLxKaUI3}G@t$iUNM655BWrj)_^OciSwdtL+?tuV<|r0ZVu&-hkOEkw<)6- zH#(+E>J{!gKJmN0+U_27tm8%ph?Nc_{kuqkFD*9OsA)luV7oOIJ?) 
zy<-?XIMY2p(L`y9QG$aa4MXl3ck)gT!vSs{vKk={?J^6r5+`t;IX~?kQ}spg7&i~v zR8>xFUFSg%HC3f5AJnz_=IQ-D7Lb2c>w`ozU3^l^t!*^VDZ$CU_p3;;+5}pX1 z7kNsY#jeHE_{s?q>rgznsZyLjuc$tWm8J_$GJpj9CjmFL_uP4lC=93WP;7653fSUI z6UU4=kIjy?7J_v~;wTj-oBaDpi_N^`W6|y@W9*3&`-i;R+-}T6{ZIN;A#P-V4p-&w zPvh4EkHf>0U*B$O(Ri$NChk2CK(Gz*Pbga0zeH3L)g~&gqGUV`_eWWTZJxAva?%(B zk3aSI!8}~UZSXP50#zDK8Vf_=ZfmoFSw}uvyj*&xV%e6%8!8TN!ZCcj6M|&qI89vaw=fGyPrK_xqT@&_1&_%IKW)9MLovZ{)N~^ia;M z0aFc*cHj1YBYhbJ857L!w$8c1Fde;jJF+&yV5Fn9rJc+;Q&$ZOne%d#w6)S%DuHS{ zq6atA@+!AO7!bq|)xWoZiSak8_@lTTyW22{9A9mabuAr-#ZImDqHM)`iE97>}TfJX?3CimbzRs@=T)8ELp8w9r* zv%QN_8M;zrygGY-@cPZ#kJ6ysV4Tw7L3Ji;N#^3oBY zjk*d5bKvO5W_L#DCC!c;sBKyYA!Ec8(HOeUO>#-Y0|#mq*F~9W4J@@l_jP*dK&5sY z4a~URf}iCN=5m-qYR1kfl~&+O3&PVXtjpR3CkD+MvCGyTh~#@V-^R45i$DQ)!y`Ip zpt;^3caEH*FYKdxexgQ@fip;PAOYD4z2m|jg~3T&lX4=BORmX18;?|uC$|7Y5avnN z2@SJtMbn&WYjO{-_9T4BNoT)-w&lnsC3vDbY#Og(O2<@UG!xM2OTfDIpZhR(hizH2 zStSBXy3C*EFGS#IOo2?onU4Dul_e;AD9JR|2Pm9Xee#C8mpCC>fo||9^{-&FAaO%p z6lYk{qr6idO*zwfsq$2_%e{qkltTS?V!;8>WA0gWz7pkw0Br5AgRJjd9ZkYw@&0E0 z3prkI$>0zy^F5cUs=)-MX;{Zmny0E4nN-34c29}`2?v{k``c!z2o}#Kosl}Q|Avo- zWHk40bR5VAP;^barK-zR%|n0}6)s2P?9CWt33pbSgEUVw#G#vWyHRD^%qU_e z5;Q)u#FX0x6i6P&WL$U=7GKa1hI+<*mm>t&PhOy30;j5)D#?lv9|;~M0EKpN5^KEx z$OIITBO~f_s4<+K=)bVBD#Ewlre#wHNFuN4T-X@)6R1vnO<_X7EOyt}iN+?Vs8Tk7 z0D=f5A_@kIuQ_!cv`{IK6do=Trri+{bi37styLqc97?o!BUMx78n4UrT+X+u!hWl? 
z(>7@Wbd1yjf#(f+_fW)0K*%o-H34-nP;m!An8>Mn0;$l`Q=*;#+(AWD%|LnRTT+)q z(6K`z5DGP(V*$AMH8B$Yv27#obHZLVU|v-;J{0&g{wkb#_))NG<2I>MpQQ0LZD(x+ zAeb;6(jNKeZ-*zD6F?!DEx0wF7u6nR_;;EJ3VCziVo{hxqPaH2zS9Z6L*g)YNspOT9v$$0pTqpB7xwYh0K z2i<+lcw+NdMys2W+Vu^rHgvY=ECf&nACs?4gwNiB;Z1W8)g@ zHnYrTS>dMw(AAWg0%3YWBT?BqL0I>p)vH`-QE~L=yWTm!d$bHMBSIxeXk^_iZmufV(D{oVr3RE4#G0 zGNDBylUg$Pu^aE5gl_7%xvoj;`eD@>bYXEthD9qcW0wpvhZFC0rJOpTyqTJ3EUF zk(EIX^d1K_IImN0S$M%E&K@x~p4}wXvzweB$V-Omamdo`EFwywo3=uM!Jp?kASW)C z8B)r?Pwh12@|z`e0KAB{hcgn~mLfL;9ofE^&Y0por`Ti>tj#kyEKwWlj1U4DW+i|c zA6v9OY%2}622#CPd@w)dJepLA={nDvRYg$X5{~dyD1w0?ZA}{TMad;qR-!oN72>$n zZ1LU<8c^`VvUj5-sPl+SNJY3CV|vtDGL2H%OS9AOt#@H+ShMV`nr`YyD@`)%mHG5HVYsu4%;o%3Y*)dFdT^7!YMQeCRLyu z@GiE%jZkLf9$5xqeudOq33Z9d(Ak^xd*42LfW8MAK^?*y3Ahk%Vcoa+>}I2X zkn(XLw&wOh*Lzy8B79c4;2Yv1sItC-bdSzQQEyJRoAvGF6SC*9fg@b7pd3iqY#%&9 z>D6k#$Jqo)!hhYYb`}fW?}g1I*|W#dcwp{>e9LLSW3$bBD0Je7f5=5 zsUFpJKSp(KdZ~+X$TjK?iFd7c89d3P2-BNt2GW+-=%cKUAk!1jvyug!Aq-2PorK!NC zObM$^vD%OeBFQeiT9+mHM?k`o0Ju}?;o_%W#RJp58Z;mu6FC@J9&^{)Q*OgSiE)QJ zM|O8fjRkLeHs!$LO16; z4(_ViH+}pm1HqMwMNML2s>;3_-YRh8Pbxx%&C*|Sd|KC_jzS_lRSEJy&3u}*K!T9D zYW>!pO(Sl?oP@f!;3}(mfzyZ+D*{Ksz zci$6O;RG)=F{HGN0INIo&%rVV($r41NYl@g0LAe^mmz3OW};cbVkL8{g4Nz-5Vo?R ziIRuh3qgTtL|ob(txH_+LKg!hH_d^Lf~#;Y_)AH73M`LUlkG*I9jw+PTT2qc3`Kzf zn1i`!oyB%(-+(9wIK0VNAxc1+vYoU`TO%@nR;3eCv2)sU2m8|28snHv5+G$nt`jFK zFKrFQgYZ+50=p-d#G}PVt&PZxD(>*8gdqYW5?`ykP%~>Tx?9R|G?8dMSA%4P7WTP_$<}?#FsgaK+hZ+G5ywRQDu9xxDSKIHU z@d8?>*;^cf;v4g>Pb-msPOnuBaxwvnnxOKvkvDdIujQT_D;#3zS2(+H!|pNFG#q?8 zI`>)r{Hy#~`rUhoJ!baT7a^y3yMg3T9&lrKKYm?rt8#5-^X295E*952jZUi3-mS0k zOqQZsI_OUn$?rG{F@39QuzA^F;S3_RH@?%8APeWy`+6_@J11nP<-;WSTFSDs&V4r{ zSy8DrMsfj?fr8A(sJm@3RYPU0t0?q`0Yn7+Hw|V096CN3zUU?)Vh%!u&@r1r_Jbs2 zS!y^O;!wjKmYc1oWg{Pr@^=e9oWM6HX?3sJ$m#{w5xC&|~gTGG#G#vlu}M)@#`S!odk2k&E0HB7HIXZ@`L@ zTnJjXlvpU%dhDgHh)Fd_tdHN7|BVJz9#-`i6dXw^7lt9v8c;*%5W#T;Vl+Z$^+Cjk zwR09!0hEipCA@7=9Kem^H|+E)@O*fxl%-IxfNNpi!vvo}3JN9;@pJ3GB_g34JKcqK 
zV2+e({`oF6aNk>WXWVUo83xTspcpR1C@6H~L1b-R!h=YJdEG$&f7gGQn>;@VZv@d)nGT$*8Mq)#o)${KQ>=dK=Odz6@@3eM>LwE$ ze$?WJICTIn3Gp{@IwIavkOLfSpR(0OEx7=6Qr zMBEX;Za@ktXgxReGOUPE<$h2K`@{@t2_9Xz(m;*@>`Wp+(Xqzk3liIh&r>TjXQt+1 z5;@9JFjxRHf-D8%J?ff@KH*>AL1v*pZntX{-CQ?>R)Km!l`BrcGFPgAFAl&d;Gcn8 zQ-+o?G;3Cj%MZW&&(|-%e9o3a3~YE|wB)fc5=mB+fSesrHWTjQ z608NP_NTFi{a>cDHyf8B9~E3dL^QV~3fGy)4~M&P8`Txeu)tkmI_TU^Ub%@+WMTDf z2B3)p4;#27kj)sQLe|syO}iGSVQYIL%-sRFk|n5N_Ubno5YvRNFpmTAIBV}~^G)Vc zgoXSQLTVEG>V1mu4U$$20zvc-ul4~i2D%5r7$-gMs{YpGT>_hQF_w#h9Tr0GL8>sJ zh#;!Uj-|%zuRs}KLCHLb=In=2lfPBf287K^VQl!hpWJ;1XRHS}q zdI12z@7OTk3)i}Fc{q+_VoooF*4s^$EH>bN(#gq>homg@DD>RFsj|<~hD&I)=9m(G zLF{Q^>!t+|H0*o8@HJ`tpkLi~@!f*pWzsE4VZE=q+Veg;nS?V`;L=3OVga={m#;u= zFT!)fE!M!5nr5WIl_;CG|I*x%@n>}i$YHD-)WwV(`-V8N?tDa7X8YCBkW5}0(5M&nTfk@Ft{`#9CzX35P6tM)i zk6;kD1Z4Upd6g6K5=t>oGpo|^M+T~>3T{m6OEwhyrh#~?OtsJep^kVJM&|tjV0eo5 zgo=t+G$<1c)#D=0G&Y4_XfLaK zu%r9#_KS?O3M559h!u*9GA9Ac4*uU^oB=u^U%7&{lRQSkLH5fdFZ8|X{z*D4C5V+v zA~?)4v#pJM+F1ZRkY12Bo^oe7^YAD6v{7$JD~5?7p^5;;z@5A794RXz2SPg%M+rWr zpjz&1Y_etH$R(8p^#FeW%rEZkGbgb44cDJ>u(=Rqmcr@+yIx2J7Hq@rsz1|;Qg6!D z&M{47U-f2r6NQ`t6+m$vXBwCQnp);;vU80EwrtTWO0{$_H$1Q8E5Ih$k__QyIk(A| zV+kV{NzWobHa1m>?g&$Sll)b!g}U*J`tQ8F-U<&LrsN#2{0kvA+HiV+yV-LS`;V( zketHJ5_)_3;SG`fouuISF=mOJMINs3@P>dI!gv|iSY}wPt?yBfxEK7rA@22Lm8AR& zpC`^*V6#c-7qR~a!7@`0$#^OJd$>;^0)*PP;U?ouIk982{VgY_ea&-no>-d+FcIHE zgZmaD9aN)ZUYEI6FlYs77SHORJ!`k`EwtD&M;?}BQ1n7COO%b2J)Z?@v;&|LDjqXLNFeX#{T8ssw(kfE3zAcmAAl+o;QP zh;}=dlVykAA%pcD$pyP3NL|}>pR5&bUqjYv`ThMq)HyW6*iQh7q8NeX_MyeG+Ai7G z&g{d{NHcL0X^gWFrnbg6+o_S^R6iHlX7?IW9ZGh1^XgLLe}yQ>pTj-a?Nt7gr15^+ z9&%GBh;nSal9N}*ro==caX4+=fr=-uka$UW&hWPrZX>cJCO1?tzo@aJJb7G z?=4#ph_`Ts$aM>!I6lX~s~Qd~D(@2hG!sJT)BK!XLEtb1W!}Nf<(fbynP2h+(r!cG z!PM4~3p90#ob#KV`+}VsBH1elTS2JG51aq;YPCBUjsaX-E2*IP-x7-fOGM45gMf&H zjun^#pqk%;m!<5WhYN5hv~*&na7Sqla82=3!?^_*LFok zPJA@xrxlU}n0impWBp^XAOT1J=#T&%alh+_>kEgQ01qB&pzOF1J&^g>m#G!zRIfo! 
zM@5M;j9Zh`@yi$f{CjyV({Yo=1eCw7uGY{oK?lnb!GFBde{pq~pDW^(+f8qBxO#{@ zp$@fSRv>%&r$SIBGxoRZP=C&4wC<}m(OeEcJfH&m`p~xIgdDpyc;Lm3YKD66o*6<* z;M>fFtOEG7xRbn4?&d0-t{b4mUARUPni>jl(HXj+!F)(by>cTZ1|_WZ+WgH@MlgTw zN&5^nSkZJ>hr?Ri$NaSUwdAs(pqP-_q&S0p^z_)jDTrj)zgOSOT`SA>lRC(P0vv2X zPL(y5s}F5-9i-aBSmSqVdl&PSu`oO`R7ja5?k1xcZ26OkiG!y&97tvoC9v$tK;{bP z9-kc086e7F#wcaDYsIbe6PEksvl~xm;g>zfVVrVef+7N4Jaz?w@;^1J{SzaLFc-Es z^?M_4B!*;yNy{Cuc4}BwTe;^J3pX7|fotNn{e9{>E#xM(Bsk!}Cz3f4#xv2MnPWcO zzCUHYhvHIz&XDg4F+Gg;9rU`}!87jQ*&RIW_)k4|e!_Fuq;c1vPZGlP)o40}lo-!s zmkjo_6=HAEpz+|7Yw!-CM)|$3WF9=Z#F*~!oNOzAVQ@{MaKa=%z7;6lc0zLW_qeFo zy_rACjJF#ZUgMKHo_s*VO_TuNN4Lj5rR*A%hvUJZ7>oyFPa$f%(PKY#+}X3E@xw5+ z0|txu6nD+&I=2dl8z8c?!TgF+sHI5uN}$3X4etLh(rSkQgf#=2$3GFkrmI zMxfg0fCLf*pJ{I+(%7-nP@4!~70?}zW^|_ZsA)=6Rc$j-x;2u!t%*>7BV|ZRAP*Y1 zHr;!k%+jiFmaI(FJKMXa)n(9NJrTtZVXhgsH;v-Y$nm3!Ob>A*5t@io68Nct(h>A| z89Q?`bmQd@GVIU%*u2A7b$!I4NfNuHPAB4Q8!K)zqR+GlP0?a;vss}_xBSVh$tQGJ zEIZe*VvASs&NNDySjcGb=(4k`Y!&&wnm$oi?Bo(kuO%%<^bxke7=4gV7i(s6don_9 z$WNl+O8?y$gsvpci5^qsFdwm^pm3*CG?tm12VDKEX-v*Jw7XeRMx-v)WBCnMeRiVG zeFXS`6j)n*JfB^h5@#S$cLE?%PG)kF zD1mU%n53dNd)Scz+D{Q?B3B;}ESSm`E|SqcJN2pFpbWK^`MXIE#%aF%2HCopB~q97 z7r2M*r(m1FEe*p@RY@_M@mZmW)p%4hTkeF&c)cNr-P?MSXb56S;;aCGe7U_&Cz<)y zDQ)z`2YcVrR7Zl4O!5+?*y;Ev;Um4rxU8)(%GO6)!O9()ikzn=NMQ4T8Ll~!jCvVH zeEA+<8(iI1|9Z0$+mBg%BS@G-Z+zgBNEQb76cCpnFMqT}P6CytIz?v?y|hk+xr@YY z7Al%4@<9|VDHKGgJ^sWG6Cllan05iDAMFV#(Jc2z^W0uL-4*7(3G^2nOP{ z@o=a9RU@^|`fo%<^d2AMp66=vxu@>al2xJ_3J*aq1K4bQ7)kdU58B@*j`R+wu(z8Q z3o1zC6|{b&GiegYQ8@Lu$Ry1APN{^0<+PQ1c0W2XT7g{Byq6OZ$fJM{2Q`H4vPTRH(=GEykdv z3KvKSVwUXYN4CZ_P6=Tun`tUOc%1*GBgO2h0Oo;76VQogWWTgE;)E!6RElN8#S|ZT z+(=tWJ0W!7o=5)#P&M1y2)Ix-)FmYthQf+_$5$|yqPEsecjV^FycH#?*(Y=KV}jm$ z;LP7BSqjRT?pbvesM zLVM$jFjq)?L#8TcG^>`bb(-(0OyWl0WvXy@LT>sftpJ1 zy3Io|FR1gB2HgsL+~W%7S_mV%X{voAoE@_K&C;<}6U`1GNChOEi|fQ$!imw7Og9qa zc~$Xn3sXS^0w|Zm&o1KLB%qEh0!M9$H{TAYHyK%dvt#24{25>cq>G3OjBlWd?i%Vn zTimYnnjTtG^MnX8^{pZb1qld$(z1vUDQce=5H~bZ7c^~wmbx3H*KBGrH6EM;NeT5L 
zxV)5vjGU1@eLS>v0bsa+;tFhOMVKq`SLbn>Qdu&+iF1hoxUpg=m6)q(IPU zavbacdNVwHRfXnV5uY}qOaTCKofA~&j-&|Yx%ZWMe`_K#e4Ypkk&6s27JPkOi0<^~ zGxNIm!<%K-h#+d)ipri?oFGA646)Y;o7Dt)qQFLbelycx<^0QMF7$&n)a_m<`Ab6A zuxNpiiaf@p*J3OpF3dXZ{EoI@CyEZ2VP4Adtx=7#wKC36hDle~&h@cR4INM~b$@MDrqod74i46PT^;(yket zV-WfhBd3fJ;EkyiJUJevZb--~yOy|F*b`9jqY#lp;l#?MLw4^O?ZxkM#DL)HcI?;F3kI5R2Ur1rar6vhdj9AZ+*5W8e$1k7_*7)=rByT2Z zU-kvU?2Iv6Tq2BF9viT}8Y_^#jgDVz87-o>C zST!txt|?p%hPgAL%;}vrA%xpo$%#eGo5mS4&~bsp!SfD-2|gq{*^XdpGNE0fSZ@}r zE8+y4tZ6W1jgmmc>cXcz4f_|%5JC4yB9mc`w9I>(8(9;I)PYQIAZ#%lYfdQ6G>}?) zAzp!>5T12nxD?ock;xhN@=Dn5mHKe4H#SjapDZDH5Ca4nI)NiA>eK5GkCUB+2PZuQ zX{2LNldU3$keR)>8B(RG+YP)6lgyVUeQ<9`QHZ2gxh5kj11Wf6Z-n0;R(DkK0)51n zX>s+W%nq4&wodGh5w@>NQ_Y0}zL-qQj=ajO(`uV&iMcr3^!;F<PFUm&=$y+8T5H#Gmc@rX^vk)2GsGn;`vZ8d?jgsyN8?QCx2JP7QBT}f&#+i zO{W-z;0#H;g?AnU_h6_SBnfy(RMIra3d-!E z9{%KSC+!;MLcU9GDKJ8?#$>xz02J(Z9o*R7T^s>9sq8fQ@%ezwGQG|^&#r8&LCKyl zCPc}Zg?1WTR6H7)X*FAN#JFVW4^eCJ5~P%GfOsSst%2F(gL$jSf=)pkQ)?LVrDAks z%=0R@E6qeNds_}43Wf*iyCHIp>u@;ZcfARhFIgHmu`sbfJz?<2PxW6H%52-pT` zRfl+cXTCI;F|kj;0z!)!lVTXS=kt+-G2c0bgFHdPVs2aWVsDwyPUvI=Z3t-ufjE!r z)C0JG@<(B4hgAipRhR%2u_!w;Kl!782=U@mg$|z#9G2bA9g^iQ2Y_mRq@VO$WSA`Quc!#O1gx=Wp5O6#SDZ|BcBM0!zck^I>eW5!}WK$ba(-4z8}PN!>s@~9}5Y{-%isfK|M>IDac^) z6=FEMSuSiD1EVlJ^n1;dldq@|wTBmcNK2!BjpFsc%Lvizj1-OO5TJF}dMT z=-;Jg^5T!OM3Hd{UK4}U0QluC_%n12C256+nQ+!<%FSEJ@_$k7hlfL`Lo|GN_y*Y; zUQcyo0fXSJ7{fS}y@SCNQlI*Wrju@H6<8NxS1<~b`30XQX9d<(+@Mqjz?az%f1XNe zPdNlc`gxf<9+Nf{uMGqq-c4=7?)P}PfOok~8lhYu6RLOjnA7?g=9`AHP?+<8gbCGi zLha-`GuCVy3!D*h9bovBw9vc%=SJVml%~5Q>o9}0g!Gy;D!`bNvF5&F6MlMMIf1jk z>tY*M2BUxzrw)WYHz{Jk#pK0Mh!2zu&)%9sm87H@=P_sq;mSr2B|-xWlI>V-uLaXF zc}Z|cz+N^pCG*Un%N^A+*lUg9|Fbr(eecx`C#46rhv7^_B0>SbGW5R`p`*WUE4iGk zhko|+O(pv@+TVccrD+aRChYOl%IplKev0(565LEsoP%yHlwW+fe0oAonD3OeMB~Ax zh^XK~MWN$4o@Xmi!7&bhmT7fCEb9|APSFs;6QJ}c`T*}=@6LE^ z<7dZ3)feajA!tJSVFvZC)A>)IDcM)z!!6nzigr!?D(~sOW113BNfZt;!9Ex9sZ(%I zDWM}YcxtXXSAeTmXf|hm2f-lH;P#b^%=}@xUG9(zl;bAA4dG(Z=t%jRJ>%{s2DPWc 
z-?x7WkEY47aX}hQOcx(qg!NR$U6df}o!@?6J^HWL&V#y(1DHag)EL4Gnjp#McQ_P* z-%N~TMdFK!y$9X?k`vnGaD}BQaAcrSZ9YH^akOk5SQUIns7Tp2o~Z1N)>Eq>X>C{z(w7626hYS?|DMKB`VQZ z8Kv%Kdp39jawWjIZAHPpJb-)j{2R!VCJU|2z+L5Rqes-0wbl=JsiZ=Kj;s;_q;66l z-7N9AJgI;y;mkysxI5+1&DI&&rXUz7rb@K1@gbF?!`VZq{x#jxMqB1qGM#&>4V$?A12M`{RzXJQl zVDUEMBEjSRh-Di{ld4UDUZTB$?V&*ZXl{S`*dYt=TUr*B4pBZz4WudV z5zmM$cak;gan;&1W?}c))|MB{0DD;ySXkjz;BCBK#4hEAFeu>d_KH$U%UwfB8+ptB zrO|`Ga&7eg)qdc=^=_*h^TrJs;RfkwOzrMhIDhr(D@9g*)Y@?1@h|`RUb@^Pb^@Sa zvafwt!K<}wM*~d}5S-nVcfE90nW-A=6l@9HJ3^u?#)KKYrIfM`0n;^SuNh^c2bk_r zPn|%4Pkb`w?UQ7#sK7{JT=x{2Q)otZj6d`1j>{FPN9GsU{+Ngg1ykIIEwjtLhK&>I zV;}$iz>&jFLrlL+;QPpNF*#XQ{M-(>!NW2EYzO&)k_70l=lbkz$6{u&&u*|djBykI zLL}G=mru{}du%xjM!0*w;h~Tv4;GwFMCGB5Np-m4!)Wxf2kmpVakmj`#({-D2hK+~ ztiEN{+sY@J>0|#NxS|oPm_if-1r$m^hvb{5-g=tPdt&0E3PCJnFj2-?VE~-R_B&C7 z8tzR-GE<8#F*W^cQdGMtG_`ILa+aW8%pkSvc>{+Hcrwh+4mBvlH&=&HS@brH=Qj2W zist6Z24+hz!l+*Ok}3m_yf(_n!+-j{=YyftFC4w2Y*lnn$s^!hWXlab)u! zxIa&*Vq>R;OfOz(!kd&M1*~Glg;CXQ5hs{Ob_XoNZ>#pOJd`Vm9udi)KpeRv2K6=I za_zm8f79i)ynXg?p{iww%Zdt5pf-GTf6|GNRGBT1-0Rg}+w+aG$PGkU)DKlHalXE? z-|Hms6wD+8dF5C)0oNihqYye-XN!+}(R(4HLURm(1u-q?Upx}(ju)_{GGROw@)?b? 
zV<+21znp9ycxI=ISPO&T$8vXAmFw64CEk}%|CeZFk%X4o5+WoB6+GlJANaWj`E8$w zMoDVhL>7pj?Y^#Gavs4oqqV>nN#=`Ffkd!HSigcy`T>|B7IYiFMp(?qYlD#%(gJog z$<@rCk3CFYs;#Mnw1$#a5i1l2Xw(zw{ZAN_kwuCL&mk*9eT@dXN$;;QtHL*S;fNuX zU&$=ycoW#&8-b8%LN<`Yh}@|lr0Kc;yHBQ9`l|l6b3bJMu|=aG4YYXwfmhh29Sq7j z6XH)=Al~@&j5~qC3x>}b4kjVDB@PM19GvJ(1L1Zrc!SMW`pBkchvO(=u?wG7Jop}u zqnZjn;RuGMtN=_;4Q0g%aTGr6Hh3+jYcTyt$ptWhOZgj!7YYl?)Ckp@3N$UiP6_DL z?O=-%Fguua_i2>;{k|dG4jTZ-pBjiQ1i>gyiKmQMZY)t#IX{aGeP=*4U@Gw$uu;*#)&J#HbmNm}q zSLpx6S)Gz6SQC6eTmqymd(nsEi*Z?T#R%7u-od-XTa*fh*b-c}E=i8@2^X@*`;N__ zuS|XU?bGWQua5gGZt93swcOX64vB2$xH(Q9p}$#iN>u?C81qMC^z;K(fvM=QDqH6Jw!Qd@9x zIM+_!b~r?HB)ewDyd73QdQT3;^cpaZ8|RTXPJV0ogF-4I6dj@NhGOk>-#abL0!%ZP ztm3?p<1S?Aqc zx`t%ZLXw{(@d`*;l6y-26Ae+;S8j`LX3ko~;#a2lc{k+AE*p(LDoPd@o3_UOyGNH>%QkJ;tC;PHMv9N(7Su2 zmm0{Wa(F7HPyDwOjsW&uPz>oxvboFSnTE)`*i8Ih)SeeZw6G05o2V&?Gqy{ab(D`kxYl3G8fM~Gt_#z}JmEQ#vJp*&yM(J6DPch(3d61(XtqqC3^6TQFob zd$JGZln@+{WM2{)@MRI*v0LHv%gIU>N&21R(}Wt40u~hs45z=EeU#KIy`#FKyhr{I zcm={|H4sBt8R3DeD7SOInaIX+#W!lDOpX#8 zz$H<$YPzfQGBfn9CzB>hCDrf<%iuTHM9eSfg;a8G0l%h1sIeMPZy(=_Ftr2uubVA0 zV5#?Yc`b?4pf|<$PL?qV`!T^9|TJKH64OJso%pDmR}$)+(0&seWoGA5LUhb+$V zG-Er0r8?VOK@s#y-nLtl0ysL8Vx80~@k;j{Q#^RPf*=PL3^DbP#aS`>jS*O0zXwTw_`8;06@BOomlpN}TEJn3P#Y_oXp6y-Ko22Dc zd0T0deNWw86U?z^s?=QJoF=@0RBt}prgCShx!o*@+?aSTyc86!!f}QAMh@~l8gb|o z0@Y?K!$C?7#1f4+RG>!PeR|itIZ0s}Sl-y{jubvBQ&LC<-yi&H=QkpZO=c6=R`xJ0 zt;LWvkvG&cB4jPNTYS^RLFOkW_u31a2lPGG1pc!a<{~Qm&A*{Sxu$!s;cUjO)3qSfj9LUwZS_!x{B_bsNFXNAO2U5X)Sbv^4_*l%wrlTJL|$nX}2>>JoC z-fZN#H%0+`6#9b`0D%)OJ`7lh@tz9B4l)K8Q9;Hg6;rxk)g>t45z3khMuexR;D{su z#OH+nB@GX7z@#i>c@B$JsNktfyl2DaKX&x0lbN<^@(AnIV1%Lq04)}8pbsPFRu#8P z%SI+co+Gzdyp&6Rsv+BdG=he;H>VTT5QSRKDStA?l_}HCiJ*`)$yz~FLInWznd|-Z zi(DGK&K8o7e%8VOH|Nwcz3ZOso|8aBU=lXrv90Cy!HUYTsR(tHB}oA8_HkLNy^$W< zI$ZQwPY6g(ZV6j%O5B*Bv&Rx#V8G#`#Z*cn;7Irlyesa4hM>gc@~5yUX*nq})5^Y? 
z?Ra1OIQjrS6e_8vx}waUcO+dBJ%a-cO*oEK7Lnua1*`!DRX9U85iU|ijmkB^gcTP6 zeh~D|vgS|A?O%G=VnnmZL@!btmZkW%@@|puXr?fp8bP-6cG*_cO%U>1O0~fQXea1o zn5_WQvR+EMDDNsMZkdo6FU+ltS8h5p`&>mfv=Qp90&%KImW0yZt&Qp3!@W|Z8z}&% zqPxaqqTb6Zg&~nkc0MFQA#{%3uQTky?4%>(@JCSw8(MFW_TF7$-gNwL`8QL2xSvOm z6A&}t8zv7mJH?8mP-mU{qKeDi*QL!R_|Nxd8kHEXr~>;{jZ$RQ0S$#3zLPb;1De64 z6Vk30O$ES+_d`1*xarPfG3#yCR}!+kUSpFwE_PekY9d4LNbsYS4|KvHuM;eBvTXY+ zIKzJzZwM&@=DB7wo46wVr@AIUP?%Y824uup`b`M*Aq5VBIO9oZsqo;EAy)%c>I&N1 z$xac87^&%J^8-y!2&8TfZN6M@_j_?55_D}b`Adu&VhcW2Hutvoz7aY>hatC#gsg_* z8~vWS@2N=c`ak&2Yk>fo)q+*)_F%|aTg+tfH^9E3@-9ojr2)I8vHcbb7U}==nmL)S z4nLXTcy(FVxXyhl=wLivTN%Mk*hL5rlINxGqNU7tMO;^XIEs6!?+-uLnDDjg!N%cR z&4dsZy(#u{c0rDnyZxCTSv>4@)_@#Dib8J#>cKv}vH#4{03`BGlaA(eV~Qy6aVVX*ZTNBn*A{ zwr%#Ig`#!qE&paVWNQ6x*r=2pl;FB-(TeoV_y$uT}Vo0&1W z;_NxNTbcJdE>}JbD9E`C_b~|X-fp!}mVE6()A>912Sm(zZJcxP_!T5L(NpnARX(C) zcew4Yfx}RS+r4{?bf4O06UsRd8dysB88ptw%G)>Q064HsU=cf#!Crh6_>B~C%&yB@ z#{>%g4Q5_07t$T6V$ct6-`N6`==;(DE^_X5;&e$Jq2+*pnamP?GWjr=;(i~xxnpSo z!IM`)C>tcbh{Jm}p@N#)j~zV`>HPR+VM~C82C%Vn;AJ*bZZPA={y2R|Cgt9fUVu`A zY$2ivR?wnkX`)U)DKpNgU`6 zOYE-L@oPo7m%!(;C7B3M^-79g@iHfmH6fD(avj*v-JCiMkDU!8q^lbU`OOXTv21+6 zYjSMNqw{&^wIGklwT&UT56dBXkJobl0j125ME$v`4@oP;-!euLY+7OFN2?Vta&PHH zC!Bv9HJmgLkR+R}Zm8zh4TU<`ppM88&oFZ!V?6oo-pj{N0~!bt7P=*94{X7iNGCsS>S-mC z`64?7j?<)#;c^A>35iDk6c74cm~IP)boVOpTZ}RxV}d&v%I{OX#`}jX{;ZF0f-fNS z0K}94UC;^SX^)Q|J&qPPDa_K`Y;avs+Iqa~v(yJ^DDd}gAP<=-_s=%kvic;|NFgO)%Rvyt`P7u3; z`T^vW;iRRz+@S+Ci6@FKi0v^D!VQc+7|uh5#Bw)nx3CK4_pcDcZ4Un=ylzx1Q&p8O z6>xi?V#B{v!>#MN(vqU*O67yVj$8vX-Zt~^(?kg|0~@@f6_*6$UE?uy2~S}HYy@&+ zYV*K}osUn#ajXLV6lqDY*m~Q^en=qossW<`miI^>8oV4P+YSfW%0D45f4m1}W+mWElMluuF;N$4hQgFJB8_T0Rq zONly&FO2X$Fab7PATB@P)~NsDWEpRZ<76xM^3C=2@#TovG-0N+g524mq9h zj&>$J0^}V;bz9?lZ3#1aH=y$dLr-E{cBWgW9LyA*7pR7xFGwq`s)ho^en8`ZhQ1~B zw|8!AYPCoE78bs1gQvI#>(XReLubo91O+rkU4lox_dY3HJITcrH_+aat&1hiQe`O| z@}#RLknf^44^9p*B{rZDH(MAn!Dt^55FqrCBeET-q!bPSSKWIEKjzlGX+pin-b&#E z*n~(>hk+Xq?x?reCMGhX)aPS_!d*-f!ojMfuz(71MfzPT+N=yTF&@A|oOJJuFVJ32 
z-bn}15V0!KMX@zt{yz71T7SFQ{3!uy=!_xBhXI3_6n;s!s|%Y9ZdGZ%E`}TqNGg~6 z@4qw+7@x~zw?4!U1;CVn`^U*gi5Curow4-n_fMUX>E3SOE(~k@cv6rc=x?Bmq;p9R zDNLAJSlbdJRfZ%Kkt&&}RHT9vfMw||79N5J?C|lr%vg%#esBUezAhCw-5T zL5H35H;tqb$=qO3OAvxFI5~f0t$Y`6(gHiD4!976i0zUX0)Y=gt4 zx^ZuJ$YpU!`>THYoMyltagNai=uHpg*ZKDU5H;Y={$o-};XVcc;}^=s!}>ql{-FOb zDp2JcCMO`{P2rI=o$_Jk$6iyT#Xdd%!!%Nnui!2<{U-+{?>im31vveI`2pAy;x7BP zKz+p)0w!G-GA;eiW1oFvi` zD9T4d6>b{Ry!-k|x06mU^!!d5Ep!Z&1WOaL?t%m7iCc#Y{lHI**P*Uq`ovs-x)0I> z^%T=IT8B2pTmn>&%N`Z@ffC7TuA(PQ?gzJ#U_&OLAe0G^QBsn4Mj}O=v}AOxSp~&N z4R2NtzS0+}lD#D|8{QkDp;=!M@GEUTar$DI`@~Q#6)-h(y(K6TFw?eUH;xFt3%=nl zZ#PwxWhZx<@^!ej(Ou>)AM*{Xz0ZlKxp9nl6qpsVO7S%S_<{a+?p$0llEL9FKob>^ zxRTF{PsIoQ6KB92FXV@WXVCk(6+o)u9M)wgRqdimgz=OCGq$coWh6}iZ!4-G{xG*A zAKp*`_7a1!XrN=qZS9SUhc`s#CON)!CzIQtf+Gn5rln>3X~U^F|m_#K-~KEm(Vk{odzmE~xppRf1pf`kR&##HJGKv8}eIZW|6V;uD3` zAhVlvFi;1M$he`7;dw;jBAmJfL!=&k32+5uJ zD%Cs8a`Rv`;#ClBTm9&QK_~B3wE!)43DzBu&F(LFpCZy^!p{8|JRgCutfLHLEq{ zu}kt=a9`U!`7}*aaTQOVutyUnEL_t=)MBVUmEXUJdqL0^Ur70VAzHd3U(@VA_MR}P zvET4H0XQNn3!y{6w{N!|qpvC&1Vwp`VxO7D+a*zIKvZs<3d2$FEy}fjCBeqTVR?tI zR3zpQyHBNV0K~_3pvL7bGYP zYIs6Ai~VIcgc8mKKY09mJ2qtRH!7`RcYy6)7V99($u#10OVSLGJ-BufWZl=NEfwFl zyM{@v|IGizpWg9@;at=N3xS%kO_LS{_*9Q(+8Iv>F;&=dC(zE!R%svv-|BVn21N<;;YkB%$v z5RwM>?0xeP1d-T9<1Ljifj`^#%)Q?e920Q^I39o%c{L$ftDw~G`x-fHk_k>JFM?V9 zqE)?*ao_|_<1!9#s|v5U5g5L7RzNm1(Iu%1M#(y#Q8hg85y*k;$iG9JuJsan+lR;}Cwn~Az#1KgR=7B#4mpqe_8 zwg)uOA}7lzMb?pzPz|sjy&iNlx@?LR<~?>NI0gt4ff4@t14fK+Gx4HZL)Q2r?kD3MVUb%0?%3Gk2;ZmEL-GN#wLnHMSQ2I~|> zP9&jvhWnrB&f)bU>6hMB9GLk5Zb}o z|Ed|FKeGf#3&n=CYoL^*;X1i&cCIf87U>^whIo(`@039HSUbQ9Wya;$_p}^`?+LTM z;gqQgV1DFuQ(e#(7aB{U+zJK*klhoovtgQ)n~0bW`Rl@Z6ZALW@k^vs8lke;9`ST| z96(x-u|VdE;EHBf$TqRWy~LnE@L|%usX4S7@1Q3qH5-h) zCC287Tzki!Fe%B>VQPTPgy4m0%=~HzHI`|IRv=;|GzOPXik${SR#*yK0)I6;HbwP6w1tCnB1z zLp0OYbd2G&_u+Kd@pRbHwD;lksc_nfc=}>8eJY$zVKN31*-3Tp;h;>|HdD!!!tn@_6V(atI8m1z&l6U?6hqDy{^MsK^xr=F;JrT` z-?xv6dywR6YH?Bh1t2UMZ7n$*7{}oHq?TYFEqsfp{?ynCTOpl^;T%bEzmhH%6*ywi 
z_p5~;4DKWldl3XwQV>_vL+JBciv6PYL0%Db(*yAgG~GDvVE$`WM0f|h8*>ODB0jjV zzW;;%(`Q@s^N9C_lyqtkwjrPvsmg*nUQ({o0AyiS^h-!OIZ?2X3Qjx&%_>xB&=%ks z0>evLB-M@j?1O%jQiT&0K|eibAN12h6i@+Ogdj+~$)s7766wsbudso^pLCx-`=B57*#}kR zPwcsGt4<1>dy<>pNrAIZ-h1Te>9$S9N$2E~0|NpM2BZKBQlN0y`$B3{bUWkhgMQ>^ zA4FBO-zH}u!NpF>0%wMz1LQvtTfni1>;)fGbcYuHfT#(I5I(X7WeD0eDSeB-iN3;T ztGk6f`=H-LXCL${e)d7XjAtM0M0AwH9t+JSKX7bO)j-EM&>(mSC`1&cNjI9a5Bg4? zeb9IE?1T63B$ujPMUt0spdyE|3>+mc&5)e3j4YNcgdnm`7O{VlKoG|+pfC^N5CjmZ z5l~v;GE1Omhcvbu_}K^jK0o`Q-)d(c^cTt52fI7@{F8p%CRGarHhKFw9z7!2c*D7K z8d6q(u*BUyKl@-OUd})12Xy{Pzl+X4>9^6@CsFO}=78fWf)*J{DT%+s_e3=_cxLfd zo5}-CH}ta)-n+$afzCGBZFV9?BvDi;B9P5UnmVywZf{W5uy7JrUUW*+`6vC3JO88~ z(b*?azP+$M)6edxooy36+qM~kD5W$u#hh`(B|0FBH*4`=mqhA7X&!Zmvk&_1E_W`` z6!My>wg~?YxtSHwa~Ky1F_4SX9rLpf(jt{ui2F@?_QBMB`|N|hlV=ri zugsiPa(y4qs-%7=pMB7aud}+V@8j7A{gywglKR1%ebDP==QT~gH0Pi6;^XX-coMBp zIRB&{(D^5iV(yHl>DQ4MYEidYjnJUHRTWg=<*0z9pmvr_Z@uu}KKr0w@Ustg^8Ngi ze%jAJ>8JgyX6Ofg_QCFIJNsm}lINfFUq7oG`mK5PLBHZ>AM|s1_CYUQ&OV5j)m>}n zbWQ*5^J<*mZfCU~+{5*1zw>SS5uMegyvRKJ;JsV)Q*^dP zf6mT6=*7X=2bJH!=b!Y$Ije9fe|*kAdG9tOKccg3;!(F-^Zb*38=Zgh-d|7r*U##Z z{`gW{hq=dRQ&LASA-_G$FrcW43Ar+)-KA)H%jR}X0j=HYFTig`Q%r4Yc14CRRkC3` zSYLl5sC(#Rc&~4_5h;SV*Ki_N{|k5kY&5rD_^Y9#5%^=$`$sse;E4S4A0J;YKm77P zU%&i9ATMrGU*>Rs8YxEpU#d|9kNJhgW~Rd%b-1`VB9`sScDJ zMUe8m6!P{MCFxc4b+wK^e)#Z?$7%}y0c3>8iHr)8?wsanG-3Hql1h0YrTFt^wXCmi zR!#Q#%ZE=detpf`KzJbr$DdFomM&Zz2Y^>qBoR< zhqFzSobMshf?=6^EqnRlkN00*f07r;L&zhzMRMq9ShLq=dH(agyxK1=%0JbA5|6KS zEQrJS-G`U|;B{<&pu6}1_&>7Y9h5^}K4K+`-#&c!$I`!$QQJ7fiwP+rG^Q>EV)z2& zM&$W{elP{>&v0z3H*Y_E{<8e|;{Dr~bSMKS0#~9ETzrC;$@qni8t7`XD4S+w0pUWwh3o(J*=S?(={A&ex$if{_C5 zA>q!?ArTT^s@Oq(lN7(ceYZ5T@5{U<%jG{_{|D~^9x350q)Jf>8Ab=m@NcTPzxN^;@oY!DQa|tKmERcy8AlaJsCjwt;I z+e$%o2A_F9{m2ngs?vvjfzV9~LlzZrps0gt`nY}r*34J{vyl{I1qdw_qCX#yByuET zW^+0_@kRkNtbf>u zB%g!YfBo?uC5G(L9%K{I@#395;U~_HR#f0nQc{$}GFZ{W* zQH?+U_Ti6rul!}hEAXbn;G9wQzCcBVV4|mqSvYMhij*`iMK=lARA75y_v{JzZWG2* zJI4~{#D+&wD$6f_dHY4?gX%Z1?m=cMAz$J>{V!4H%qx17YoC-0e^~;PRm$AyU)S4; 
zx*PHDFFt;J`~KI@jOFETGI-K|gyNSRH~7;a%WxX7Xfl5EA4R@*#emQ1?Fm#O;KB(U z5^V&o-D^=)Z`qfhM88i+S|VCr2uD^{M-C-?X1f%>z4-jwmlwahlfIJ|K|y;$lgteH zH@$^BZs%?tBm)56Rv23o!VS(KkVQT|JG{#BU*CQBWq6m#dPr!Lm5oOPx!-Kzr`V6B_R~u>B^iF=B`gi_~&;Fevjl_*$qR3De;m2F-_=o>%_FtyGTx(N;~V30TwGuw|7WKF{>uOtW;y%rjkj-rj6%z3JWlZ~g(sWB#3D zw!)anBA?H%w#{R@Wzq@EfF(aqh|nL=v14QAPtugJ zp`KZeCM`q;GOH8dKm8Rhi?J;?frq@FL?=X12{jM6x74^FG{&qv9TFNTyr6@eCa{RQJ z@O-%Ugk6_~$ttxKANcpKf)4&A>4)nJ%zwmOD;y0~3a39=BGV<`)X%%6p8Z$XrAM5V zP9De@pHp4uYGuye_;bddl04KioAZwT?w_;0e;h}2Jd*}k%ITqRn214uhnvH-6S2>3qQ;ga z89XH(aXh?1NLFhRHFb^qs-8&AXjRR1tm6ps8;FEs$iV3{nT3_) z4_|seACEPcidS%kLMxuxjdqFp_mE#0qi`)gT+2^ckyF1nH%h1NVMW1hczacCB}a6z zaC7@RO0|@=>D?-B24F1;NY+#UDZ#dT7oJUNlYK>I;)#9L@AU9g{glP)b)9WSfB=th zTDLHHK)W?y>NVXiPIY^D9EKhLsp7y-crN%JXa#}0G3EVKZlhL!_8U+5;DA`!`gVg5 z$4)v>4|}6O?zYg`m>NQZXCEtRrw4|t8NgUnZh{C7u9JScs_8xT-NwD2yR2Zb>dpL7 z0KMJF@R~Qy#`#&2*)0Wl|GWf)hITG+=E?Pe>Ng&YJ%u(~*4%K!jii^eqw&Mo|CDib z*SQ4fsH-BpOa6d!LMWFDMwt5W_-+z%7e;qC)CZHn8Bt=AR~HNE@`!ouarLT(=gwb4 z{ZHBv$Yo<^`3f!*Dg5}EZOolD`87JIAFnO?$)l!vS2q<*Lljd*2RJG6oyvBDM0VM* zkQa@Y*mTLpsHh?-P7AkPG_EK_WJRep948G|+gR^y+tu^h01NLn%9zRKFN)#JKV>!F z-ZO)11kcoLkHzM3Yu6t?JUrM@#7*gRLyUad>DBhRdG9$z=&0XBML1k?XbewbCosdJ z!Y}zbQEX?fLmy|iJNko(x^s28`KPi#31>o09yxJj&nMDdRrkB8j(N*A066((W#pe?xsZ?H;aWBJZxnC zWOm&rj`gdR-+YQg3<8{>F|I=LEvD;88+PZ=!dN}Te3{)e)>gG`Q|NT;gwsV^Ok??sO=%TdB1PK`7Cqb)Jzz65qTZPJpeNmNWCpgY>N zFdAJnrYqd{oDqAD-KT1#u@LJ)tpRk?*C6Q*@u^Ss1|<$;O9Hl(zyJ(<2Dl-&@pm}= z#Ln@i2$2oaWKitMHwngPF`Ej~nLT?#B)yi>KJp#+Ehg$1f1doM)`8-MFwXVISVH`sAWB?Sj>5^Os3J?Kq`J^&76L!C6T;qbg+ zXpg^kFNJTQm0At<4fU|hEZWK`z3~MxB?J-z>_k-(3aXC22)FKdH=feUIzjf*I^O2) zNVk9?H32D|N@Snp1E50Vk509vQy=X~InmAcN59LwqNc>H6jhKjfeeC8X#C}=!ED5y zjO9)rr%=Tr*^RY7Smooc&C@(lWU)}Uqzqs#9e;Ydw*IzqVFtVlK7O}Y*a8AVHiP?mm2c2tVZU;bo~5=k|0aO*lug?)JL~jUl|AAqua6G2FE}86pEU& zXxwdf`6`fjn&2}Biw`5HgTr0g8Y$8+{8QJM`n4n)xxntFtr5(mB?+r%wB;!#FK&$r zsZwma2Iv(+zhzI(rL9p4kSc&A`(>nQBLyDW8u}9?hls!s;$Wax#znEM#kabPaX!^R6F%4;Ct21m*?bC1aC4 
zs(Et}({xDkuxZ-G0z`&9ZNqZei9KQhl#dp868SE7%-c_x0@KrhtL!Ju2Cjph{>zNu zfrtxlqjZ;(37$NcGDQT9y6fpDtf2MBKgAUzz5tV(>Ae9zW5ew>U)LNodt3j6>5#pG zPwFY=1J#03^ZQl(1f89g8f+TrsNXxNZsx?hUyD;|M(P!t5>9k@E~QFs0zNB^LyZ%y z6`$7=XjOLOJ`w;jJ_bTTC__ClCKZ@L36lO+M7ENOwvfM2Aq{{RH`){L#5I-E!rW|< zr`>|)_>>#By)Tp@R(F(pXc~atP)qDFoBOV|_qyEGMt%>MNu@8h6yEz`{OI^`Qc;n)pA!8G@@$H`p$P85yyWYz}>cPN7;rvDSVRa#ialT0FFLn=%@$D;jNvnXT=aw-0VnE=G_Q?GP63qP&z0)%-A zWkHlDDW|@b9iiN1X?l4!A6$vDuQrZ&0DoS_Tw4Byz&l1g`>{e>daV3HOgpmGnJD@H-~tdfI&1!lr#KDu80!lXaADDRId4kgfyb-QnRr{$ zETol6Mn&2Jn^J^ClOB*9EkuZ7s2VLU3^q&~2j^G%ErqFmrMYnXvOZm=Xd$AlxM)C) z86$=VQaxe~j${AF&sA#>ZTlsd*wV^LBjh+BbxhLe3~{AMCqn3>3%9jNjBEq4wG`gk zoFWau2Bn3Jli-XDSJXiy3m!=gi`yz75hdt?C?juTTtxHYOF>FT((AjFX1j1)v}MJG zm;^^&MR%l%f0y>*w!H)YDDEl?Rm}n!NOAkPgPTvC5&Y&buw$MLih)LzkiaZCP7xP( zDj7tjPsTh#)J~sd=T2(vs8mdb&7M8Y)YWYw-Yk{C3y!>u#3=YJ+e8liFj9%82*yl4 zLW-bZT;)}79HmMsjn3DXB<-uEo*(R0w@1AzTq=jBpka*Eh@{2~i7Pqpu?dvK8GkpvtmA{i;9NhzhG7Jb6XdV*QA@epm1fsGO{ z_{uuz7^Es1M`#+qi)8Wa%=b0v6N1tlr$`vUCD_YF zIi&==N-~6A-<*X)uDpx_e&z5lDFr<4m9S72|F`OrhD)wj{D~{M6%rLSS-}*^aMR?N z2q)Y=7s(gS1frKv|HN)-zLCrZ7O}B`EM#d`J+3tyZoCQ|t2JarS1pz`;P(f;ZUhV$ zo}#3=&;XGA>rHLA1rTUz8cjBfe$mTpFa~*ju^7eYM3gS1EWA{j;F!Kn+Ab@nlhsEM zZl*QbB0GgVBZM)+(P3PRv82V#XNjynTZZha^;WpFn@WJjCsq!3sNTYZtPtk6_$ms} z24ijtWs@m41aDM~+=Dd{+7ilgXHrZHmnE#10bj~(@piT~8gwAxE$%?J89d%xBwtAo z)l1nu%R_%8%}}RJl6GWUf=`wrTBt)f6iM1V$h7pnQaWX8pZE|$+3B*`{giSovBMNV z!r~y(oNA-^UKXe(LDIIA=9sw0v($JL;hiQWvVtM%+80AOxRwk)JWD>Am=JJyU#T@p z>JJiLlf%6jH%8zGRstCc`|%;dxZL>RN;DagcM*&VUm)KJ?k@odJD%+%J0A80ERYF2 zn8|eE2~vc}T!mmbDj*XlkUZxPo7|;DG+b6|Yqig6ZNpkEq}Gzy1XnalB3+ZZbtP`* ziXeI0xBxx6H7b6=fY2WdQl}FW0VfHmfJ~AdbY(k+%TnGau{QNF+WttR1b>73IiYaW zFf-R!?R#e7SCa^ub3kF-+H$kRhT^8D_?DEcI;DC2)G_nBskE3g+Yi!2K*_d>9f+#N zX&^=hYzj3|OE*B>;DRc+MH9Sc%laXu-R8E9_;3hx&C1wGlB5%X4L?fy->}~_w&#z} zCsTBX)%@aHH>cW^8=Po{7+*-DjeSXPkR${Ywloi*5)A|p+gTrh3H*r5Pxy&pI)z>F8yAp zS=ti_r6`_4Oq=dE;I=UUW-vaRM)vz!mAOXk89Psqb!C-3V$KZG!OT29Vm=^ELF$YL zlHz7pf-338OQL=ml?FYG6KKdnM@in?=$vM3>JkF}RE 
zP$GJJU?@0v;C_bam}RgINnj+PQu7$3;UvO1#ukzpx_sfFs{rZj5YS6WvNY;H!U%^b zLywiE#Qe8St83B`LJIkFC*N^|To8m{n*&8qwpXQ-z9%R5oY*uDe z-5`KmT6CzQ$qADdGT|4Q+ZVGuyETt;N=+A7gU}I>51*PmsIB5Tch6IAIJ@|6*R8Tu zap;UGeFVu+eW{e!g&GxQs--wmV^)Qz-5$&yB3CKY(y@bPIe8CmMHE2ydvWH+rF|5K zrs_`HI#hNI+3MOucDOP4JR~9YNMRl&PKl(Ro)kgF##dT38 zKmmsGuOO4t^YCH116Ael{t$%#tJ zTr)i^GR#(G|4qdxZW|{rERkbwfACBM0B9c}(`DbHw$fC9?k%mRzKv9AZcd_b4BLb7 z(>WSkG%b&*wGsWJ*bek;72sJujxu7?60UHF$_E}Ripx&2)J$u0U@E2_!V;2Sxi!{| zKU=N*QcPj(O7oi#XG^W93eCAGz=ly0Rho&MVSi5SIo1A>En6tP8x?+%FUTfyrs4~Do#tGxkKO|^b-OA1gHN!2QqibSS?P5fVB zul%H!pohv7XrW1%?MYL%%)0NZMzhaW9?<39_F46pB{!gQ= zv2k9dyY>X4Q58VV!53|7YH79cQs8(Dn_V~^3VYlsFS);L`moZ?x&4G+lNU;|)2B?g zk3@g~?*LvvNbb}aMO{~jWLe_T(k92Hg&#rNan2*z5L3&s0B%D0Arzzg5VTk7KO*4A z-Qi@4Jo2V>q!3&DDg}bub9KutG1!8uvF6@px)`JgfWXG#MX?g|Wew9o8>zF8)k>vc zYv+`PDa5%3JcoG)Co+`f zRB=O;Au+r@HVfX#Qx{P-F68%`Q+?ND6Nh>u>5-%`X|n&5A;4`JKZbe(%9#v$ zw7d@IV6uV!oYl*UxxS%z;T2iV3ZLfCwdMiNhKS8LNh#uf9Oq1+j=EW#MM6^8IEh&a za%QUNM%`>87)qm#qjEBy7NqOcOEczgph$-@ITjwo>Lo?=6;II8ax>@~ZB4F^syl-p+_@L^_;G&XvC?!w!5;9w=`Nj=6k|t6r?;&@zV~;Jvd0w?1W=Wts zS;@7xvqaBp43QIau$P8qs^S!ShgS%om4$H-nO*ZtI-QPYYSqw8!CUd{whT4Hr`;)% z9<{bhl5}33x=sm?Ar(c2_W^T6;DCT*1@+)d0*9vOcs6_YqGh9F2qwu2$;L#bA1cR~ z1UamYy(60T+Rd%kiVDz5ICP3Lm)bYvg7$k+GND~H)zoKs0OsV~SlB7LO=ShYq#d&< zd~BE%F0t6dBACzPLnTD^0z7uy6w)e%vmTe)lL|4kheuOOi|C=^s2Rz`Pgx|d`00;A z(VVr;s!kT_uIzkZ{x0%(3r1w6USd$Fo$*3Kk&8MohKr(!p*ERTwXSkPs}caiWN8IwfaoaDM+z* zYQ4oN&J=XT!Y2Q!xc|ID8Gh4tnbiQO7^3AjZ}zy=9VteqaRwjHEjMeIofsKQUkY=j8VC^XfUR+xw}2=F8$aV#h)8_QAM&sdXg@VZd`CZR46oE4GTwMdx1hYBu61*BZ2huLqL1^WHnGBPuNbw{v7miQ%^R`pT{Roo!J8~sM7J<}|8rkcYMycySNG}g;ic$KN^FVvLu%~K!wFMoV%TQ+_;-!)PTuF29s;8FU1?dB!N~JNhA%@XTWax?H%F2hnH5 z0v1bp2D&Qg(-u$1@xdXg+Jz8+@3l$2M-N-efsBA#>Y1c4iJ^wW8L|YD4quHDG(s2U z_IrrO@}nt8*vr^v(Qx>7!c?eIA-IB=UXK?euN3jb^9;78+;i7G() zPYUsdIq(QWirL(vazpXO;-YxR{S1;tO*6JT+s99RT6}QH-6n~4ED~YYyXUKYtrmGy?(y9Js^>V!$QFNra9h^dTJzQ3S<> zgG|^wZHqb6oJ}XpHqoN8AL>LrwfcWFEzD?;a#(Q8)OV$BZaig9*9=)un>K4kDlBNA 
z8RlzV?~|6a@1Vi0W|%&xkv#965><+E44jn0y^t6U2HUS#C_TLICC= zmPFGa4wN{J*p?@c)i11x=abaKfY61!k%G+=?T*hA8hk;rwkdNMc;d9ICxDw><0mhA zOpHC^!70*4A!Qrd)}{On#LU+wDKbZwAyes#l+P2siwmh%6UCl{C10tD)Tw?p!n{?2 z;)P(DeWdSpJ1jy98V#P0;84+Y1m4jCGhoW<(9-(j?|dr zg||zPrHEM26f&9VY<4;g;T!Ky60L>D8!V>DoSsHQM5v3goMgO66kelWsc6w4x3{ZoM1D${p4jAEgkUDJfkDB9kLo8@h`J*0L9_0xJ4|Hy^(iwVzzp;o9w14-ha?fH0M5iNPcUJM zKWAjrs8S?N0KRMFs_|JF*yZRAZftFl^jm%8vrIkXD zTbJ^ zYR=kZ@?XOb0>7TH$4`B2wo=^g8*dvrS@p1G!pU!u&@tiw#i&gs%a&WM7yuVu$^r6* z5Jy3nlU)^&YsO3Jv0!3-Dz;&Zh;pPam(t#0d|G{pxX1L_RRgwZ)jb|WaZ!S!HxTe~ zSVxuNLzIOS`oh_cGz?f1;?{W}=jF&6@Zt}eATc{ieG~@5Vkwd|@&?jqh$*!KxlI%3 zmbANs*+!7Hk1*)~0y*0tfwNNzyWt>{w!cAKdmP_5GP_VxGiIf=F9cKm8%ex4364xt z-;shprKjcgk(E1klPsXGDH9OffjymQow} zL{I4sEnN?*c>`a?T(MkO7q=@f8(kWARss@n1M-mzK#tV+H#C^85m>qKQVbSd@py_a zd5bUki!TL=FO_IMRQy!9=#sbiDZi`eDZj7yQt`OL#iI`wzZfp@V)2A1oK@5%HjUzI zzT!EdT`YdiUko91rA5zqe8v5GOZ4jv6_3Rmau>DdE71qBAjQv>fD|V~iRX&v>vt88 z1)X!zyP*<`4cAuuT#0#5eyhZDCHe>#!vLdh(Ys++G2(?i#eLxWRs3$C#B+fXeUyMb z{&dCd1xvISEQWo!#Ja)x7PnUdH^L=g%7I>t2y)67wJ5};_#(l7B`yYw7ZlTR@oW&e zN^BF4ujG>@#_92u7!v%gp}fRh`h#CEroxMuDLz9wBizcQXjc92zpj4GQb;&XAW1xH z@ES}J`D~R`8SJ%S!|LmGJc_j`jri~u%8Ztewlp3~bf z-UaUEE7z=Tj9y!R&B_J_M3{^hdxSfLs-5^X(=|p0lr>0T*$XYZ=6Y*Hn0tuo#$4^D zj7&BXH17>%)bk>ld)zpRiT$x<<5md*SRYj>Y5YsCTKnOv>7BqgA1127sHbS#E;anveoar>h{%i_jP0h5Nu z&rvl$IHDFedumHJ6p9tMeI{|Oxb@uPh4X1j^t!cb+b6>AAa0q&w5t+hDf>3az8ru$ zVKoe`yoTL#O@nM7^cVP-5_yl0cQ_$Ih1OYv{#zpFeQQ^&zV51}77gPSVjl^-31aCE zQ{R&KD%+0JIs~7bFVZ-4pX$#^YPjI%9V-|-wtw}i!-99^=qQD~0` zOK%|Lc6t0siq7JtX6tEaW2e6XZ93(Wr9fx&#`P_6tlYk<>#x0bHn%*dR`Y(%9% zjL!*<>BMR18pPt+8l#k#)&*!Aw=xQG16|3&$U?(@s}@dzB?x-2Ox(By2o`A5#f-LP zlJv{bfbU9j;;&C#nT)lPIzjqYnr!(T>^}GG{a=ZH zAFBukpi#vK&sE#X6AOlZ#D2G60=2C&kCw~S0x0p@OZ;87Y+7LX28qQcxd^H<6^oq< z!M;Vwt+jY+-poa_9;6_GpU`b$yjc^n-i6>%7-4A`-D^~0*%YrE^)Ic>~gTym8~$<6pbn4%}B~_x`HkU6+BAVODwg=)YQ09 zE{S5xbl6a-vpuOMe9$qCkt9L~Mw5ITv+AQe$z$Aso9 ze{{yT5xYeT{zfFBFB0;K<+ft^tb}k@M9ogI+F?;jcfFWGLd}f4-gFSskPiL{iKjUz 
zu$x4(xK8@W+)6!nO*RL+S#r2}q%z;f(+loqJTTp`5dqYd1xLkb zfvd-;()vr!vTH_d7S)arFg8+>VPQg?E!s#0-%1TRxgcf6q>MnezfqaJP2lGW6=e55 z{+9Zt`H=;0P+#^yw2}!A88h)ZBV7?u$gmrugRgRcxiXo$nM~)L(A4X!_2~KuIV@Js zM6G9PW8~?>Ys>Q|#OC}FTuCYPK%7KX9#@JekTex880>;!9}-`m!KRUi<=ImV<5JNv zW-m#d-z7=L$S{9Z^Ub(PBnf1x*DgGAvf5v=xd1G0X}wu#>}>}tmK47~o%Y~NjQ=wX zt2zOK+QYrD!}X~fV+k_QE;Sc$#QucTuq6Z>w>h$>g;T2Nz^JfV4XiXC=^>tpeJUP{ zD$wCfG$z$p@yEVVyUnTbpSmNMtfit4l6(!4p=y*_wno#Xxp2*Han;tYOrmcZ@nU&N z8{{Et9J!;(S1j2b&(@}AY-a+t`5tT=Q>mN8)?>lN?aK#==N6aSwIX%fHO+CJYIFOf zsu6}lf`+7aIaCjKXpFp`SUTjP#IzNHQ=%R5`Ngp4b~mMNi*7L6gIys}nAh(klGuxN zHe?E3L%fd}Yjf&GoSm?u!6}l3!bRq%AmQQhv=x0a#OvN3dDFSFs?tbu>{mp`1GVTx#&pvlDNr4Np zrJV^6BDJ!3L6h%nVP5(%&_;yW5~B)UD@jUIt=?2j21Qy*;&HLkk{CXiaG7@6A>MNb z+iq%BHPv*29bNPW)f(Aif@ms90OgNa{B1F6S>3z6I;>EDbpwj0Akz$c1#xN*`9 z3_H9qVrE?4)_9*F=p;yHMf5N(E1BaEgUNe4PAN00mw@z?1whaowr{sjPo*ujk?Bw_ zjh%<4glvk`At3b)S-4NDjd+ZZHz*J>8aNn@0A-8EfkO#_jhwkSbFmpmfGNdoq(Vu; zprj5JQ5J#V@Js7eBLJo1~yF`G|5_?$R@uUY8bZnhZRjj)JG7r6@h-F53;oGL$vO- z0SRBg^PL(C9uy3IY6+`1vEs3p^;ct$)KLhC^IB`L{= z-{`0U74%^%6e?pvag&c0UqFwWges=WWvEe@ltMLNouy<|4dK`nATI=AVV>a;k@9G{ zVTcuztpzz(F+ccc5u1}HB@M2QICq}`4@Gstw6$y@^&tVJLW`1b&bSb;Fp zL53zxCjp5#Ls7KN6axY}Yehc)bQhomV;!u$xx}+8_#HS!E2ZEW%-7zV^J}$tY6fu$Z)Aw z*|`)9Bhb_1kB5_9a}`b}5m8>gE9{x5JJ9F5k|ZdzykJkK*@BEre$HH?{=IG}7|Auv zL+f-J&dYB^7P4R{MAbBp8wuWT`I70Bb(dUqEn+HvMX}{NB&{G_0@f4?IFYe0KzS29 z@AM%M(qgJ!S`b+;46*UToD{b_k~Q?3j*0|Gx=2~+^2R6;KnX)pEsa)cV}uB8_(58t zN%8otplmgTwW|Dkszm&~nrrsi>~qQ`P9@+)&QHi0oI($XyF1$Y7$H8Ax@z>ZSl^sT zQAzXWL{l~op_#_mwh+0&ZEnquuV}3YpNEyk7}t@pI za03W>$!0F}e{`pkc$mZ~=)6Hxb)H&Rfz2*^yA~#s8h*(?rNJ)S`IXpNDKAQ5HcFFH z0Ku&lbVJb=c@m+Vjmt(|gjyH=fz#T*giY53KZJ|tj8fRwq8per$ z%NveiqHu-uUK^E?D?3U%@EM9dW9krt3@f9XgjzGUSUR47l96TK=cDe4L@1piHOty? 
zRkH<%pzV9;9H=|^X*)1W{pousVNFo&xa6?@%OGh+sp zaa_@jx~vSTIH@2(5n?9b7q4(B!_n59K$_ChNQ!hK(YDljV&`ZE zU652VgjB%7qv_IXXQoKg)2g*O+F`if+?pbgjzdq9#VfP6WZ1#}`~EGl))EZr5b1UC zp9@ky&KL5dA0&@>DbU(FU#R+)^EQ9wO4LU6ZNTP?$IwP-lrlUs)kKPVjpTL0wH`^( zo=7LRkj1@8L;2lFwY>ls2V+62)bb-gJw7!Fk~Sno!B~tQ!pDIe#Wy(grAA5~;^5~a z7Xxe62jZ$Q=36$WL(%muZEX--yi-GIl%g>-G(l?hl?0mv!e=ewRw!0}(>`m*5F9GzTx(YRB#=GSSBf46k4@tXHCH&6oI0yrR33RJ4jd)hrV?oWQ0AOvc8J(p` zm5?i+tOipi??_0B@udmUA+fv3LY3frPBD)!8cEiG1jM*Z;hCy;mi^g&p?7;P+E|m7+-P1qOJznW-XOQ0RcR{FgI76W17$;+$hl8SxOt-^xzbdF(pDN z5}%^zrXUT9PvrN)&=*f}NPs7B`dokz+5#p(asM%D!jtAJ^(QpfO21dS@V%z8d4PJ1 zX+K&o%>tCZRbrs0!2pm!ggl?5NJqa(xBx7bMe3w|1snJPiyjr-1DC9OqHD?-fw2*k z;vCeVRn3>$S}R)vWoTPAHj7JDgZh;HuAftx_bhREB}xyumD|ylS5*wzoCQnc%0*f` zQ{x@f0lK;&g34=a)$v?g>UwgH2Z|I0^gk4+|-g((ba==AZicE)zGT*%@jQ);1J8Fq@*#Q5sWm|a~cg4 zKcnr}wFDd7sJ!}CCU28f9t;l(R5)065~M`Jq=7mR22hjvQ!eKUb%gFxK$*;`Nsql& zLKZHz+O_6+tS6C3tzR$Rhn(iO4vE19BM*>BgS7gv!^HiQR-^YASP}&OchD-5H6#&C>HA6i_A#1|x{s zo78wE#--7HklmtgTFj5R5Ncy3eVF2XzsXU5_v3fMrYb3+YD zWp}otYpy*!2-Irh+#w~;0UFuW;q%ZthA71zvvR-{48tAH%N2lST_+lVhOlBJVI}Z2 zOqx9n@qjMMqG6?u?`*mb;FPp{%8ItOdbD&|4vJV36_4$gU_ejI?wKyVD85K|pKL$O zzUWAjQ`5Ayl)PP_f-d>KrGYNhfLWoi_&wFIaz-e5;bC{pse^dX@f=JRfNv(O?p@hJB;NrbXkkWqsCaFigW}RhJ4NV7@60f_|G#j@Ngpr-4FB~BG zFR?K}G+xy3wr%))7};LVbxjU#mmjQ8Q?4%Iwd$D<+Vx7odB4P1%C<>*sIcK9mV?|c zR3$RCJ;g6$7Ui5=o7$XxGfl1qc?%kYccdJaIab}iuwnJ)Rk(m;UyX-+X!fM`V34Yj z**rnA1@h*bdo?#(Oy&gB!r>%hlK;g{9Od1T6^2sja2ln593z=1dGpkf*?02HbNK3~ zTyZkqQkD|!x#=KqI%0R8l`??ECO|6QFo`ipV?7EbL4hkP}h*g-wl=1i!qAv z0v50%?j_l07{fQig*D2HEa8c8)@jztLK%v?sW(JTII7L~^0IKq;<#P)tv8ZFZ^P#6 zapzU?#FjSvbm=Kf2_%#e6iQ3Pk}j(Ps6VXIZ`&xa)j$agCLNR%uu#Dzu{g&$PpLRr zIEQRvUXPzl13oHU!9a&mTo>STZmtqqk>u#WD^Yu1kTug!CCUUzw8+CH>at?mspJ*Y zx>(J3*GCW zGMpMk*0DE_JPZm>kFKDcEqEG($tUrw$4ih=0L|Z^%K}HB6XxPVouD3Y(&xpqq7<7a z)0Uy-v3G)>xk2UaS{JpaDXtMp`^YJch+uB6U?Fa1cf%mOQ)!mKP5$>3_BoOkQST^a zmt;zgN%j$JCZPw|M&r1RkfJzYXEO`liV2czAdZ5!NVZS5cglU>^aKSpdBQBU&&}Tr 
z!!L3X#ydQ3@!;iUa^A$Hb_$vUu{0EJq~e#iY$Qa;dfAI-r-v+3WKVDB5u3)+NW6nyhwH z451p8?8KN0m~gfj+37ELA+6@T^@Iu4B8@V}2lH(4S(WHZcr<9rCS$ZIj_{N9#uJjN zjAEv-?8&XgP@p#m5*F82Cx<_tT5Onf9G?_I?)5=#x{rvwT$;mOS}-7Tevyt1KR*o4 zAri%xAcJZ|pSV~2thCi6U!L>RD z(SoZET?Xsm)4>~fxlr1jrZ_{1$%t-Cmz*s{fmt&vzheMAMcH zH;E5xAQ=x+-X5*82+fiyTh7L?t~H{W9hwz=iL_gr9bJQJU?o5--jeD-s~wpS9sv~O zlR*#SOHt3pl9dC)_FvqqrcA{fGf6pRPXa?2E|bu28+ry!4O%#>LGu&ZuPUN*=t!FU z0FDD;H;YyR+QN;9=(b@C+Df)vmZvCg*6l$Hp6vvvljF@#T5^(7QlryviD-F3KHA=_ zLSKci45!X7B!Tf{0=b^bjXvVJEP|xZS5em&)JEB&lgyS-t)qaX1d^MSrIa5{SyfrG z4Vk5oa#~I32A-ROGZqGjF@o)&mJHLh`c%xekh+ki%Lgn(62ys$MUzUgK_pSzT1nPi zQ%QWvT?KrF9i_;_8b>RI&pF8%L^+*ZJvqo#irzwPt(uoYva5s>Au=g%Le0u!MBdh$ z6^CwL2w|GQmQ*2saysp6dy3gUbyGw>AXiubUlMmA3o}gn*9kK;v3vIz)AHoHTJp}yovLp+6CNL~T`*@m#3UwfA z0)ux@8bEm+q{#&SF8sVW?G!z6;hTMiy6G z+0Z{xXPS&RCDun0#`z15RLwmpie*VUgU*n&bg1CNBrEse>lCzeDb^HJ7lR*4rMk$^ z98Xp>F;e4&YS(%Vz!=(-;H4iNpl|%)TgUbU|!GcJF zH#FmMk~_05R>_{cJ$V|?%HQ{4q6)&)XD68qdO*wf(64EUZD~)%QjEyMiA1Cw)}2%= zOVKsJhs=*&z}yKxOYpbk?x4)OsDX+$gm??Uo=O(y?2b6~j{L-yA<~oa+E2=0i7{8? 
z_0$^*2h(JQNq8vLhugaqt9=@7{cz0W&f`?ar!kZ*7&5wXFg2-ey_BHk_LzcPltP5a zdWpb#>Itg^X@nesZ0r z6EWyHE}=$=uWKkT#kthp;xZH(KltV5vbyCm#aF8x(3Kdm@Tf8QeFtpPT=Z;pDp|c}C-V;zpX7t9AuS;E0HNT`P06mj87FCBgOq_HQj|Dq`@>=ofR;V9U z+y=Pi#jnamJ!)Ki{dNH}uMLlGLyw`QmGFBt8%E(uge4t77*!Ne@cH&Mr1yXp-aVph zGrPA!Xsi}eXz+2gB0YbK)WF$-CbH1-mfM7a+PBs_c;p5Vj6E@eDX8%ct6S**N@C$0 z3qQiG=MIuea!q4&_44Iwmo-M$TzAb{W{A;+JaBQmUx;ZY6`l>5M%ZppHz3G^mqL`zrE>;^}Ti1j!|FaH{Is`jWODDKcv03&0FGa z*-M4vyX|wjogV#R$ZvalRc3q4UH#MTk?d(bX%4tj*xrZs>V{65?a6O@dq&atRPG+L z_2Txf{fchrpKsHT$Zxah>^t@^(ddnd1VLko)W*%x&9Qjl_-ZcH>(F_zeoKDmjnCc~ z-=k(26k$5)7WF%SJq|3??fEXyFUs$s+7mqKRqZUTZ>*0ttiEQg#uf`dBDx);;J1^@ z@+WOeSIo1;h9>+j-QGH$RsU#D-nae7W~hmwccD~0W?GsjaC(vQe*dwbaz z%`Y;~+Ze2F$9A_}!^?=6*9v}H^JHtOf6?grShQ{H&GDA?lA3PN_9E6`XwUj84M;n= zS;=@Lj8QeM{#Q7@$Sys;51Ea~Z+m;WH}yC6Yg22AJb|}RZH-HX?cH;SZm&vF4#SY& z_Vy+fG?;DA9A;sAopeNk6{f zN^kf|Qt#i|yn(WcykQt~4WnI+nSR>Y2klRfH7p#E`@0X0H8xfo#>wZ#8nUH$MD^u* zeys5doqvA+IHPM~g)xEa*YYR-wsKwm)+@4f5ypL}89wZ>!P9qnt5PP+TvBa`mBcSVJreBU8n_ z-f2dEkJIRQF=2E>3}bJvGty7Q;PD=Z(b!uV>2F$N(D%~JiPa4m#?o=X8U6XdfFmS8`oaw*js8IbH|jPhm3jg96gUih90wwcLDR0 z@e(sRxo`|eoRhofa{qkWSbB`X=U|Po(;VY`#`wk!|HM2U%(o)*&Mu?lgtMYs;2brK z<8L^3+*<+sUUxdW*BOz1LMr#ifd5mB?>EFjZ*Nu<#S5!wBXFcP5x+^qGzw$@61Fdff(Cj^JhPid391L~T52N(BmHrqM?EI09;7}d$d zo9kz6^AUU?T2)=Q<`4t_1fIW|1~6aTYaBnq9_(!x*YVNi;C{pMFneaxQxlBF z&YjQIbvnOP*Y~fH{wKGG)Ou&;1v8HR`D$Id4mu8Z&Z{_l{t07m=dOyw`}qAdZS+hu z{QGy^aI(|+Z*@J+_CC@b5FhoePcqlMUIY2NoIa?RCAe3e{HT(go_XbN-)a zo}3rXbUB@eJ8AEYI%DrX=O-TSY~4}U)3&qjNZXFO*v|I4bvt*|T?w4_IcJ<~I%l4G zw|P>H`s|&5ZsyQ@{XIh)ee=%CEclHc^Gexw^JioRw$IGGK7UrG7Z~&~_i^l<6YTY# zT~FnW`R^EW&-nZ?ulSfAbM+iO=AQX8GXwKx(3VrR^(4G|)<}Qz zg+_nVIHO!!Wz|Q-fnqD?HySMW8$GY$HZsrdhQ(murcw6bv;pF z_{b0IIv=>Z&Y1WN_rAxyD(-!s??2#sHQx{Low46Jj_*(N{UF~fAMUyH1N@D7xF>pG zQP0BmMV$})ulOU1%J-PF5%wo-1`jQ8~A<)-|ysmgzxQqXRYq6=lf2+@2cBA#C$E>c=H0s-t$<` z_LFlr%ATLstJmuibKM>QAC8P`U$bbUL$ZcUw%+?K@6{MJ{~~)v=!f8y!$|+_97VGx zcgvoZ>-(G^Iw^ak6TFl?5|njyPU@~^{|q?C9Dd`5b6Jb?*eB<+Pguu0jm**WjsC6* 
zEMJw6*bD+Ayk&{nAdg@rN(W<}7@=(rBzYC(?fzWA2+*k?GlGbo_ue1#kNjMn~8D zx=impKAjhz>^*37bUGb}@0rZpb{QT2#B=-k-Ro?~yf#mtg*!M4ySR3Qx!-z`vA2Ea zfx4bq?Ta3De(Yq|u8*JWeV@_a>HJDvmviPxp-(-#o`D8@rLK2jq+hPh9s|9gjqMj2 z$4Rcz*y;TDx_0(sm-8bhA9Y?t8yBDKNjMX!;MFclpTM5!x^OTXPOOvyheY|Z0KHt(Xa6bIwZg6vld({#Gjto2Nntb^fpBL z3-RZF&CB7>-?Q%({xGhpEdE>rY_A4?uF3%GEdKmR@aJbH{(RrWpQlayVUB`7R|uZG zY~s(4bNKU%^EEwD_%q4GpJ^uk@UGy`3-4I?vt&s8dCw5|GtI=GKIWChALe$LwH3U2 zk?R8o#~gmU8ZY1>8+(8k30&Zn)-dJ|a;9C&y6An~%e|gmPph-FkNwy; z!RUu>aqafHzJ+`~H`gWoC%&!weW}Y_FZz;w{iL}r4VZj*xyox`qVU;vLQ-4gKaYgj$_knZ=A zFXhAPG80x0+hFyJw>4f>C|DKKaN(aj?+kOU%{;k+bGgeo>$VdIjShAGKIk~Sr;_oT z^m2~y)(r-~d+$Zw=&3va4PKI22d!=T-&+1` zsxbOVV>5a8Tza=%eGZR>mpz;3@WDB`bNDCh7wa6plyM3DEw%Q-$Ni9V zIPw*Z0}tM<*FJfN{ye%%pTjFz^PYz_?WyR{_k`Z}pPR!4IXe^|S8xs=e`{pta2@zC z7$4_1aitPmv7f`Q`1RZzL*UTv3gZtWkqurQn~y`0&uJXmNFS<=5y%GrMH?f*p#!vO z$DuzHSpXdRHrI#3q0R6kXA_4!HXK@H!=e9S!g(khs<~6+(5~Awek|Rbk3)}xD+lh= zICS*WIUH&l0S>JjAr5_g>fku^HSWnixP^Jyap*72xozZnDLQ07M;>&}3Vm(TtdRX2 z>1WM{J4g1d*YmZ`k-vLI)?1$=!N}m}$SsVy@EqCwGlx1yR^F@IEIdcPD17&6oFhMg zj~~f7vSw=ObEM~1bNz4B3#YM>{Ni!0DVLwdT4SgRGyNBO|2& z$H(N;fSMLf16tszM@j=;qK%QzfG34dhraeW^*M6!L`?&p;QCNBpiOuS(FdF*dCpyv zJ4foE`_?(KmvIeujuA%$Xb$W5#ms70S@gk zacIFXICPZr^lai#=jb(y?ug~x#o5aD0-flpy$+^aCFoo$3Nq5Uvn=sS>%--t_fdXb1}aw8#1Ef z=^Tg5GEeBoZi`u&jIv`($B5de6H5*9(_UgGp|O=bTS`DMf;lDm$$D;7xeR6beq~Dq#5mbVw`FL24b6HKu8{F~y_}=Tq1n+L6beR`!P`i zc62P=0^g^^j(6uW1>1e1??t9lV_6*O|4(>iwGO=V*5y2B%8mNDgStO-urJATv?XnS zvV!N($I4o%=bW^s#`S}`|KK^!+LGfM*d_X;^O)=T%=rT5j$Wx@Ovgyiu!uAN3^T%- z{(RRna{A+?jm`occO1OR>bM^?GNOY#51c>X(e3xnb9A&$F!o|sT%&A?KV6C*92${Tu$d*18yFuII!rz&RyudKa9?M7Hy#SM$Yb} zpWEIe^l2-)fsdTr1wB#piShh`@#r(|gTVe;)?hXIv1>A{Nmg(96l-^2p{7yu=4kr! 
z(nThXN@#iMo$=TJm+JknbAFCSJ14*>v48 z(RE`Rjhnje*CrW^)tD^r%e@`wx~+S^Z+Bo{Cgf2R=bn)jGYdkvgpwP6b-3t^h zMf!JMsp;44=I3XoX?SW|_Y|e`e7;urWcHdd<(SxegjTL*K0RX$-)>;X9()8otFe9O zvvp!GYu|~D44YZ|&L8o4cU}8j?10$J+IN1B-?#J0Ud3+KzVios-ofV~KJVlc+Jfz@ zeJ8X9`&m0UD)zJXozL*8ZD{Xodq4AA4(=?2R#hmx>19rpLEwXK^~kP+bt31U7^uiJ zx%J$tq4&KHIhBp5hi7jO$g>A{R@1T=c2q?RvC)-kztE51HT(#~Z$Q?r>fa*R;ivv+ zwxi^b5!I>ZN~7T9%FOy1L!g%@8g z|5-250T1QhQ*G$6j>Ny`Yi{hAHa{85H>Q+-4?ZT!kLSzs4tt`9vurQlE&rYl^ZMuH zv%tUSZujZ=_k7Z9>j3)ct$aTn|DOMrvz_sZf6w_B%9#t#!nJ{26^Bno$3gciq5oB! zZ`GWkIllj2|6BW^9ATgIIHz?}D*#?8(*LD5=3FZ_wcE|l&-7ThG|i+NQ`H$a<4R-i zV`H^_T>AXfd}D9lyoStT;V+?0J(D!=+z0P0ek|gP&^%Xo=ar0S1><^u=45m^XR$t$ zgdeKn+<9Z3dG0W-eJ>h^FBiVmlqDh+_#r^^ghqG4uUgOd@NBP*uHDExgY^^f10#G+ z#9w9It9m~?_zOM9ulZn;5T1E5^jvsj;h{o2dtHU$vt$n8iC>4mdyVy$br!!4 zaJca`)~;s~Ja7V>0$xwUbK%pIIf@Sc$JGvn-@+Hy^Q^qH17FF`SM(Y@{k(>K4E;-` z33usxCViLUF~nBzW6rIbUj6>TpX&FY`AKdK#b*B(v?cuM^-d!w=cVAP%u)DNp#v7b z8kuP9rM5{)UN+1<;bkWPr{7fM^RmLneyf0o{T=Hi&krzX*-JGq&H1WvdwX3WZBzVE zXCzGB4j(+>rgq?>cz@t)_ZO@mW0>dc`D#0IiSSd|-|%lQzdf-3S^nl6|I`aR_^e>>4T$Zipl_>>`ztk$ zRD+*J6@Itp!pAy{{#A@+!agIs*kK%BIwSICJ?&jGolkfynXk0lcLDsR&1X|^yb0PN zdqaG9E#J+)U+A?gqU{e9_-;b)t`z*WU2nS3=DP`fw)<{AOaDS6Yvdj732nca?~1l_ zU2sgUPnS>HfoI=u!GDr#1hk!Zv%Z^rkMRCjvn`?R6@0g7yZD~Uy4%N2eG~1wY0e>L zXy45i@JGfabGhdNS)0k-f5kQNC4S?E3g~FgFZvDId>vXe33z(J1zE>Atj%Y6PxXiW z_W-!@4D&q{kA!De8=Hhqi{3)^v-aVg)V(Hb?8S$8vX6HQpYLS9vgVuqtMdMt-%m%v zh3|fccY94*t3qE>-zV>Y#GaYZsp0 zo~$w3l0A7b-%n>x-VNNWJ^ASQdQX0aYr=08@>(ME3Z7Yf!d;i@y(siAd&Yx<;ukfM zHb#PTUvv$RbDw2gf^#b#owL!7b1VAwInwt}7S1gK=kNs;oI`%@0-r~UbC)w-JC9}Y zaTd-^vf*6X#JTqgoJ!KJ3UmYcu=)#NCFiysRzhzabBIg$1^PZyXLmHAdB1UH9}l5> z>C*f9co+Ls?~y6p?{a=waU6D9B7MldSft*c(tROgbg+N*^~v2|q&+#y&yle^E6{~j zWZt~d3GX@v9yVWhBx^mRF_M}3vHHw(Waoc?=Jq;gbT4*JITnEj8>pCm%;0kd`n3~x zL{1WjaQMW#1CPygMEbw|?!f-{?TVf>sx;l0#Ts_<{{0ok@h0c}_s*A z>b1FNiE&ugX9i;!sF-@JZ|9fp?Tyd4Pug3|Sk`fl$XF%}GM1Yd%S^^n#aOn(8!okt zrHZl0_-9QVd=1}uqx2ez?chwU;ml#y@FsH&7s(oOrVXlJ`naY?vfd8>GfR&Yo2&Uj 
zxo+1XZN4jaU2xWZPo6!S*G@$bv+w%S^V&BnH-7ur_wUSIKbvz{XN*$6w7T@Xj!e#t z-#)Js+j7_czn+)SpaJJN==g=urb*E8$_V5vO4T7>fl-fnKY=Ihk8N{KPxTFbjyUgNnr(DQ$gYumN0LY~{fyN__K=dU$f z_L=xE{ue*y`kUDE-h_vLW8RX?Y{$%Feemhi`nJ9B;bZ*G6kb>2>ZWxIoqQ46|Aljn zy(by(0VOxg=oY@yJI1m1e1|dlB+sg`F^;3o>D^zX{W*8*evf`wuVe3vMn?_%ZVYnd zTk|9R?}K-E<3a4Yj#GD)L-@*8bVv-_?Z0L% zvG4U<$T_%H?0?fvsyH|9_4bJW%!f}tK&*>F8-oA;{55!U&W|4KIX&mKFWR>@km;F< zEs=Jmy=&paE@6JXt90L)^|`SfGOt}W#CX;XF`j2ijOXtd4?M%5PkheDdKMb*^dyWYu)p++&)P=LAaG>A_=?S{G#*<>-(B?eEc(eV=k+H= zHj9D7J=nyKU{C8AXFOcPGt=24H~eNGZ!bN0jtln zpWj1&GeyTU2*31s?!U9`2I$!J(6j5HYmLyiweVhR;JwCayC1S@6?~=xTJqF8%nu(~ zX?q9nzHFP@CC;4Q6CNcGMz`?nf53D8888(7&3^yA=C$uIPO%+3%-{QVefZ=NQ~sAd znxYNMw^n3j;UlFjncq~~dtzIb>%+n3gS0nO;K%$FFUkBA?9BPC+ihfGy zip6Wzb6@y|N#K%QCYHWM{;>2@nRQy8Dw2h5HU=xU_%rx{jMVlp=NxT&xJ}+cKhwjy zZ{fR=h0#;Cn%6hVC$h5OyQQDH>537^!nB#yPw_p%`yV&k5?S~oe79s_r7N<{XQVHl`nYmi6n}<-|v0 zYWF?(?Q~XdeYk@2Zm}`>*oy0o#*?#*{x=rm<8f?kroPJ9FV9Y&V|4t(e@8MWoHM)M zqK`lS!>3;Ob;XQhPjlXMA~VV}N2ERa?sBeCv}4^G>1WN!-FFxwuXP~P^?zg@I&5Tu zH}J)g@trufEYmdB@O5$qbUMFK*9DBboRgq=&cg|T%Os=!QTEKC<@MpWXB+)*!Amq% z8opmnjr8B|oP4bJgGS>_&ZONH7yU@iDdEotwol0PIh`GsG3LJ4#%1~%oE^P482y*? zTgF+%r{n?A~N$F{967OYw|4fT#TIA^#QT>SLe-9=G4nP z$1@+9hr~mxxj2pfL$ymY&!PMIE%W-y#OgiI&Ez|CR`=%{{eAyu9B`korwUq$ZlHg~ z`PFK^Z#u_!58Pz*Kk2;i6E}!}k{BZgK)6WUwas>T%BMehpu$ zM_KnKQ!m(r{y^UQ9oOZ47=N)&zu}YbM`%ZQMB3Uf`hl0!GgG?PbN!E8muoHNGrjMF zt}EI`ZiB?qjO=T{rRfQyV{e?8SmCi4%aiCn>)~-6$mn8|J4zb?_EYD(1AdXOf6e`P z9XeaifJs{eCk6k!wACAo^zY^@61;{ddj3t;{_QIw{X&bRAMmx~s|`lS{mvQ3BE%N$ z=1i5ogrB;Hex>8n`5Jg&{qDU1xKDKKnSLF5P~Mj{=#3ivuO%4w>_~qH zXOY4s`ZxltzisYsiBXjMy&o;m8PJ}bV?Q0wI#n)xxP4c9-Hu(_M`btTpjN!*dnSYPBg3N^YF)oZzji!& zU_m|}9RTK^ghokUz4Ud_MCJ)R_D-tW^A`AI;nKD&E=@kx13nbv(u^!F-ASJcmuRnQ zhKWzrIedz}HlXn7&fjZ%lJyaM`Z8k?T#?_OK&DxXT=FLH_#=B-;RSnI+bm{w|3_7h zRv$S&(0D(%D9^0qOtWaX$OnSY263&OmG8^b&)0)9>)vbZ@2l2ldl&loYVL&in=TYCm0spAY+~&D>7{lP-AE9TOsq zMW25i*jeTLi!=A5!0!QY<_LSjy1$zJ^V;l4zdR#*NcPWybBxDQJpZljo=k60&t3e! 
zy5@Vyqrn_$dmKD;XXPg!zDo9Y3%Cdk6aHxqpFQjY*>kVrH*l`pr%lZWmIrtnoauzbzSrL5X9LKt58jXi8U78t>uI%zTqT$v%VeBW@WXT>gElANb2NUBFhJiScQ{;1T9Gjc3=P z3yn!G5MZqMK-+Iw+h*2Q)?qOEbwBM0{VKHiytC~u;h#PZAN4W#sgJ@}eFVP#!^BBf zD!yLU{3!CIea)XYabi5<`VMP+XhG>U{%i19)>y{e{jR3FpMWO4wpiw>d7!6EUdz}) zzK|W*^w>kfn_IGw_>T0P1Ap^MO`g5skYDec<~QG~^dX(d*7iO0S=pnkkFn#KI*CUY zc`ZY(jlON#U!-r|xh9T_PH$E>Id~MF%UFBZ6H|carx^2K@La{XgvYVt-iwb9G{)Fp z_Wkn|v5-x)bM$9=oJzlPex(160fR6TV{-goY}eVBf8cs;;dP#Wnd=L7yQ zEC!$TsKc?h$D|p*gf_~xZ*UEpW}XdK>?|?%^VQsou-`>yxgFnmvB`=I^2}=kU$*SB z^}@5@o2`A_@r%XxLHN2td#SwpkC)|aYs!9Y+H=LnvkE-Ai*x@nc$K?&U*`qOx4Bwt z^LLEB26!F%VU24x_L_`o^IdVV-Vc38^x72Kd=IhjUS>^{O$EJ5A2=nt74i3Ix?Fsx zO7|CHT?d@SF&?rG_`E7PMtDqt)9dr#wfI99^*nl(=l==1*$ZtJJxM3L-fre7-;psD zpDB8#yYSnRwI~(0&?Rx;k}pE?{~RTjKwdBStj6l!)hHTLD0}zJTbj-5bs6g<`jY>RFPi@>%v@*+(zS zUYw@!Xou|W5`GacRvLRFjIHTc`Fmu6>Ff0DA#0E1+o%WFqeJhJ;xSqKa>Ki#BYP4* z5Tiq2w&0HUe#mf$FSF=Vo>IBALYztVXLjFS{LDh0QFerb4)h!j&f^NsWhdwJ7|v;8 z6}AyiuuI|d8uUqM^t5CoeCR@S2eg}^-KtMLtJny` zo6Ee!r*1m$gn8#S-r0`LRQ!i>@6O=e>FVA8rVV^)_qLT@sjzIrwG zp5HsGyM-qF1o-^gG2vJZ>$TW$9$NrE|5HaKyaT%MGHv|%X4?uGr$m2gnFDvg~-oESJ?eNA2kk_NwIOOvW@6^9=!Ka_Q z?}B$5UYPjZb7y_9X-;SkwvB;4)O;WL`}h&wmG`QU&z?e;AT%S=)D)Wil5xBXpM|@c)`eC!EeP>!hs02b?qG41 zaXi7e7Q9{m!r}{z`LRa$_k8}2&ks}^hd*0wblg;}{YE9v8F$0@bFNinpVQY4bM0ibRL;clDrcf$aNbNe_WSr8*p9wq7km*i z;lF@`f_M9{n;iwuUIa%ppVa=mQ(*sj-FNS%6Y4eE7h3YpxYKNGw~&QQC~r{U5*`Vo9jXqR(S+Wh9L@C>__hv5@8?PL9Wi2pu9EK=v4iyl7k zgg&Rmmi0h`y`CBuc&i|;}K${D`qiKG%Bl1MweU2xp-I2$i{pN(n$sk$R zlOQkECUO~WB9~z$xePavqw1sWi>hO*9)I@LDxM*i;U?@jmE_MNT_(;Z=|K(r@oW z^-mn4-@XSm{G{(5Vyli2mnD7o5SJx=zuda&@gLH6)`mpiJ;Y=k5t|r&_Yi+|MB)R1 z>C3>fCxPuNNL!VXN0B^b;aYWj?+%1elPmt%x zx7Z_jE6I`A)4IFvNGrJ%=X1SPawy_o*m^tHKEro%cD3HY_dEFx|JAy)?$~|xPaOM^ z@x+h5x$5y%k4$*{*pDJl9DC65#HvSDJ$}q%Jl@x8oa}woIJx5ZipP876COX(x~op` z@hEF5xF~T!hkrNl<-U2>C_jGW{Kmd*k^Td+w$A(NE(bp*fSXO1I#|nJ)U6xezUZ&e zhu_5cHn3}aUT(^h(Pz|oI-ciQXT>*HI}Bu}foW0BaIpi{WHjA=V4JxYq1C(J*0lPs z@j+U5hmpC0K6`nm6h9>AtHdn4jxH^-PRHS)m+e@DTqyoPol!FmvPI+hOXlZ)eOUJ= 
zGL;kj{O=m%O6)ANu^kBCE7ud889l`7b#gA^m-k2~bn^iGStsXFSKR1mt{tcN(cby& z>E*gT&D)M4Pv}_XQ`b9x9Q$2l5=X~d|4Mu8sUGoZSVxGtZ$jDCKT^Q*X9`*I@==j_8yhLv!Jhj!=J@8*A&|6r!(vKG1^BMd%WCV2fT}hpnz8y^NdmlGtQLFB8s= z_cET36JvjZYrT8y^zwrliJe@|9D0y(MHl;qJj35Nh@r8b6?%EdMlXB5YSK37n$XJ~ zw0{%%FS_P$V?X?|&Li#J`N7N^31lq(u6%jm%f}MfaKlhf# zslE89S$kaU&u_PU8{WV&=Y;3W#r_UBbq-=VPmPPs^K`(-5U1JSd(heOZ}>}E zIP%;a|u zJzw|SOD2;6(a-1i&*zVM+&lN)v%SxIKkxHC%;DSYb>73XR-Q#ajK!17!&b%Gk$=*> zBL6~1B3Jn?%5RA9#Qio_u3E%@TeDrcT7g`pPRZ4#zLKkBh+C~2wS@Zi@^0t5mm_Cg zxk{b?_=@>hads!U+L+w6)|%=3Dd@HA6e@g$kz0Wa|KJpE8)-yD>^8cl!GhgmpI6fqF9P5&hqvzO{ zAfJgNDV||)TmJIRlq-i1U+K)rE#by9>qf~Rfi5>*>onq*AOHRA5o%`(_D>f5cjP>M zmz>XXwbTB6`wZ^ZyfFKTyl9h3)&Z5iS~<$yYp!O81(l8;TI zu8%s>{y$k$_3%wPbshLtkBq8=_Om8;rv0_lxoq%F;W}tv>o}FgooPSUrG4SMr&C)l z?OQJG=f==}Rt)WDeMj2QiJ^Vbr)XdBQ`?eZ^~kR*XkYRx3)+|b60Oz|msL+(mfDu= zsz+|A?K*VodUR^R)T4djyZDM|$Ab0;K^s}nj}7e$_r*)HT-wiyq5UkE_AO{%yl5x1 zPtcJ=`;YD(GU+jBvJSi5&_2EZ*-S3&V_!RSkp=B*KC+?xI%q$1d1u-$b>`%ja2>Q? z2VF@Yl1#jTy-am3U6r9PPea#2o*j~|(9;g^jVOFWeQbtri0{?OK49E+@C^&TQHNeo z@A8d0_(r|#17K5!9#AhG0GQOlH@3nzw!$}3fa5Co#)t5Y58xXqz-|?~L_PY!uE{;a zTcL-o6~tEzPC|D{8X^AH2z`tt@7)XJL|;S$VH}BeMc!&5_E%knz z{auXQj7K@jSXBd8e051<>$5oDs;V-Vu9zG14J^a69v)9Df>@}{@f!C%T1_leCOg1?lzHoJ1Blplu8zVt9{cIoP}*`=$OyEeP@ zblL3E&&yq#UAlR>YqLu)_iXm1&vmxhWe?1OMk}Gw!fl0)zF!WF?w;HOTMs>yb<#p; zAiBf8wWdHhf&JB&;~#o~97T)B%`bYq5xv;Z=;h8?1|E$jC7{t6(9$jB?-h;a9dBrK zjIW(W7q!OH=;a+~^eej~js9*&JB?1GUhV(z&Xut&bRK_bR3KpNQt-Mu4L_{*R`8wU z)Af9@-rl)CV`s@m6h8@AvQ>@0-e$k6{JF9V#Cz5e>m%QMY#gV`mBR0}{z82R;yLG> zU}z68(rh}Htxwf zjCfAU9*F1UUh7cJZe4ZweE-e87tg8wYyM;V?(tG@4A`VENKd>3-Aw(`TGZ{#r=DHF zGiR-%;^6xmU+)D2Mm=6!`pTfuBRcW1 zm^f~SkKyC|C=(iwd48yzI5WWEV#;h#8TeKseCAE=9X`hMHSY6Oy23Z`oi6j@(qG4! 
z>fJm17#@`3(nP?$Iy^GwUVQ9NZe8MI%eeRWn8U+j`mMDRkH)+)K%WjD%Vw@rxA<6* zW%$@Y(cTK*`h8a>W5agEzZAf~L{6L;CS8CZ=}lyw;JtfEURd#XKK#Js zN8yYzatKbfs!t~#vmg1)>U|kiPTmvfJU}_`jEhEW?alaNmlKCu=<>xvhcB8uTz{ti zUF_k?BsRBcf7^r|tg(n^yv5jc-9Mv6ds?{@e3SGuAv=>6y1C#omDUd^}Y&hQL# zRU2L$i|$2t@4I!2?ly5Rx)YtMT>O|k+S5D1#?W0_qySv<#_8eLnLH!kBDoPd`g4^V zp&2L))=`jz?R?mfQk$&L?bzZe`lNo7Ps%=Zc0dopO4`#e|II2knL=RPXX0DB zSFBk*Fe?T}Bo95f&ZfL~@5yKH`|_CYZ*%*89(_+l$6C9K*Z;HVM?OF0(%FIR5p7BD zx4_i^ysy;PF{h4vY)6CtqWjC?p;7#;;@R|Lf)k4{Y)n}3rk;L1fj(tHE2=B^4a37D ztQ`k)tnj#1M*mM)YGh#PAIzDf#Sz=dS>ezMbyv~{_!fG8dSnlMOvDqs_8RdwNtV2l zY@MVs#jFXMd?D$PWWfd7Ha53GqeJJ!Uiz#*#^-EPJ#&f4oJ=3;oc7K6fWPM421mz% z2GqwW@`mxs);S_$&T+8{>_wXD#p{)(RfpF}_Xxjn|v9;4!oKUi=}XJ<+M5 zQRUEW6Z`RYX-yjb`w;I{_5;??V%upVW>C1NZ^WxUWF22)u$gD!(lx5rc`v=FJtoYH z0~^QK)Na&oRZB)W_Z5r@8c@9!bfJ1*ck6vb<3ctiT5mM5d*C7QMm6Ys$_a2PIbE`5 z=Y^-dV|bnLbp`R(W66*2;cOn??qN;u;s4589=dBxc+fkU7fCnMw}bh1M9jC1(1Z4J z-Mq;P-_+k)?&TPleeTgoEM98u#gk>Ci|CV9IO`=-zkjZo!`ZXj=duKv_%(AGdg9Ry z3vZ2V51sO_?RlSnwO#a{wRP5px?az3&+@;&z3zABne^jCy0Mo>SNX-TS;MDswq%`e z^rlMct)bpZ>Mf++Oya`oHKrwsc}R=&SZ_VJ=fg4KR^ksV_IB9twx2t&N0)N{2kGC1 zj91^Q&K&yHK)-V6R|EZ0oi;GldB-`-o#GfuW|}cN?G#wmw{HTclk>uxY4;cC26bJm zQ}59TNqX(r$hEiv|4)5vjUb_wW%EaG_-FBDUX8QK{ zt>(ITi#hu$6{&M?)pCe}KblVw_G7Van0AL)a7 zC*3W!uWiHwi1yaAHmCQEeyjRQhyQ0p*6_?C)=;>^b-k{g^=-44*U=52lh|C#FXDqP zfe(~QS7Gm8hHWiR?_!ZN$nY+cJE-HC&;iRHI>%bb8cOB(xz@s8v1j1A!rpWG2kkyT znrvCuW%aJ?uYSbyUniafT!a7a-O0Q>cCU%M-GSYm!~Wty(+pgj1{1rkT#)o(5irs` z#`bw)Y@ajiD(hqG8e~{)4seL=ug6=((}V}TU70x(nEh|+g6~l8|E4Zj{>h2$hW2e-+IpNa`fZCki2jTG78+=yBoN-k#Ai5tKk7dYi*owzMH+#HS850 zG})b>E#f04UuQFRK&~CABA&B41wOvjKQvNQZ{MGa4*ePNR9lz$!!1uHg+Dvd+S^Du z<%3&>x~*NXB%;zYNm-tk;8H@JX! 
z(<=KIV#1`Kh{q6rD!uJQ=qFaM_H=3wFUp}k&-Yq4#;Sg&kMT|Hg06MW%Qx2qvF}sR z$5XK#I-UVLi#4U(e!i-Jg>LYq9S-Iz#>lP?=t>M)I{j;QYzy=#Xg zjB?Jryw>Ksec@bu=}}JVlbYm= zxtZMO?`@m#D7aJV8(5WQA30aCgbn0S>tpwsyPLcg4ZgY$0=?@#DD6>5Oz@F&gZtAL zl=*|;jK-Mwfnf??GPG4Xzvey5Fx(_wZ z3l3wPmmcOg2gHrD9(fl(PVxn;A7Brd`!(Y%^>-fUN?+n})=g_4=aK~D)Z7mvW}=+& zdU#Wa961!;?1V20Z?;j!ISU4TU9x2hIFJHQ(cDf+@XqhPYvx_-*=?Cn-*K_V-@l9uS@Gpw*ORs9!J$RB2ez2H6*WGM%p%Qcp#Zb4f*Rzyzh0yP! z{^$tYTjavay3yDSgU&L1rUd%`e4E~PIba{D{ui+Q;$@tduR=8aZD$!20q?{0x9{5W z?f!Pj)r0xlFLrsI;1vkbmQxA`vq_9uYfjqrNKAH~m^=1OZ7HtUVzQ8!Yz^qW7k zHd-Tn1esEUuHflI`bPNt7uHgZzo?^L6019T-~Q44b{2If$^))GNH4MAnP>A&o!xl4 z&fEyxX>itw;f*KsyO~a za@(*ZFr3QgqsW4zp}C8x=uW>Z*7JTu`-jV4{<-38j+7oH|s&nxfIao#`Q&~e@q(}W!vLcex^E5fza#5H<; zg?#8+ey7Kv$wT?EW#_5v^~4<=%zu93JBIFev)Alx3(09ik^UIIWIX~`B_E5=w*o~g5966$Y`D(76&Nx|56W!X1L!Tmb#^cC5xjnK4s^my97G1=^GfLEk9Zp;hIMkaouExoP9=$i{X*-hGGb$D7A`qj*^HVfgXxA&&I>`tj~Xx5@gY;hlp|GPpM6hvuq% zkMiTSeR+USI{v^9;oaVv-388lea#BwrTlAV-gMrU5B^AA`4#LP1$NRonvOl-4`E({ zk?d*4xCk#GEVOt*x-W8JS1_@UNaGS*CRq z&)5CQKO{rbB6lC;ebPVmzGIpD|3yv*wM)Ns-pAT^taRy~#qS)Q@LUJM%&t~02qS+b zTdaGmR_$d~d9N>md2IiMH-YbX9z@t}C-yvudF*GXb9DI5=RwrHd31;KAe{dPIS*n6 z^$1q6{q@d+7yxcTyW~h=E||;javsFbTiZCVOKb6o^Jwn`2edvY-9tR0FaAc0cIvU= zXXU!MIN11JKao$wT`vg1ANIok-uqGB^19?P;W4>}KEw~@du1)@z_<1A8J$`0`QSz~ z)~r-(S^5MkT+^-1!ReXkPz_OohsBGH|1lB&P>ySlhwP?(i?-zJ94h#cd+x}eTH$_^ z?67z4)>U7zj||?T`KG>3yhVIAl3&`KAwIapvu-AeedD$Ai ziS2g{FigBA(Vn|E+t7gHL;8Z4U)i+sdvwqb!F}yRF!S1ZO+opvBv(plPxx5N{<8qM z=;@}ZeVI#lO?F|5S?{PJPhFJt4)HWhUb7G$#YF%~q9<#3cp~_%4H6l~rx5s*a159`!4%`yAJ z@X;w&dd{z-0r{SR!34D@d3iDR<}UP;njT|Xom@8LW79gVzNrIV@&^v1e+|?j+ioRv z_sXxWuOb>rMHrhaRx~*lW;kzX+Zgk%#ZiYpnzZ$^)m| zhwu9WRsBoIr9yu)?IT+(_~0IL0ro4wUI9**O|{mS0PiTiu2yKUhO+9j$}7*T({K2p zY&!LOKQZpg_2BhEdzIA(?OD~{)qp+oIrUThj{+~tzT-J^<38f`Ka2K_+&FmblGS|l zwUVK}<%VCB;Ded>xrv(;4Tz^hlLu_B)15esv`7TGqxXuJQac&wIo9+WTXPxT3sbD> zJ>*lwmo-6tR(+@W+KxS`{w$#Ep=d4q@1uXHJ{=CNbr&wj#G@UG*223DtywAq>>EW( 
z_|QCB+sD10x9S>4YdbQ&Ukt-1Ze60a&D?({#ABA2pwp3hHyg0JA=l3#}#3or@Z>dAIT7P02p7#9n0G5yMOV*ysacl1S?zX-S# zBo?4~rIA6Ah{3VLi3Qk6ALIQ1($k(h5_tu#Nv{9nGo$0j#sd)J?!d3i=} zXD0sjSh?ZFugm5*R9yTZbcoov_;`OoI_HgK!Bd`~4G-U?XZ)4_;=OiUyP^Z}FU`jS zzS#&a8d~k>7tLEGY3z~V5|g~ zIC`X~3wro{JAJo!7pOD+%7vch-e+hiz}Z!e!+ef!G0J(njRi)>`8)U@W$fY+f?*Rd zDc{$&Y_s?s&q}E?R`*HN|0ulAE3Yv)bufQk9TTbJrc14`awr|FE^qv?v=&>hbh!9> z^Qrene3#mT{u27H^cD{mO)X}PT{@QdwV|<$$UW)SgVK5QKfb;nP@ib34S8n1x2mVQ z@@pT@KXS^M}Ang z$ne9Vvl8<|?|dY$O%DbOfk8aY?4-~4(&q=zX+q9?wCjVxg?VAEi8}Pr!9MS(+m3ts z@a64E!259J?b@A2-VRY2SKelG@5$Sv-RA>!jgz;1)4!j*4Z3wn-X?SZedO)+z|)hr zkM1<`Jaj+!v{(8`U)DgI z(U~TJA8mUkwk#so)9&qeY}|zWl_K>Og{LQm0=6@WxE}T;SuMECZzQ~(^ zZQGtpTNbf4{0{X{j(W&F$NTBjlSNL_(x2sr%P!3eYu)~)U)#zv5`KaFUdlPCdTv24 zE*1am-mgk!H`C_rjJed673zBwU*JQ7tT2Hq(0%*4fvuB?xh*iUy+QhAQQscmAwBp; z>h#(>l{G(WfuV&JR~y_g9pVt13{At7ZX2KiRgW?6_ zxjKGEcf9XzHGCqr-)-m#j^C&l8-SRj6e}-089!#go-^Nu|DLg)bH*zY$3}}Mc>bo9 zuD;QOHD}GM_;tXt_AUfQvMp!vUUPQ1dX*1-@Z0#v*W}R5a`mdLW>SOc} zF_y=6`~JS!J7{aW*Zodt>}nLzh%5GoE+cm4H{ztTo4{)v3F7ZW~^_;k5%)Y z$XB$LbppNfd_@h1@D;VB9=@-r)H##0^V)#q(^c@jZ^NsfgI6IJuf|n$#H+HaT={f< zV!TR-KQEd({I{AiNw;)_^O?Ze!?7>e(^g|}(^>EB3(N!Phyn7(1p?OIU>3TV{95o4 z<&?{`eJ!ac&6+>6yS1$u`*^68w|pLY%)vJ~gZ*#TXqpGZa<*KUk zH4hwbr&d+*&D`3RtDbs!-KuUizHn{%+6PjX*`3BWh?txcOG{g~@_iNMu3Ech)%kY! 
zsy|Y9q<+<^>E){*IG^WB?VhV5wX0Tj``emT-Dkb^K!9~r2R_8)sD9NgI7N52O;C<8 z!NY^oX2x(8v0ujnldBo`yzI5}=Q76Wz+|T2$orJX{b7x-l<#uLTR5HX0>C3huyEf= zhIW>p{sVqyts6Aqd%hYzFWPyIJ;|~7A{@8rx6Ys3#oC9nraRT>zh^kHlj)JJM;s{Q z>}xHdZ<1?2qs~~_<;g9zQwmO&qWcx&w_Qezn7+T3?`?2H@1+xO>n$HfMr4{(kJ+2` zHn=T2ayha5-xhO_OSzEmJtyX%@Jb`+n!&|rOw7StFtrH)^6W=^%vWjuT@Jwzh^_$n%eEd z&Re&=<(|(-C*=T1g0bHX#*&!4wud+oe0 zwksAc$L_wWZJ13gATfscVRHgnx6yu@KqN_aN+x;kcVrINGjx~rvJL;EwE_IxE%>`X zJG~&hv?(@h-n-pl`c>Sl$G`j_>C3O?+LY zJP+`lRla(acp`Gc>^n*zKeYZSS>vrKp9W5QJlb1R?hDSH0t_^tiFmBHru;8*zwICn zbN9RKg>?30J;0tUlQ(ewS|h);ro7QvQ#O0G1l#6qMwd-gx8$1El%MA({MK5X%KXDE z^E7>D{jk%zWB@&=5;^YJUms@nl3mMsl(RlLi}DNUkJcy0WEy$k%s;ex-Ad6BYe!t; z*C*@UJxp5TR2{MJr>w;$q|GO>5#wzk^~Lko=U!}R>u`LC-}E|so94IMj7{^U%D6Vo zXWV->4QH%7&p*{Q&ZgPS+1l|wi^K8PzwOo~n`S-t-^Zqr{Pk>_E8RIA!LxXuO)=vW z|JKjyV*KR6Aoc?D5Io6R?(gk$_ocH1j z@U5Hp0^wSp$a7P8=f)SX-akQc1E=#ndXr-_m0|}yr1b#@w@jI_G4-*=)Sj|GPXp6#PGqsV^S>XU5d$!GDG7J6!n3`IZg_|7GOr%Q-a;{v%!Z`3&4X z-ap9UONrqBIOPjZiG%-tx#cZW{_$^2c@O?oz(3yq>&1%+|GzCB^$+O3hZ`?;@%!H% zFDBl5uz1v3WX1obc(I9&?T}t2Kdcoa8+H>z;KjGd=jW~Q+raNYyx0ouqv)7#Kz7{{ z_Tnyd;{YDvUV0QZ{ImJuDO<=ZfX=6wPOVoj(pneoD!#Lkv#pfRv$0~+s&uNz)_CX|oyL_poi9ww3&JEv)U06py0qz3_Xl?QGUd zn}-pvf8tr;Q+Pjmzkgd9c1`S_EIVp&Rb}N9?u&d~xP*O6{Qr0b_HbA0)SaxoD<-p% z+&`AP|Er0-cMI_oAzbnOlAkRU8dh4!Rp2q8Bcz!5eIwk*_W68 z8y`?V!xL6CnfhOAG}py~1?T?G=Nrer?2dn&%CLva_=(A7XZ_;$<;YvGFuJSkc`CoO7(^dY|%|!oNt|P8;*)avm^mNU!(2xvS=(~#%K6e=a=Xl#-!z!D zNN7}R4-NMgw663`CT1YmQUaZeE=%D7bMRM2pG*!{5--ra#IByZ=06S%G!F}eqr-yX z!VNCIQm16SvwzC(s~*RH?VVOVyH=Sph0s%cea-Nn=)_<+@YccWIaT%0ez?KZ6Fn{v zu3T%rS39xo<~slVVJGj{RPr^{k>vS#|(yo5a4gX!!0awd;cujSyW;2tt})M zo9vkya`x?f_w2CFwexu2VxBdEuZlVAga4`FU6=3S>kd3;+A1b@}XZF^uUT|zIW3Q=C6>1+o@nh+Aem^!A zxo~Er68q_M&Ms+6Qi2F=ZN}(tSd&3W8VV)?B)u7mymb-lH_pV%cj3&8tZW* zIWKe+XL;HVe#+L+_fftL{H1-aV!z9E%AFaO{8)&+_e>q~hx<7b-8oWz^8(dDPW8Hj zw^ef-b@q6lMVoz7O#*G}+n9tTcu)odAjqc6GXr7CAbdW=<+vlV5iP-r#m-#3{ z?wh?zULBi=;n6(A@^a0Ac)2$R@%4>RV5*&9Zf#{pFl( 
z{9y+@?i!Vgr=Mrr=c=-*^IYwo$bP>#8tRf@z81MO6pZ0-e)nEu_hK)^>okV*-TBMZ z{8dPH_kefyWDb$rGkW8Dl(jXOv5$)QK&nCPl`3%|>u!^`1M54O+s0XY`W3;DL@{`Pr_{Z8`~Jubz_R^;7P z%+oSgt_Dx^<~iWV)!bjXa`iYPE2Ec74#6icPYu^BF!hI>`nn)b=^ORUasGF8cx@y7 znn3;W^6Hz%(N8~q}dQ)ePJ#@Jyq zffGKLWf`${*aK$X9z!3gyxPo>>jU0@nW6m${VBhq1=TN$`>^s>v`3^ms z94}L7SL^9|uQ9((y^7s5-;)Q=T~|3C*vL0{s52r4IXPa<-T{@j7qS&SGMxSWj1M!AtlJbL_sX`}Jwr($nbXmfg3?oQ>Tlaw&bN zL{D$>^{H}l2~ftX`#S0_r*7%tt<>F*v!~RSP2J^uHxqxk4gSRXz~gi2FGu(O^ATxP zGg7SeKYiZdMGCM`o$>X}VBKN_cre=EzonSGfy6{@W9`z(ao3*bAU$$!OuyzRE;6oP zmjg46DYnd+F=c{`CB97j_dkmHUi}nI`!Mc@8TaWE1=BvjG^1q;@$4FlW%mK5=6rM8 zg{kr<9)M{p=Rz}I1KoP-@R6#`fNigqpPd*Iryl-S%}L7}G1s5BJcWEB!QR#g;(Q!< zdwsONZkzA{M(b&!TTlD1|=-i5m*oWzN~5**tq?g#R6l&rUJl z$M*ZW7R5w%jN266iUwobzcQx%3u)h@bH%ZE{rDN*2>w3=K8NyM1Q!_kn0pxeA{M7; ztoaAd2|ZM-_L+4b?l>wfL$-AF>Z=K~g?(=(ejq_bprhGr&#cgg~z<$}y+)E!+ z4sHj2G5waDv)$+37)l&GOOJdEpY?nf-CY=d#53{CL&VP97avceHWTG1yOcBEoc8u?Xl5#Ccp7w>Tti^rf;En#qUkqM1D-^79KG9vyfzpmKkM=Xzzz zfvs~+*RX;PdmjJPj7)cFFMMT3+6$j%Xm48IL(`tuW~2XANB3?lGjN=GiYfQ_NiO~Q zWz&Yn|E95>{zh}JGZ0%RCu1*lMIQ*D50JmQz=ti{zUMJC&ET41aL)iv!nF|a3|R4W z*C&$9d(p{eV#X8U*GYQsFL3f{*rGf90>#Rw!TfspG>TcT*4%17AH*i)T%_-PZ2tY& zdgOoJ%~{Oqk8pVrG;|!}^Y}+2bym7`R|!67f2J2Fuo<0n3pkXh?(5jY)K9fhMp~%pr{59U%LL+c_2f6xPW7?)suV6Y* zFpbHtBUn24k3H_;zme&k$0OJ@g9n1A?my#Rad8J5e_Mfk6DiO&{`VQ(SYPrxK6rcy z&c3SvxksbSUnBGSV?TTWzl0U~Cug(&%Q^Qa5SrXt<411b&qKaG8#w8)59H^Oz0{|C z&inxUL}!b)`8m6pv!5GM`c$P8BdNZLSImqer;uan@4~z9sI`);+8Y0Vw+4FA=7&Dk zeM94SlAo`U9MftedNOB}Qm69I%P&`hO&sR?MHP3{Vu$U8U(^yPU#&8m7K^s+>f`8l z7JNHlzoCi8$gf|IEgL=AjKRkkG>3|DuUro-@muJuIQj02*%PJpX|2capK%^=I~Tr) zPrktT|Sk`TdkX8~oMSqyK8BOJ`qfFlJw@WG(H=MmilB$p#7vH<3$;a+<%) zJbSTrmxIe5-^qvX6k}Jd#-DLG`CI(x2M5dF^2!Q>SBdhs^l>iX{VisN{MnE(ipj-;)*H31Ba~ya#7rYq_4xUmz zcmAA|^r~ENE(N@q`|DK;Qh{lkKc!`JTGjlfv^n#U&&$fhn}NUfHM0(i&BpxnHF&>e zs#Q&_;DJ~o@p8e}&|t^?Qi@F=+kitaD%TKGu7sFlQ0_z#E{rnEFzB;B{EUfQz$ITJLe4$Pqa)jU-;h4 z^E~!@-L!$YY%qP7jh`^qeZ+uOa`uw^#yZ1AW09{>zPMg*Sr5vm*q1$M@q1SHU8(cVOzgJN 
ze=;I_x=1diMXvqGdN9$sh7K=ctigcJR43;rw3|-M`RU|;$h6a)HHuy6v3*L=E!l^> zXNt9cGj(cDoN_;uVh6{=ka@%}U4RT71H8za?VjCU3_JwW_;)t~<0xa4Zl^PGvfvGU z*&kOo?dI0M(-v)3lZ)X%9tNWWrbV3dJt#NgP-7e7!jTz39iEY|p); zXu;6^V;&f{e;m174{yv*{4bAr_#wtT_aI|V1nUs6_TV}g+7Pbnf*%H$&l+%vc@5_t z6Id|LVw_#9^;`YE)$G@q>*X9)dF=_;{@OJ9B;4P3irol#rPA{|6tjT!$;e`>$1tI5guf% zpGH07?BHDGVm`9$)Zrtttr7Z8>m(uep35&8px>p$+kOVVd2I)2CwG6r#-ZRV|L=my;vv0tT#xz_V}*phlz3?59k>pqxHp5>zM=eqB= z4sWqqPgh=U<>W5neUaWX-;^DGJh`CH&$iZwUA*a5Hgx#$;7yR6)#LV?eA;EUbwsm2 zcy8_;c@N}a^H}ANjmIuxFo*FA87oy19}M+Gj(n%#JT zl@#JUNpgv5Ea3cT<+gG2b?4*;76@0fkFly_&r#9ZeU=??@^)tHe7 zdkgV#d-bN~urFwZ&3-fVRcNE^&b-ziFxGIry&+(CT_4P{!l?mr{Mg-E8qqIf>&XQk zxxhpCpF+QK_oplnJqNMrMB_o(SZ@F2JJdK5$Jqy*Jr0@dldGo>^81|nUdO!-M@?*( z_2qnH{9R=4+|)jEwOK zR#-BnvPVIy@(?wlqwn!|VGN4tFgXjKKN~qJ{eOv7{lz)RQodIXqhQRsRyn-XS=Zv5 z?I!0_Ns4?}X_3IZ{8p{W<$iU?#;wd%5S`#$a3B}Fp2@y?M zbtc3Ap-tHqItxQFabbL1icg7B&i8H^`IJ`PYJIszI@j}qjR^4-WsID>YSgCGxkR65{ff8UmRcwv zcKzUuBOHCUpR3Ov8R_flvm>Tj>rY@@$_XKTb~ik`A-Ru}#|(O2rtwN=U9+X_k;i

<4(k9fP;W6Y6Yz=+bDl9g z&0B+;Hqp>S?oHn~JWY1Z9G@MY>-w$d3?i-<_%{3dt_~a*SU}tBo6&R2&~wM3=LYT= zw;+g~o3p>LO1atk0cYuAjr66I_=o^HmX$%?9$>38*7q|0K6^R$z#adhR+GzDv?avIjc_OT%0V{XGAwgFc-S^qi*Sg4jk-^s*FWe^+NQ!P1LP6`b{-Dd^+EIWny90 zIM}N00bOZ-lj>{p^>b{2eze&*?UvTpsE0Z_*#Zy8%vp(iFwp8L;Ep$U|4rZZ>_nc$ z+oj)Sztc3zAMSpqj5YtW{Z7}BTjKxHey0Jz?z`m2etmV!ey4)Y=f=oxZEG_=0OZKt z@)AQUMnAEtQ~%2P0=Oa^cyEIBV4mMPbr*XEG#{|Mr~!o@6;dJ z0P0VB`=+g@6SN<8+n=E^#N2K>Cx%$_}1co+C^4)(SMu1#O~cqCV#y5 z?SIiX&tEGUUc;E}pL%8L9ouOivQU0a@62gO))O;yI(=#EX)TAhoYD+$(0!KAIwkh3 z>9(1^aG?tqN2VWVhYJ@Ny)8P^ANGA^)*2t5Y}(X*C6$S~|GhFb|F6BwlrKz~vF3--gRfU%a*(9%^rU)>hN@5ceCezol_yavp3i zvpH(Yy!I1QZ)|@D(qHXO-_0Jo*u2@o^+)KV@Z(f+w!AQcOrchRfPJ4!A1U zlK0Jc=Nm&m)U`MCnOVEM3?E@mg8VWm^mQ`waG}#zb5`Lc)FJy+eGFP(k>@1XG7Fng zehI<-h9j)yWyrc%zmc~`GD!a+Esn>csdUBj0+jGw+w3ateKX5O_Z9 zz%xAoJk7Iy^zr#4@|LTQ#I;?Q6wKdOf~>2doNR_{*+1}v&hz0uYlqW%!AX26HpXqx z2L`6U0&el|2JkNH_JNMQ^J=~sS2lLvLS)Rrd^Lea=C=Hm;SF7_Q#4N5A|v=6>DnRn z?6+L@ue|V|*X4&_+&sov7pY-=>lOAzzVaU*zvY3^UA>&Leww9UW>$Q`;l0luu{QGN?oQ4j&w3 zRlnj(cjSEWv>RIC9}XVCkGBadRc4^6y@)dkEf(b@q>wd zKf+tt4L6E6P^VzCi8fWQ>8q*tBKrCW?P(18Ts60fD?ZrwWBFcn_o1&I&xy|sBixZL zSP$OF_bj<(onq!w@dZtOt9m~1%R4x8Kz4Nl?Pj2}?FLu0{`0~hYg-m{QAaNCcsOMP zr}yFCdl=K(jHf(qJOk-pj_9$Z|F@p6_TQ}Esjtj|gR8Ny^zzJT{>6{vqBAb53 z!q0NxksjGVpJHW{c-i6h;|#YSQQ9bl&LpEtebT|(a|5QYH2g+Uw@CC^0 zuQ=d(@f&x)T>-XvCNjF|?}H{)BGco~JUyAZa`-*=A7gimwkzGTY5N}w#I|p}ZrT@4 zHA0U8_rKR}@i%P~K4nhoRvh?l{4ML+zl*QWflo}^4xg-VFIz@g@rT?02XOu5Q+IyB zFBh)c1lJILkJz!D^0&^g1xF{;xX-;Y3bu9be!b7Ibp_k{mw_!l2?w^s3_JQ&f6i`; zgY6<(GnsWqY`=&IZ=;F|P;kxV|Gji6#=oyUqvZdWTQJiFd6zM7-+)U|5dtCJ}B{ zx^z(K(1D4Orf<&v&8&-z3>=(hc*>9?Vq_pTRL^%i#<{j6kcsMJ0NNLyDiPj8ADOJT zmtuppAqszqm#^cX)FQ=T%7*!?*-WB41th^JC4B-P;p6^6u>w%MZ_WS1_p~F&7 zqVl6D9|Bh5ha(lsI;3T4XMB;EnV7k0e9VB6sr4)5CKT=}^p%{JYYl7D0Pi z_~a~fH!rS2|8>7#YnK&?<4TUE4auB06f4H{*YqDBt`kS8+}sC?6*=WE#=k3>xs0Fs z>%}2dIQ>r?D{`n96;Gzd*LRIuUp{>mPFm<0Ys5>zo%pqfD?W0c<8vyfUl#IJw%X6! 
z=aRP#Jh#y)o%ejF_m{fw<7JdX1K@oF{4SiI80Ta607y3|W?mQIQ;dD?#nr>-mU(L^ z9*;epwpHd{m2uaDs<`*ogDTzUcj*ewh3LOmwmiXk>%v(>zviksg|iOd;a)g1Sr-ZwaF z>Fn|0obyJ`R%~RQU$P`;T9$Jr;}r72XSED)?CTPAdY#F5mtqlgCL?7>G9Hx+5zB_J zuEk>R-oC&Nzk|O_^%NHke6)@@FVW~n@Xs58hjg|RD0jVcpB5R${h@q(jhyMIy7LuR z=EsNZW4*v%IbSq%G=9Y0*!1YBFBG5V+I)=BCWf;Mc`A2v-gYZ-X_}KC;mgY5x@T&B zIA?2?(EmS8hu>H=Khb>l5v zftBLY=Kl*`NZ-8wXTD?Nud_ZHaIN4o=owRg32Q2&c$bBLuaC}}3807n+q|ooD8IXv zuJw<1!D$nh(btJhsG$Dd?z=tkw|C(GX-?kX^vFAozr0`M0{RnM<|fMQ;tbMMw;c0; z|E^zTtn*!3`VC=;DwUpD~8<7`-Jy z45z!#VBxXGmRk}vFdgy_bKShcs?KNs)G;pXB`eg%rO>Kr-<-p(=S}SY6d&{Q5DanK z-A&&Tt@RZ`C*rf(>!tn;V9d+lJ;IrO+`j;S@aQ3(|E0o3&hFYd(T#lu2d5~m?{3aT z$2Pf|c7$KCF(_KEyJAy&j>$>XBR{>`Rhtv|uW_uUY`h;Kp{F5Jbc zHTGI!AEP4&SRbuJH!jxsE@`1riZN&&X6>Cr`(;|+BoDQ8q+-5NOuP2MD!yDfTEHvz zG|wK{e4<$sP~2)S;23>!4Chs|=0?n^^UYN3KVZy$z3T4tZ6`2Z27QSyo(gZ<&3p@n zm;T3$TQQ0^GVWsR=R|(UydzzI!777W_2AW6+EdYgphN4gde6}dN1NwA6c6y8H}QOg zy9Xl*9LqFT+AO8b|E7OtPSYbFF%N?CB%X=p5}nhhHB`a>Wcd2^_*!m4ht*g*`%28S zms#Hx+-sQYwwZR=!9~tm7vFBU&(-<&f8?x*HfcR|uvPsd{N?rOhG(iji|Ow%%i%R$ zeb6rPzUVi8>a6c?z8U+Pfs1(SEWSNuS^JpSoB4Kfz%Syrr!Nw!HA=5OA0Q9Ci)JTmB0ECTM+v zgNyPXP_B*rxr^`x9IE_u7mnCojvX%@RIqv2SCx?eip_Os7zv(4Bk8H8mZ+$Wto|CcP`5%3f z{0J|a`i1Mtoz&JwPJ^i?CsOpuKv?U}O-tSs|$?gzvR2&i)J6a z@vFtGMZkj`8N)c$uPTiVeEosTLoa9UJUeB}+rVU>ng7kTrtNxo$j!_@=hhS7-Icjz zkCETx0*am2Z1;O_ULSn{-aK!B1GCM*+@{a7q2-0X#5v!tpkL^E9dbl8+-qPy6PTkv zI(3=cM}@$ZxQPO@W*gdN;8w8Id^0v;u8XId{Ay$3;I@tR55Z0Nsrgm@0Ry*~oIG)G z8|A{SC-91wd!9a`dHA+`i$#<_-1Wy@ul>)~A1}r({J*sR_%-T(bg^?V{F~nwjp!Zl;Sl`8@k1j&y!foV zZ^Uyt?s@Ru#Xq+!_U*}zLhi-JS$e+VuhCH)ovK2yE;cbDKKPg)x=UirCRew{rTu1( zOgqNf=BF=n;Dd^VB3^FwWb$n28TJ)nBUX9}ux*ZcHwJ%qu5B+J#5YcC9Px1EdB18B$7J-hR#96AS3q7}g8Ow^?C3{8cfebC6FDO(SNR zw!|+V8bjOyZSUj%WX_0dQ2zbF%aBakHxanUO zOSNr%Ga27t{y&zCkCpny@U8ORT*}Ut`&Zb?>sweAayMKx^0>4C$H_(XBy}vj!sfD>o*$;o1fX9bNZXxqi^ik-uL#vf?Gx!y-4e8{lUSEwvQSzpgYd(TpslY z&fR@%!~8|a`j31zdy~}1(1yRD1K%*9Pu2hXb+ke8^NYZb0PzX05dSXRsr22}8nkEL 
zFS{ZuH*a5wyAR-F@}NeUk7Kz?2T#<7pKtGU<~BVtobSAPRmV-V`3~nj$`?a?_Am6LuBQ=dStW07$BJ?6(s`qllBQmc6lqek<2yr}u+> z%4xD;GHvfZ2R-oav9-GglQXM8_rBWAC)SPHoEaLmV=(!%7*7D$Hs0N(cJt|>Q6JI% z&9f@!@46Xy`GPIQ^kq2XXuaOryYsfP*2iA8M})|$QU(mw?_0Sx;}hA%ynl48foJ@B z<&Y1=zbAmNmw>n9!QYF)5KHljmR)ZPh$Liv6J*{ zo#IDUh|bdKYX-E{ha5D9wgRCsBbdumkWJUsQEwXXRhj)WF6G-FGnNU&yPC0-BwLP7 zr}y9dO1?Ds`z+rG;c*IkX^N8z*ykFkElwWGHOQ6u?V8MW4A-t)wGXxnzXI<4T*q={ z{=tLdWSi?4u0F1LT&>z@!PpV3^IsQYjcI2E_{op;VfF$ux6F^`OR+Hr>#qX`zTM9^ z`u0or+qqoZD!QN_!((Ou59Z2BQs4cxc3^C6Q)&{|e6Dt_uiKc~%{_fw^SD~IzK^YI ze3ksKl3D_!?Kx;*-m;+2}*vYyXdT0Icm93b##3Ei zoJ|g-v*~LizUD?^Tib~FWKG05hs@%g_pA?c0G_l#>mTwF9>^ay*sAUZY_-ncj82ww zWS}bZ4lA5_u^rCHwwCL;c$;ELKk{X?)WO3PyQV!gs>31SjsdOiRhvz;DLQh*3kX8F}xA4|Ro42H=-L_-5z*@|8Qx8tUnyEBo};4CZ8c zaz@pA-CA9 z%KkiLsd)1F*vfl=RpHFDoW1Lg9zcCL!16)r>EqV(AoWn!HpLcH`et;Hb&3O#%=m!1 z6f-lBtLED4*Hrk|8T3W4*Sek7Nu@{3p`9}IpLT*r2CE|IE-(Hp51E~EZc&DH#8UL0 zbJ2T-q4%_)_h>xiO9K~!Eep|Cy5WPF*VeY|o_yA`M&<2YcSq`iI&|?&t8dFJbmn#a zt*SSP>wYWSs@`V%s@s^uo<71|ke_RU*51eR+27^w9_h}$M(uCRXnU+deGY(s8hZv~ zC;$BYYFFcbf$?Vn$4cK#3CBO*9e?3NMz+ac(#UzUzV7+(JG-{>Ve>pGBd_-3zP{Sz zVBM&H4+xQu#H#)Id|&O<`|a8(DWOp_H(R$_@Y&*7cawLda^%b@z7etc0a_=I1AhvU zP4X$2TmgelF+5K>>Gl#sAYAwGxf~v)xfY(D?%*l(2VF>Rp6udn7x1>>BZFV!NdY^z z)xlTn(Ac=Z9QMik@CB;w6wW>x#yi0_ zWt{Q8jf3s^9SmJ0IegR)4koc)W%m5;d>7u&nvv|1hn@Fk&+lhE&qGi7-`w--@ejt} z=>8V+Lvym>UF9Q?e$rX@59A$8KQVgOH`k0mplcCdcR)XR`&w5&8RHw#aI3+)<^rGN z!^>j*K{z?6JNc(>YsbkU83rfGBU<~(0)Oq62hb^5dr-_X_G;@C^qXV5_-l&?v2QI~ zc$;6F)y1wY@r|j?n&Yd@?j0Ic;>)Xre}ebYanNN3W4HGYjWRlph3p^0_javtywP7j zvUm{h2G?=Ddt}sa^c}3z2|MX)BfoXXTj?%!T#cU7dED~l#`_tTi;kff+*mpz=W0az`vq=HJ+aAYmajAg*wr1V&A{}oc4a2Gumuyv1a;UAwP7k-CQeg z#PQ^>pf+FX7FMy5Z0GTTlP$sJ#1sWA&=~vN>-= z8|QA&p9z_kza@~WI1+2wSJb_YHPT~zc_W?~*?qx@eXQYM?l0Wfi+rOc$d+r6Mf!dV zvL|!Dd#(~Z+|#$zFXS`)!o-V@W86XZW~Rd9b9Eo{t@EFEy}Db=^*;8U+DHvm%+IG! 
z$7kdXf6Pvq-o(7lw5=9=RTCz=W6UP6vTRX(U&=T$1Nf47AN(eNV`eXF+Z4*A(5H0Z zGIZC>`_Bzn>p%0SL_TMXPuq;KOWufUZL6v`$W$a}<|j*0pIw&K1Wo9GDikTmp=j<(TGe=@dsmq;u5rFYS9`OTs$ zeAP3M-OtiZM4%=A2&OFpKv!JZBAL#Hl>bwEDF~$=jnn zHaYAoR9@iWB``TOOWv^GoO%(~%7my#Lb}%QVJPdcXBj z*#_&S9F2`Jd$!{4i4%b3CCFuXvTHLwY+0w(FwdFxDbq`YOFv`ZCv!7L@KRmJS|e^4 zYK{0@^R+M2p-1KFy-M@t{%-^>dUmBVXW9pmjQwik3&_Lvv?Hr7H@u;Wz2oPVl?|UB z>{eB5XKo7aH}&NKdw9w6X7)!oJO!W0xM_Wywzjf&VJmwgpzrFf5A}7PHFGw4uG{ty zQ+6-!^{Za>p!ba(o&$%>WL-=9nWe|G7x*Qojf=SYt>w+sU39jQNzI&PsPd|N%KZAsdJg!9i~hXZ8Z<`6R(mKZZ1%LUBXQ(wB{LzJpHVA9QoK* z7~g7eTYLY!GPx67SxEjwuiV18a1^St{ad+3Ma6~6ZSsy`XbI_Y4m`rpS{Bl>e5 ztM<`81J1wCewdg1>BF^NxybK#&W7yA^TxaJFNmJ7%Zo)%#D)}$o+|QdOQ5HPd~5Ox z_la3kDOimi@yHauF&YG$kr>#%l31B*V+>7 z{3$)V)Lt>btH)RUJL+hy@Zsn4SI<%VTsfQ6q0ODxT#kmLC&egAC`9bJc z33Ps-U(};IZ1^hlIAS&X9)td0KPSIUk^iyzS>VNIfvOE_M%8Yg_s4&1U$Zf~UGnLN zJpU4Z-xuh=+u*_SCynV!j2CO!#pv>x$e41j^7pPs5Bv&Vrx>p!e&_=mE&kiwy6Knj+>0nP+RA9DOs8+~`4W8OqV+A{*%i?J*sP?gS;(6)x35_+ z=9_|zWAE!e;EHr>xcmx|O@D`gUkbn7 z=;=o?_KzKyw?BUveP$*(X7b>xUot0~@GblX_&hn z)b^Lu^(MAjY&&bHZ#4Qwi2CjY?&tHJ%B8YytG+L!kBY@#$oOZv{TbP^Am@AEE z5wg{5TYiI+X-{PfDbwWclP#WBIz01Z&h};ObEXv!FPoMlSOXt&(E;z#=l~VWV-Bz# z?dkwQ;2<3!hz>A~eCRFcN}n-*_t3w4(E%<*hx&^9ZQNI*1KfuW@CDb;xfY=VApfh; z1sokfHm~q&nlH#6+TiMDe{Lk#PUHMmbg{CM;pd?P3?py31_SOwrYj~t`qWa!6_xaFGDSIJ(?mO{T$>*s&+q}wrBYvo7ukvixnvTz2 z=9%8x!_C_KtLUo*e{}WSf@JKReqrH=Bj=aQBG;=`+YE2enr7ptU6K9xNc~Ct{Ix8| zRe$CefA`gPFWox)>g3kbpGKd&rtFu)rAyk_`S*~EV_Mk>(toNaHxsJ@{>q0c-)mzk zy2BaNyX4i8S&L{3UX!)x?vdGxD!D(4`#IdtWFWy znS{NT?D#LGGX;alWc>6)(NUfMao>cGvX00+#?I|2U6pl`gMFu%>)5qU$u0S%HJ5*T zPWqAZ=U6X9pJbm8FxMyxG5p2!=qohAt4^F9b=aT15Q^{4g)Os-YDaloXL*BY& z-tlhy=HJZsHu0NlSD{lD_|Q*G%!T+BI?D3V@aS^rQ@Tf(+@{L)Fn7GGM=Z6&D_=Hs z3Lcb2k?3LY-{yoE4`%*ys)kGrGwzBqb++| zA=gsY1_Rhf7JfTp7oYBYpDjH}8o5*ZGp1z?7kn&uApXR?g25EtV`Eh30?RC5*$gkp zwgankTv!G^A>UfIwQUUT7@gMaKYx~XN~mizb-c>_$ZtDQ8(u?BXQ`78>h2tcPrl`JXNLJnqp5a)NJu$TP(dJ2IgHdkvW|uD8+Ev+cZEtsUFt z@<+kX;q8nQJ21c2nq$}6L3~sz3=PENzx>dDz-FyHwkUV*3Fqd{JMFyOd(S*SHxR75 
zESDeS$t~ns=G@!4m0ZKly)U+y}T1*uC%%^oVfK#j2%00hmx^1PF+JV&kk^L^PC|^iSoP9^}^I7~Hf7%MIvjp*zr(lz(!kfCW zcGw*sQ4efqe|2)NyxcAW^K;X>jmb@)P>>5>-Me^=wJfKfuS#*JI|f+G7gh9c-L=u5 zyTsrBrB9EumVYwZTK;%J|Ce%xx$d$bIQL_6yKx=n+!y4ga2>Au zv14E|FV~+T4^a$EZ@*) zeZ5mp^7&Fv@byUTz2naD7hhcStMzv8)OCTo($<~&M$x*Tyiv5mN=jL5CF}d5#|tid zX|~lRWsH?nw9fXWuH)N5zOJdyQs+s&K`CSV%}Sen{HsN?uXwfS=RRM`vsUk+_xXCI z_M)9$zNFNX?ynrb?bxS(rT^=L_okh6+Pb2XE?ei+`x#gHoE9&)&*NQHtUnlvx41I#OpRszTeo0@xw0os~b?al} zd%g7HuXKNHpdzi;X)B6)UACfVz3or=xz#o07+;s7H|(w{JN;cFpZ2wuKVc;o_40M0 ze?3#5V0@q0T~aT!j!F4LzdO_3IR5pbH?DZS=x)aRrPXuj)xPejwRTT_$*IrWuweW% zr@j0u{qLD_U)qL z>Q76(nt7@H!N#K6pKmPsjdgO$ADELjY2!SrTk1UOoJYB<&;9NAwL_o$mF|C#Qk_iVMEpR6zH>Fb>`$mdVlZ2M@-&wL~o)iNJ1+TBv`0?(haj!n6mK0IxA8~RkmWv%ma z^Oo0gUG{^#<@bOm^T3noc2eqV{qADE-eA7o0N;D0u1k41P0xp%IcxloGnfAA3hq~* zx~XXTPc{{u;OkYi&etPlo!ztOmwdaF`lg?hxBMBdJ5SA9KAn2sir0#YfHORVpFj293HOcv;+*GxrT^=EJyW*Wy;D}$ z{-W0z=Txgp(I>&*q&;!Un?+Addb4N=^-r}@Qh&_+uCh)|xzEZ->E%lq`h5R~(&jR* z>rZ&6==xv0Q}mV9EA>jvzb`HI(^FOyeLCs?W9`i2qpHrvfA0*-ED2logqnoBN)`|) zAVi>`<afhJKLrx2@jJ2+ex~9}s^zXNCeMIj6F}dCOhr8RIe_|X)m@M{KXe!-wz3tnk zSQ|V$^UcKPqE3u|*LmW>ccGmado#SwLYdcJd8TOmkA6xWk@jn7Hxit`u0%Q~sKac~ zx-*>Ck5Ps|4*PI$SZ&}rTtMixOd?J0cP#$?=7hlR^`uZ_`jbGL#^ga!3tTqp`cbcw+ zK8D#A!&~+zJWV@q(9RpsZxZ%{ui#I&UwFhMDBm7T5M+eagE9BY~uO{_>0+E z7gaX-fV1pwe##L0cNM#R339@xPO^2VQML-zn(0(y?S(w6WXzq+Qz!FO+|9V^y44q(VowyD&)eo+NXin zc6FMq(iCr_jZ9n8k7?sCwDA}EG2Bl3(1^|6^ENram+jKpT4hSGWtlCuS*qRnw8>&) zzB;e9KM$XN4?g{#vqQDpzsGme!AXT_y3KA*()v-A)EHGY{(WcFgYSoAq1~Qkj&^Q0 z*=@k-+-`doxZeTpcbu!Q{YlZnNw3mggncSFD+h*H>O4Y>+xx`Frz59DuJCm@&ZDN$Hs~jFoD$ERz$Y+W3F!1hy`R(-9X$Ix zt&K_>c$xy9?m6Uq%@pHYXo?V+obhUc{itc0EmFo9*zTpBX2#mWSg$h2gS&{#3^hp` z?{wa^zq-CCqjw8+TJ44QhNz4we|2Wu^H*o3DbY5|9IK80Li$hz{2A0YnQxMqKXvBm zwhG2NgR#zFtY!Smf%_j~!%a{E9ZH<0Pe{Ehvo{p2tdTj!f9NIyy2*fUW|^a$t`J@X z|Jl?X3mp>QhKpqRj#X;t=lX>i99^1{=gNxV@E=Kp^qG!#MMVY_* znKn+D(2NtBaY8dL#($hSQNzU=`e)$cPTCy}F8kx+wf?vuXQF`%=d;fgt-95%wc&xL zoY0gLni>bK?dZcrqW!fnT+E}LE8$}0^k<8#&u!Pn$!FWhIQ^M%`m@txinIBFUFO0L 
zt_Ch9X}E~KGA@w89ZGB;E((+Ci!wg=h1Ny|G?W1iWk5rIXs9X-7lP*mXsK(gY#c57{f&%f2CDCUPTH{$H$%opay z7vaMi_+e3n^71A4IH=(xTEoX{^ufT#6yQ#Tj_S-e*b0-FOXHbK5B`;LNxU#OkYgQ6 zLLWYSy=#jy{I6(jxS^vA=qLj^@|$A&c$Ul!!F2-sqHFEV?k$1~`%UiiaT>4#6Dh?QMtOP@V(8K;A54Bk0=cv;JXhW*K7C?UUdb05N~%8 zKE^#+l<}nK6jpmXd^`g_o&g{Co1&c^Ve><9odB*6n{RS=GbfHn>~hqNZo$XwuZR!N z?KMR`p9+7p*~58BhbhLkBZQCF!|-9qjxWRqeAvK;ZR`(=D);L=F&yVRptB3IKIGBb2$vmx z=5@I25L_o9mk!t7=oVabpU886$n~R1#Jr#XYV%`N<DRRCpeE4FV!%xEG$5+Eg`_<1E6;6BU5`5_LLzm^?yAL12Qxf4R zSHedZ`bRiE+|N8&RCw#Jv^Kt2e#F|l%{RIa3m(?q&dT(i_GZ%Eb5upqK#8H zAGwn35dEY8{iG88#EE|5L_Zmae!|#a(5v9XeYj{}UF0_DFm$3?~DRYfb0>1}+G><~P6k+?V;NJJbln5pj z_SxWhk>DBK^d8!32HzI&y~>mn(sf7o$q)BSKPqak`7aG88R&%>=!F^Rg$Gr!HFWv$ z8urj#z+)g^{`p3FnJh z=3&SY-3}3%K8?C#kRhAF$C05oxj%H>?EVOS>$m4F@GB2?zH>af<%8HP<|)_`F(I5p z_u=H=m>(Cdn%tz#5t|*oe+stMJ%6FCcv}SWU-YYE=#7ScCGsOt9c@2gy1^EX^Bw5K z`W#8pZI5vIQM0C~=MQ=t;dqZQ$MwmN(ddgiOfzh2#GU~k_i6a}8~DJsfsS?CBeoA8 zD}7HFS$(=)P+`ZGn1U^F&->UONwzBHl<2z)>4Sj_ksop3yuduw=EEk-z$VMUCUaqr z8Mx5p2f8C=W$wA3+lWs6a}DR=@}p`1+apGeM}FMwKJwI!?hlb0AEj}Y%Vluk0vAX7 zaFN)Di^}I}idLm<({ORA?U59+J(BuxA@*53G~hK&vxVEmJLtELi_tnR!eocD@Yy2U z`?~Gn#Ab71vpKQZ#P*nN)^*)jY`h)N?CWALfs6Y!T)Yb|-gVlbT^$#3eYo%yK3}wQ ziEewWw8MXyQSq-eMeT!kYPblO z9WHa?fVRhBGq@Ofqb4^#!uI$y5ZoyCrP##->AM}KY;a=OAMt%SIrzXcMLqSpti4o* zj1Ji!qx*0obQ`ZGUV@Wx(4^qxDE1Ur>c?lXXmyb%`b7=jmC&#g8g@d%Vs|VI;Ub2) zHV%5(DRa3G7w>?Jcfdu=CAe5=T3cj&UYFzHxTs_fx!{}Hyx?l!;s#)eg@!EVi+Pmz z9+(H2;WERT%Np&Ef2rXh+}?MYld#7x@FuavqrmTJV)u^JqT@0b!sW(#aM2X?-HeaN zI`HE=S_L0Aer|kx9q8crbTX^(B~{@|s=}9KVQ$HM6n{x!a8prxoUTL6f~GR)7v-U^ z(fAJ|oa5lltKrS7;ms@ghO;{OE(u#Y9Y3$dh78DzOsK=3d;ov)0q3kY_&{?nAY!ghwGe;^>cCMNq|Msia7G3wo ztC}DGC1l=+>tDx@|GINA<08I7)-0SS;~cHB1|fdiKT!Wq@tZz~U-@HvtUhv#WSFhk ziNo#r_yjkYB9JXH_EbeB?+0s-#MN|9l6+T=z>twj#^A518Bvj@jOaUME$&v`#~Yl|uIYN<4y(-QZISieKXu3+BFT-SpJQe0dyqBEfy)1P zsQh!}4d}o2FZqltDUR%BVgcn_WBtz;+yQ!4t@4-SKR*XCbz^9StRaA_6!ie zAh8vN^esrP8lRb86#K={oi;a->w+;cPa;Qx`J255h~tVHR$U~qv0scnXfZd$ 
z#1O|0%!4(0xaiwl;&Wvj<@-e)!MKaplKGB&hs#*+EMAu^h`-YFiM~ppgW%HmelKN>?_{lCaGwS)rOuA9IvwCh>S-osQpzM!<`UTG zldS)1Q z9MtFFUfLTdK0`^T&96HGbL!-<8W@6d3IF_!cB zFXMEgUFLthXYam&`OQ{xLzDtvf1cRgj=TVl@OOXH>LS9m#iK&}|L<5gv2^-YGdO1@66+muK+PmQr}hX09srO24GOR(Vf8 zjdbR7=?%mL%6s--GTvIh4Lror*fvUj062|sb2BTUTL?M ze$@#+ka@Y}^sHV{qCJ0{=OX{h$!{ig8{d`Eo_zN`84q%W8)Mr|e7)qmm3QIrR?#nG z3`Dl%aF(p#!RWi_Z3_hV{bZB$U2p+@vqi@fn1wc^{9UwNO4}Qu3%MG!bh|uG0_KLqWfGB(}3|h+`cw#GS%Z29dLcb}rICPBFhr|5ehf)4eVy>mG6RN?){b z{3qjuoSaD8B5zglo`tuWrS(6QcOU)LviQQcDJS)B$MpO5D(ybn^U2{;0g6_QZ z*O%9w(aYln48Vu>g!Q*i#|_g5bHnCr z3uXJyS*gE*^-|ftS_Qu>9WvYf4)A`r5`OgvG!6f-LL)JEgmfNs%1x#iEx%6nkPCeh zomKkLcp$h@@(XC+KODk)h@bs1q#LN5$Hdf=s!2WX?>pXKkd1W+xXBPJZ^;zbU@T9NWzcQnL;G&PN2kIZZ zMekqUMg4mstbeyl|0p+5|1=(i-KFt$=Iea;iHvVPIF$Ym)F*|rIm@mgRMeumzv!Grx}yib<*$d?1uA!o;Z z|9aNnL%O5rX+kTsS=?sgEVz*FSc3e=P>hrBL_h16y7^9@#~1Kyp|9bzA^kSa1d;PV zGSCZi$P1+)H!NyQ2XR`R?X)8~rKWs=uK0}*3_>>s95rEZ`2MKd1;1&#JWsQH2|MJ^ zf4o?}YVE-f4Srk-9SJ{vJ=ESN+B4*jF`o^b^zqHGIlYJa2jV%KnOlAI8Iq&_CUWd6 z@Z4r#`Z7HCC%|wC%>C#uHw@-=1C$rtrGNkOp!cUJE3&l!+{`2&{Xn$;MS6$O=64zM zufoSF=)3SSO`iz$MWYYu5jvgEd(kJ#KMS@JVKf^)mX%Ez6yx#V08 z-BGI=${E87`eLJu<#Ai1uSs=VDB~sO2!BZZkus(JZ>qlUNXzR1orzyh&+Bn<&PU11 zN+#dF@7PbN?N^fT<|EG~2NCqL`2HFBbUNK>hH9!PE5>`td^8cj5gw{bRi!%l_kA0zj6#ifO@#zzXg6E6mR8;Q1>3m=^Y3ct4&A^&{>w z{n+x7<^x)U+kqCaK79d>Q^`C zQVuj9gA9yY7yKsE{Tc}^#|1bR-7V4Mv3;Koa zc)5Q4Gz`A4f-iDH^o>V&zm9Lz`NH=u)2}bZ-#CLUc=j@U;WBjhNEkd{r61ydG4LAR zkKcy+k$;(fjKL=~kn9njoRg}{o&(4*kuNe&(nIrKazpl)6~;M0$cH{yy3j2yCuj`Qez#;aW$YU!M8Hd9FGf!^DD>_YuI<;V?96A4=-mV^|t}Sb77q^*EF!t zucZ{-2^_b4%9`|iWIwibi=GQB#xwEv=(yxN_}*LLxr;C8-=a&Lp}hFgWZcsz&-ocA zKQ%wwP>S!xg1&zmKW!=BqaR$dCs2H*V#~E*zp=Iur{|v;$@<;EIcSdDq69``!)!9E z{x$hZ@j3DW^a6JP9sMKLlO!LFg?)|QSxVp^tTi1Vhk?imvE!`h;!s%zGp++n1c5-7|OgW0^BY zO!7s|ezlN20zKX|OC`sb7-EIsrPA*m%;0=sHrrnfkiH z<(2A74y&*9?8Wt^)2C#0w4bw#`}$NA)~7AxL-M^i%bm{t^~%)t^%dlE`XxC3=s)Pk zReHOU2RIny%!3?RCmiGs5|<=4V>G`T=%eJ6d5`j2P6w|D=b4EY`rY<*<)VH+N1L0E 
zsGZ%j9D#eU!^ghuF?aGWm6KD|jSVtB_vVw&B2g(GXgog%t2bo~IBLpWYK>Ix$hk*d z+>DLsx-mgp8@7BN98pQ$9gEfGQq<&kIdf*W(dJCr+~Ru7-Bo6)KN3~o9!#FCABw!b zErk1V^8XUNkhIY5kl z*p0EH4KJ}R>1thrmv}304)#pUrUoxK+!0njIDxUQy}LKvf)Dg-(BSD{akKfJUTl?s z#?QyH226Q@FN!`&JO7k#1V8sEClzQuc|PY{aXm^u>*hNG4`?@e{BjmAen#iUX{6oY&nvw6t}C6z=Y6zu;4pQ zj!{lF)6Vv33Ez>m@s;FJs;nrVmH|Ffz^Atie+O};-xYY7L&i4;_-5ABgzGN*%~U1u z5Z`G19vbR*mJ(RNv+}L^*?#&}`K)7UNsO{s$$7ipnP;*F zA~u;BItBJt_?9a}WTLF4C$Xk(=f~WcE%2H1vjx^d`Cps6RQ}&A|M!y@od5aqpZ*jm zy)O&DmS^}r^zVRoy73+Vry1Yze@g%Fcy=AnWUk7b4KA0nOPIsUhq`56pG$KD3Qv%0 zf>_$+*o|9|$zAMI>gN6d{EG$fu0k_B8oknbwXN~`I~4bn2UKDRZgyv749c*ukG+6? zd5LQiy>Sck?QB|s+nN9_oQ_u8|&A{*lV!o{gIl*-z>!P)m1ZUY+MYI>GMtPK|K)jE!_3U&0)bxsJ@p$ z1g;v!#4~oOy8s@WnR;-22K_VO0EdLnV0t`arM zpU1sXr%o5Ko^M=k4Cg3rYZ&aJ0~&qoLXLKuM{9ZGj@;r1d8uEXU|hC9pP=uJQ07nQQl4c{mUD0+N3Y=ZADFZf&3UVjXaQf z&t1rGIhnJ3h!4ABP5`k<#-k6ir$0LxnNbIiU5QMYfUL_(VJ%a&<5Om?0)J_0!8vS{ zVhcGngcp}{-bV#GZx*n~*<+dH@fZiry2*DJOi}`pBex5>|CI4P%{UAGhH3TNUAHO2 zJBZ!um{753+6SL2i|12ski75v=<_ED%BT@z-e|CxES|OIY1hB;HD%;Z&KPN8Uivtv ztP)-(Yrrd+Tk_wK8+EGP#U5eFGhW|g9`EszfA1jeRg$YQUEl@QFN&jENgWx~K~AZ9 zKXv#faCRItY6XYezs|f@?f!k>u>f9cBW@)<)z)Zr=VrqXv_8nV_~^Unn-4vvY{(s< zw47s%1B{RNh*BRXdgus;dn^3Bi~DZ$u@&e5az35Ghq_GUgj-QP;}CPwI|R6iPx}-9 z-K-T9Mk+1aStp2~Z6|#n|Dbc}OOHo14&(m>+8${d=E_0;t!5l2F8y|I4(D8)B=_*d zb(`)zL7(2(t9Isa&RH8{IsPhT)G=b^j-e0u?Mh2VB>n!+JUL^&Q*uM6tHU~szB$mX zJ9s8JNyx+f*8RYpe&Pp*;&YYG_mF4aU5=&63F_iC;9Km4Ho1oBcHP|9B{~9$qSrNv zE*Iz7H6GorR;_Q+?i1l@F&;-6V}bl*9_$u*#aU*0UB6cr?}bK;Z-2_StxZb>H-UI; z1ko89fT~?v+Y%6CJLSv%;2yR5@$@z_PG8bGJXZ)1%%)9MoEq|z# zy`OvGx%0UGeu(6Kl5>6&7z^3276V_3_UtD9qKNlb%$;bqbV|2E861mN6Nvu3ShKd%{nXpt5-PB z)d|csN~aClD^ZjDV(SSkA}6)^$A7688=_Or-Br|*-Wuei^usqq)ym4dI&=#0_a~%S z8<*m1|J6_>I{Q=dp_p^ESe!tu5>UTOTku2QTJ*AqpaISfaM#B4daLyRS!v9JGPOQ) z?2T^E*s1RAY17<$%n7}xmZ6_kDlIZs1eOixWTUA^cwhwcbP@dHVQlpe;JHt8zm|8$ z)u$VXM|SO0hqfjWBc!+-sak(Uh%<*T=C9T0@&2`XZglKe&>96 zQ2#cpVQqAc5xY8XY4V-w;!?&vxe&hxG@X1CJPv(xGxMgq3>|(P`tR6BwAc{wacK7E 
zeek=uO${FcgHdk2lrtCgmUH%$W_MyoYw+Pq(%`#I3H&9j{GCR<*qkZS{(N{8^X_OZ zHs^z!tsSyC&2iYAcjz|fGP7=TMnI=)=<{lIO`pxVKL-DUS+_ZJ;qzj1rtz!-9wz4$ z*NNRn-IYHJv-@bru=~98l)&}WDLSao&`z$J?Z+8T)S=maMw#>2EkEP^2jnnS$*X*f zdYggKifxpGO=QC+vSSlfPF&UC8e^_sy}(>=*hY<&mA%LD8C0sn{AurE``+mY?05kB z$0qS%lPoe1!+%w}Yvm1j?qgX|ZtsL>w{Lok`{*w02f+z^EiqjmYoQf^C1tcy49rEt z=%)qVR=&j27&l*WCs>uhGIE>#rZ%=$`euA@3;W)IZ7X<^y7ppMzf!iuootTN?2l&i zUA@cA7QdV&wHe+jHso{c=lD(SUA@1py|ect#=tvQr&;)Waq>{+Iai?*-z=W1R{uWg zH0lw)tIJQluG+A=%9q49_JqE(U9DitB6q28FZ0{**~s_vCc0}Nuu;)+#7Uj*+IhW&*bLj5>mKvGUSb&IgctvX z@1@=Q@HadLzxnJ@M<4(VZEpJBfN_Tc&nepX3LMA?$;)bD4s6{+9CPTo1%CQn;&3G9 zE|&EMk*^zw=X>`{#`CSEt~%`J{_%V(c`osMGB#Q~ODLvKVi>AroX`~nmgSU_d43q) z+IwQITl6&P>)rJ40RMjpy)2)noHRiX%k9cZqt8dCD8*GnmCo{8;u}S7icKN>cl-GG z#)T?t)3>3A)>O}c_jg+6OD_INH_^UYrJy9Xry{XY2d!)9B@ zZM^HM;k=yNs%LIisY9)vxf40Fh&k~AXVXef`$Y+kz#hI?!T;gFdW?9L>=JZ=>%u@8I)5Pk$TTP5krXTO7wYo<&!eSgNz!zw$Qm zqQEBKdmA}3Y?33ea*Wzpw-5bf>>zjD0ma3bJmuF`F6x~FUL*(ba{N7h#!qadDD=AggYX1&qh_wl$@?sMdRKm)d&r;V z2-v~zY{u8YSTEu_pKC5Sngf0WR|~m1=aYw*x&`<6L$r7)&fI%z67YC8F;}q%b2+bR z4t*29!O^kUBopHswY>c)m^_8-?CD2SB4a6VGquc^mY$ z*}QH>e|lTa`Oo?AMT6d$BgMy{V;R?hXswj8LTe`&YsqPC(3{YN*jN70+}Lb>Ylh5? 
zd%=h3MvND_d!lD~CbAFvLU@4GA$jk`{xil*<8R23HD`kxyPNQ7HR3xPrMeRF`J|$= zcQ+{k$*X+s{UPoZ^sV7|aQDm9brzm0Hk0r+p+n9D>hPUXe^qhkv;VX_-ri{W{Ki8< z^CoCsd_>Q`B6dq5@hM5JcI=D-Gw0_`hle5m+C~u@?0{zHQV;PTrW&R587U8s_R@Yg zb5UfH@I{ev=cdAcdxFK|z)2nD4^)%K!Pc~L{%;$Lu=StWZ zryRxR_x)Tc#`oYa+$QC1u1IL&UD|Aim!88XcI#S4?_c2aapp*WnFl&ELuYQK`1n+- z+n;4~A4Qi+0zQ%3t#?=&mpo)?{LGZpv59h98T-1Sqg~OAr;NRR9s%nQ@QOScEB1jb zXD)X@zni$axJqnQ5A(~KWo;BZ39k`c$yuwj;FDS&Y_2jV)=ymDU_nP&#ZTs;*j|~; zyObw?G9yRqpo#O&6)=~zvyI?IKV)pmhbC$8e5S!OI%FqCd#(bWI_V6p?#Q#CVW zzAyNC;kkTZnhz`nT*BLoI;G9Kc_(~AJ3Ax9CsbdsSbXrg%pdXB&*%ErKeJB5dt_=$ z7jr<1;|hzZJovldMv+zBNsd4hc(2j;betyd_Vey+(;e>f8$~WA`Gw}rir)DtbET}@9{_LVydMFbrP{G?$&YV( zn)x&|m$e<%Bc8A{MkrQS-5Q6sCuRNwrFdj{MXw)TGM;&qKK5e&;I;TbGLccustx}j zdNY0|2mV3uS!@DMGd{uqZQqSfB5Na6Tr={CuTFIY7Qp{o$Le-+0rPrm+EjNN^X=$Z zB_QJ`_VEJv0{dw@$RXQtYx$y!Y+mudU1am(BNUt0@DU1LMIM!*{}BHaogH;C9ZUje za@MevTLC?89Ca~Xvv{xYehu%WjMQ@t^+><=1DjEQD$mGQ=VI@%OJtq+3q2F2x(ik- zEjHw)*i)`4`W)LjTq#b6KgnLVf>dkc$^9mG7dnIZ2@D_Ge8x`nFtM}6e>iW6vLSaQ z=SoAblG|MThFz#!-g#10jWJ2hmR z(r!BQNqlzI%*kYYb~WHt-WLo~b{qEUZtC4)f5a_yAEC|2>i{rL2#1lnysYg?Jw2@NzKu;Fw)r8(XFtAxa6iDO^f{UF z>_K*jKVSoXs88v)z|(yuSbW4a7oU}GFUZ{d#GLOII@?5_4SG6`oguXIFRm+RlKUOs zDLDHX^q^9Q(8CdMH-!IA^ch)8D95KPG$wPz7@s3? 
z=$O!$_$a){+*izVdxLi=C--6h2GLE^W;6HkeIA_!&!sN06*FQu>jb#u9FeM+5w1lu z75Bny)t!N^f-i~l5p14HCu7SzJ4X5Kq+EF^Cc-t0HM)iHIPtHEO;XF88U|0@S3l?6 z7RhC$5mQKu7qy zs4Io~la&a+S$w0^YkFqRInlYLTo<~l*dwQzt76xuBSSXRXTEC@+Ya4N+fNYnTh8ia z+&@Wze`4nsNIB?U5t$Q;Q#Z<5!piC~OHm%7L7_isua16-Tf0XacZtoZ@FKeVqZc@z^~sD+fejs(R&wypY4o2dS|f`~Fr1>V2X)Y-pp{rmL_cu%2UfwE$^uS9sM(8Z|UbgQGaneou(4dVb!y(;7IN}jS?>fTaF z-RKFL5B)snuihM-eET{41^>z=-g}GGpDRIYNzBb5MR454SK4!oqC1_%?(WUHVCUY2Z?^-R ztqYkadzjle2S)r$h2Ux&<6anpUBGn{x=TK?$mU&kNanDoqO!LNn73C{^~$>i_0Dr* zyZNK=g~nK1V)HBL&W^DXONsA+I^-OjeDt4(2Y<775pz)P^XhLs*8|Rc@V6Dv*BEf6 zfcGBiaZ-=q(J6g`1{G|Y6vp51gQKSw&u0ARjFh_V=uxw^`K{U1w<6QjIDa!&&87yo z6^ym?jk8-_{xls<2RSy%td5=e)|wrqgZJ#fKJ2VuKPR?}X7hpvtsd`fO0mx_yiSq0 ziH=y!=8g5|7w9&x=ygY_+Z7Y*dWJGr;%!?Q$K#ZJgR*bJ-`)u0Z(;+dF^cOCj3}*ABm9=+xH0Tgci!;6TR41K;1I#a*mhq53wiUam3LL zsy~R{E4J|IM)(SLVQfEJ_|xf1Ky<$E9}Di@PaSeDoY=T$v8^QbQtVo>hfTg^>)z*# zNG(1~O?35aD{vR8@H#W}Jl7GZHtgQpC?B$W-;(n4utSS7+F~Mx90-1KGTV6Lp){&9yFM^L~m=`n1@+ z(5>V}TZ_Ja8oRgA6zNaH$G6E8=TD!^*;DwUj=y3d-pJ}cI!_5CLMJlM6JjD=lj0Ix zkD3!Zf~G|O0_g5Mdo6mgWo2KuuKOisx@o-UUcsp;-X(UO!0N&dUT#kG&*7@; z7SXOA%7w!#@h-rOyt8>yYUMl;VBj2Mq0=$wO+CnT9C_&BcF%Hn(~6_P;wp3j@uMw= z=cK`B=FqQ)(Fx>U^o?3z&4p+B(IHO_P;VLUWnI_kkMw^Q_qoi`o4Cpx3b&(EKPHBS z=ks|V`_7f^=X0&WjiOtLjv)G1_fW2kj@Rtrla!HnQsyH27hWN{iRdUj!KQUc=E51~z`%(1v`>~bILZ{*NirklHvPPQ? 
zFA1;jV86Pgo;c)=%*&RvN8GQXLkc~Jj?n@v3CN8^XyFEY@!|DIp72-u)g^kW)VE}; z?gJ2ANOUOCt0aC)Vh?2xdH8c#*Am^-!~V1Wux{#CztkNSf;GGj1saj@lYRL*9&|s# zTE5rxLF@^YKBVr_^T3F2zb@?EdfGO0_ma1nLI<6koK-}^CZCMVyV;p2S@zDv;)(NP2U$-AdS=ZNd|M&+V&#Jws$?_iKMFm!=) z#BKZBHzlyTO$gX3{70zadN5EftHqjq+JjP6u7vi7%9x}t* z#99AZY$dF8Pe zAeY!U{cO4}Vt;>B%Da)gtK^-~!qzAC_kt&RXX9O&wD*+povgRSc|M`N2Y9~VN5*rhFWU1j zp3l?jiuL5xhSi0<7n$@a?A`ZK+3l1K0%F>qG&H(9HZwbvuW z2`uPWPR3#vWmEc>?InN14=&Uxa!Fv2zHXy@H06J~O4%@qtFcyxDKvjZn5w_g_z)!gDP@3iS#P-lF+)<2^#t0$(FBXZo1vx`goGbm^zq=)~P= zHab`F3zKsp`ZR041HssZT>qjRE~733V?+o>88+m~Eza0_} zYxpMc?=P-ZwdklDu?xV><=4UWTp7Qk zywLqx#^%|6>+!oo>+yn{=1_i}=6?BgHiz=-Y^R>)F#6dX+OPgIo(uh4F%GXbt+VSq zw!{P#n*X zT_DjFN4&(n#5tU!4$<+8MYqWxRo{KY%DH?oo~23XxwG=LxTWe!wfJ;?Zoqp>ZlK*1 z-9heq*BRRI4xO7V{@7CV?@w>b3!I&o7w}Ha4U`aT(T&fmtJaFno9LJ4l&4{>xd! zC*a4Qm`8e~T$j3OMvgkt53Lq=MdtR7#AleSU|XP5+@_A`_#fc!9AI?v29 z1%kJk0+w3*=-ZOr7UD>~L%}UJg7+-z`;6V`BsFl(uJ*0-FT0u;3G9v(#!vQwRLD9% zu-L0Ado#J(xQfr}Furkr5MSj;i%Zs{MRv$q`9GTt@EdC zQSl`t`qMY7nolAhJj*(N59>Nk>Rrfskb|)+9b7YG_LM!%hA*NGoxA}2Nt}qbrVnk1 zts`d!-_CRKp&ca-ay4~`AH+ed)tbR2y;T|vk-(s@=`Z2lXU@{r^s9&H{tY?LQeg7F zpz3S-XYr4WR9s1#ekImTmNrn#tT9WnrGjVF*e3TkkK;NdH2=2zNjXs@}i=2=4XugYMKUgO+ zdOPyjo6MS=8smD8b$c(rGjYu8vAKcGhW{ds_t1r`+i#`(DcTSlUB>hrYkTKeFKqW; z@Le>Ad=~`H!HI^&+XwJnyulb5z6+yHqmBW67fV%bz242W8Q(<$ z_MefHplo)%%v#e(Pd0^T__1(K$K@5<5Opfa_tdch4|f=m#4CF1GAt}A9EyQ zI^o z0CZ*O0KVP9jl%!CPld0+>$3JR?@mManv9!^oSB78yNzE7|L1YNkL!foz_61`+m$+~er5Nddf$t&{2x-k(l>XIcxfMzo z=JW7WN1%c_S8-pL%6k9yDEBJrE6+tXY>Rf+)ePm?D7T%pwgP_MJxbsc=vVki7jyb! 
zA2uv=oqZ|+8A~a5oOynN`TdKl)s};@|FJ|xhKMhFxM#{uj#u~ODHHd>a}ukOYv=&C z;*VYdFZ|G^L_^~fyB|=CONhTU-;b=DsMOmgDxGI@9leQ4O_7w%zfUDm0`Iv_P~%*u zkU5ptI3AmARqoh?o{&df(uVNh<+Oi<_NON>Mu|#i$Ng%{j8w;~vahLxSle!?>zisz z5Z+u$d*iH*?97pr9i?<;M>?{lo~vM;-dHCk^8 z<7wwPyf$6%fIbinpYcLpT@NW6-n~X?DKIBw4p-wd>rA6rW7PF8d>KV{_A{4+V(XU^ zTYva#O7TKs>sO~9Y-msuJsrf>JBc$8x$%L?>aSrA9%U?u32r%vE>eQs*OrD%14bLa zV$QNZXtH)x4Q|VpdXC!F)+(iU>OHLC%0BN7)xQIi;p zR{5ViQ+cH8Dy8^6_Mtx)>zMX^_WgXoKA>Eloe1I!f&TtpscgtZ&#N5GeirzOJ*I71 zAvjnHJbtzJdN%&F)N!77A@I9~=zV$~811}kPgGj$Ay~>;*DVCzI?niB3Jf_p$|DN= z)}5fV_|fMU#yF;BQt$SxY3>cwJx-l={lR#?XS|4k(w^G~JG3#|K>dDY+SIylu&(^1 zvbYQSERRzri>?;MSlHr-wP4&bXwO4Ti3J)IeYYT1DUPOX<2%^{(FNbx!aRRJ4jCJD zqaN>1oO*ZMje)vvIM$8HajY}V{LU_s=bIRdmaNCz2hRpId$yez5|I(JfjtY@WzBDu zz;2p$J@noZ0qj}8zJuoxz%DrS0K4?fk3DM-!MHsP#`KZs6u?*lZuSxrC%U{3e(Xo@ zDjl4=vj;dFgOheH7}+*6ue*BYw;~-oZ;VXb>5Cf_@J8`NcT@(eJ53|2XAX)~b_x!U z&_~fDy21HjWa>@yLwLMj>PA25LH|F7+}lk3!jG$;jNiGL`m0~Bp1FlHr)z5Bc8V|b zM9|Ud0KU`k9X~p-7aiD#p3yUp|9q!BdG}6{pDDDjyxul51sK$tc{@{8WoH+2t(!7d z<{SGVrk;S7n(uqe{ql#w-FJaAnfGbaHGU9~xxR8R`32x71$k=meelnPsl);>?_X6m z7hQLwewKYiS8zn+e^flS+8XJqW`DX5S(`&W`Ppi51v;GYng!_FvTq=hHnjcZnr@mw z%xtWynQ@=0J?H=OY3|AVH|j{6O1tn1sYm!tA^L@{R?*sD4`1jemP25f%m4r3zwuq! 
zl*@m>CXi%9!8kO$ZPDY$zF`;mku((#RZ!JQ_j!DW9r z9o&W8yFEYP-IcGc%SVNBbVrBsG%wkw)^FIS=jblS<|VgmOX(GIbo;4Ka&&8TzL+0q zK1j|seny_|RQ`*Ppqp~H(noy80n3*BY+}?Gd)cRBT|Q#2v8O`tVneRkDX;Cx;Qwm= zyC%9C&=D&?yU!Fj1#O*pe^6_|$W09u{3_vZCD33Gee%=3|50de6ZE%@-*)~JtLV)# z2P78C3r%__m;+~pMr9wOndf64a4W1K>2x}Br-^+6W30-~F;$_lf`D+vJga z*F>2|&4J(&e#`kS0}kvSi3evs1-$4`$)EFI#b#vg9+f%F9PuV`JwiW{2a%ru_$Les z1ak%j#^G=Ap<9X%T5PdM-d#(6hCRmKFZMrK(0TPZxYrx}s@?Mj@@XgX$pYPruU+Jm z?By7bFJKqXj+xUvuc{HQQ?%bp`&HP0E|IaonFDM&z~%$azW?a3N3oM>d)+Z$lJ)mv zz_*;bbFr0#Pkz@M-1sKz5})#4a4oo9L%AK~BUnV;$Cu?{(+%^lKxeTN`{v28Hzos9 zPio#hepI!OEC?1B|G$eV2-84V<>ii7xbk(v*})m zw>hy_?Mxn|?*kjgzV~6oM-4-!&S4*v)a#5K=5n$ZY$tHYzI~@Uyu)vbaLs1F?lJJX zli27gHL{}$nJM`tWZe8|;A8Bdz$V7m8@DOZ_a@^34tui(1$w6Pj&acNHhoaQ0k3+v zLfIg8;yKm_Pwh7cdjDt+baj{m-EZpn_Z~6_YLFem=Ymfufp?(MxA`3qT7}nm_Y+^p zelZI=iWZB8?%Hj>s{Zj5__jY^lTmHt5h>3{?joDL)a{eHPvr-EC-MV5f7avpHCbV; znMv-PlPU0RpPw=(%=ns+A=bS!y@#M#>}c`PJU-&DzV%Sf%IGyL)V|CuW zvR`fyqn=&Vb4K`9hrrf0v+cR+nc~OK2*Fi~y;d0dzZtn-5&EBtFT$u}s{zY+@>oz_ zgN3=6O+7}L=Z!KCuty3#L@NUyk-Y9knP(_-IvkdU>Y3?b@9RQ!_~>hLu5xmQ`a;8! z*i8*W%SIi`LuE9)%9wy}nSaLnM-4a%m6NHAzXk_9P5k)YEFIV4r!&e|8Rd7gXPNmU zBc5~ElQ_jjaH#qiKX;OJSOp5J{LTDttqf6tX_#_l6p@|y$zSI z_vHcV4TS0qtf$_Mra%<_(i}7U^vvwn78x~O_9Be-=l_*8N3pHAdduc!vnRUstSc{Y zYHc2R73DJ6%c72UN&HYcHW&DwXkt#e7z=+;X%(B|X}N&c7NZqBV~jRtm! 
zbrv6y^dtDFLT)XtL7Q9rs==V$Eo=Itx3gP7XJI$i}aVV_U_`k3|aDuDe&oU_+P^RJzUGU z?&n&?^^aWVaqZyR1pnA}mpdK#bnbob@8kIgT-WCZ3V3$v2!7C0+&{|mv!+1cJTft8 z3Y;+`Z-<%#r|rP{8|3pY;@F`NuQ|8XYfi{UFKzV>>C!mKSdnSA^#V<;R{mU8Cbqfbfc924i;x(WB2S2PRYdPAwH#b8g`$LG8x!`;{Vb9W0MH| z;%_X*-Z)vd55Lnbj#p((FdZHA$X@QBa`d7v`@P$Sxy!g08cN?A#0HG_*syne++*{0 zHgB8a?)ez_+Jch}xN70W?R&_BH`D=smDch-!O0doaL6~bE$d_gZ_|GTiy5oOEOHMV zY5KheM+tB={Z4z|D?C&1;%!7PME5<7zT34;)<2AhJC3A9~yZF`q?Q$oKJF*<*jnKKgw}3|;EV`{)zV$z>nC zpZ3HKE4V|>^0C){{U5=NYt9GbGO53ZamaWjsM)%ej32t($?dzQxT{}A?`JF=T)%^@ z9Dz-kh26U2#VPIxaCkI7DP!U8;N%kY=pOW(o(|}xC5U|se&1ADSG5Eu3vPRuC&b*{ z;ryCo9kJj6_E7noc7aFm|0#4N^PY1UT`k+EX!(ho*@LXDouS9boJ_{n8UsCf@Yf4p zs;m5uX$J-?lVz=`1iyU^{_<6#DpA{5RgD8{sM73;Wqy zCO)*gdaG{G#=FRud}F`|-}A6`Sg6{y{So8%&ZXKrW|PAJU0BPPvPt|b$ZfHgccva_ zkQl~asKY$R?<2R`&}r9O&}SvSu!`Su#xWJ0mNl~?cW{ck6&O~U64`%|=y%iCy}%X( z9(k|n(!lf*WyDutVg8BVhPpbX>}ro6Ag{-{=W$+`_)$=Gwb@W#3c= zc_DhZW|B+l2KZqr`_8u1#<8FHuHJ*>yq2+Rp?@nGKR5oA@bQziC<7LOaTYN7fyaP# z3;Od)U=vuO+YV3D6!()drlJ4Bi!E087`(dquS$#L4D_XyWLO@EpPMfE8R50e*-Y;; zcq9DXivK~%TF4*l-8NXmyEjYEuOaftJDqp*-#c9ioc{Ao?!O{SnoN$R296IebF@y5 zy@2Nwa`bt@q5E*jV>#%;N#r^aeEYz4g8F>J4QfNf?omoTK?x@%XF&;hPD1!3XS8 z-xgv^4cR&c`V<)}_k!DIb9}Gt6Bm0ch|b+7L-A)AeVo>hjwdnp2{PtF$I#n3`liuv zs($_xdQQ8^n(AF^)0n$_6)dS%~k>%$RJ2E(^-brWGV9lgnhkq-xgYHF=4n1qOr8 z-s9f*-;$!VOgbR6X1*+~6~f!g$zSaq8_In?ls<6Yot6hLIM^MGf*+)XXe~`)y*bw{ zFb123xMh9YK6At<%kN6pamJ*lCk>j09)*t1LPu?^LG`D}wP85h91VRzk9LvM$ixE1 z)1b*7@bpHDE~k%vhMcabnr8TlL=NF!I(dNg?r|-#nr*WRo5Y2`*J-u0wvzm=H3XMc zwR(JGmB9PVU*kIqb9LOWV!tEtVORVPntna(y8`ew`=u|@-UM`OzGI%9+(O(?3HHT& zuGYs(*V%x}Hq)`I0JyBF4!7i%vUazxw|xHCAzzxwE%`;Q$N}*STW31f3G9-SQu@RF zF1c@(`>?tSsjK32@D+KtrT_Qz;f3(;xtDmfOTPan-}A1%|M&D|b@=z(<8Qk3`%n0u zcgUJJ;Z35yIa@;UAJqGnxLdf6F_Rb)GEZDTYF8949M$#5x%N z1$9}mSA3H+o*3|rL7JO=s5dI8rMC19VZv{ronokQ05l8X-Yquw z`r^2Lu4Iec>7hLj`|*UPlgW*=#VbCQP`=jCToRq8Krxdy%sp+I!@g_>JYg zpZC&sH@K0$irpwWVLUlteAp%__!CmLRL|7#f!&hwBU~zT>I{f6gTgM z-(%YtHcL%Iu=pL~Q$=pt=z9-iWMPaA-LedQ#NgEhV&8>yj?Jv=oDv=_dPWX7bP>O5 
z`1b}U?o6?^%~al#_U`W0;#wHn%|bWy=Vs{U*U*hdGqk74)r3TsF9#g6Mz8f}@Vz_7 zyw^5U#XqPFuAV7!{MX=I_-Xse;6?nj895)$Pk+Q1h2y@4IWIH~?%xs`5d3R%6uqNo zeJ*m$rp2^-UAcYv<0PkuuaS6@de#^==Kg=liR1^)?eM*W&t1A#Fg%};=yRG+0bN2Y~5}vL5x`<`MPq0tom}G52>OkKwfe*H=#olQO z`L+(pwR)!E)6#rch05Xxd?=DLN!FGmhtgh&XM#SvfoXIIraEX!aw2uZ!?m^Ud?hfM zJ_&zMp2K<)_>w$KoQ2l+ZDi=%9$=BYyj}RmBuCN~WV~;GZlGYo9QQ)(r8F1uJNEq6 zDsu9oi@BDkl9Ls>ewzAsN-i(*P_815;K)jRChQTG_j{SwV(;W@&l~~4y#e!ygwQxj zyTT7{LobqCbGIQQ@iqMdn6*CKq6DVWhDT_xDL>m6ck@zNKaqU8Uf|R7D%nU$pvS9h zrkvPZO-~9O>=Of4$z3&_I-cj9;5Zz=Gx=^gaT2m;(?|a@$*ok7rstfslV2%<{7T}# zpIv6Ix8UcEP}$#GRn_aCq_^T0>?{Cfn#OFTG76D7X2Wftk2H8K&OkoQ_tnEKEzI536GL}_7&)}5SiKFz*1#x)!s@u>+szYf9EzwbSvZ*A=16n~=BoeM2) z;;M4>=2JIiqQPqpGHLtUq?@<|x67mBmo@0!I?B`J!haB>8)bcJ_QT?gF){Si}`5jC*R2O~s=j07!Ek@!w z71jx5ADCQEEs0zzJj44aSLzbF&~)Od$c|a&KtR^??*n&6eyAoJKJo<5Q2L|2n+J`M z$1jEaP(l|b{@+7e>F{wc@hHYRn-N>_Npf!BjZ^>H{Wkryj)cbWk6MUNJIng6%$eoH zALT7!T!~eXwZ0&0{cZ3G-;mti9^exm&eLMkVCa5qzB_|Fvt{3rxn**B{rKncyBl+Q z#7KQkBhycA{x)=ZUtZvRNnW6L|BbDmMa|8w8yas%sPVGbN3nQLG=1@fxK2Ev6S@-u; zr1AdWx&M))*Osh|Lch7Agf_6@i+A3yx8XuB-m$duu_|kXs}#LBVFKR2J z++BOqr?b{8u;##{+sk=>^6_=T9~}#{_s!FxyK?etux`A2%GqE`40CiK+V+2>&*cNA z7?*DrJ}`Jkk3Bb#3GcW*^#W}toeAz<#eAQRT(>X==pWiRmlKa=zf&Ir>nD=i{z5MF z4XiZ@UTRMSiyO;`afCOP0{;vDESa{9*v{v!aZIZ-$7V9`CsVd_9Q~23rc{M~3ko{B5$xV`;ODJUz?7a~b=6%TGPNE}60Hn#Xr}d`Ijm-?>BIdH9ap z5*APFWybWm(3sMVo><{B0q;dCIW zkv;sStQS?As%I{WBnLTh3-jS4L1-)u-u`fg9wWMsc_eXX$LZq<^9WBHI%C`5oSixB zb!~&^b|$a~pLXA<(c?sqQ1@~CDDTl1k+WL7#ChiU&xmaX=0cS`+V~5)@aHybd#vyD zTZx&sAoGnFEl>4Ont9J_<{d3a0;&%_s%NO_4dJ_nzZ*kXy#vzpaTOTMbd z7)z`%F>&XK?>a*)llXE|Y5)8Ham6uTh9 zC$we67KhV~iN492b=NMT>k)l)t;HEL#_(F=a`ZUkGIULu2U5>zXzi=S8881(k25}p zu3QjxX`J!7usGx2kUI>&e{?r8Opn#n_ehJKSN~Jw5^^v+&iGs?&e#zZ$`6*t`%s*5 z9p%I0jL*PZCC<1oEY7%~O3w=>a7mnTLDZ#r!SeC(6cX>9f$Z#`7fj-eXYpSj2>rfgBGfo-x0Dm+N|%5UKWZomiQQ%S1IHS z7%0woEwr*r_GN=loww+5##hny72}NKM!SRtD(#Ly7WtrsM^qXy#&LE%#yA>37;O_{ zyhG;ef&pTTg@*da84KMT^mVy7<7HnEXZ)roxKU(9_t~&GW8fA4$N}E@kSQyPfx;jA 
zR*%Sbc&?K*>si^uzOQ$R?JBM+^ogI$Ud27@}hT5A|8*c?6FGaDsXKDE{SD3 zjgDCS@V{6C()W|(Zz1PC&#Q=Gk=WYn_%HkDH2zOamr*X8a%cL=4fn`?D)AZo8`lND zP1M#KW$&ldhbq;ZJ>d$PR;eG#;w7u2~ zS})X}MSX{OzasQ(Ecb76KR1^(7ut}zvtHNp=cgn8y3oB(e}K;kTgt1&jT1}AHGJ>b zeTSetgmJ+d}@cAaF7_{lxSVtd@yT}D6kGc#n)K1BaE(!YJbGTx2$$es!D70LTqykExl=DArhY<=V{5BW{_v);6Y-?S#u2(S`{t7k8clp33+g3?J){`G|=BMZUZja z!X*X=I>VDaO8Y-jjY1PNVLRpdJ3w|D6Z;Z#p^P73T^=B9A z9m%uYe)ZN;Z(gX3UT=)&)MjPFRLXtASzJ@N9_KoVYhOGQeJlv|@c@02KFhs~K?Y+} zn5ww<^IXR5w_NM~RdDEqs|pTP$otgMp6Kqst(SV`{uu8!pfAfD6q`lXG{q(n{k-%M zUDsdnq~3@5_&DzcHnCNz7whj9guR=~J6XRd#qQawy(dlQ(1kNGI`Hvh%%VkwEAYx&w^>*SfNKM4NC<`CX0_Jq_Y<3K!l;MaUp z#q&yj+Wt>?a@79>H?nV4yMKji8NUVm@a;v5JuBr>LRZ5lDRs+p@tMjz-i%LOo5%R5 z`A*|;6>i-(osNAVaYlWNg457m&=uV@lC7J9?!%#eIw;$%mscm)K1ruJmOyRwAk0^Q0!|=DE5`O z*ZSR|*w?yH>}&NEVqeEmzZUx{bw0`uSOs1q_SJxQjKIs-D_hk1@BN+sp}3qb^jwL{ z$z@K8zANJ(b2Jki-5$D^ecbX~Xicupe%CJ(^UI3cMKQns*M1*wzBuOha{d00a z1)gvg?AQ2Tb}*^bx~5+5c%bjPy(q3AtE``Q)$cHI;FBK}s{!%lLJ z^;F7>b-Vt9TyN?4)r#D;Fa3MLj*4s8*JM;o@$TXtv{!nCh9B|o2=<+t7p);?BJ@(2-;Q&R_9QvQIHc3QYqv3bZH>Qu-JOD|`!Q3(cL5W_@(#b=;s4e7v{M6(cAZ6d z2-q)S^XvHh&en;i?LH|lx8Qv~9be!s;9(m)d3zVUsle0z==2DqeS`IW1ns19zK%OL zfwz7u@LF$gJ;}YKqG1d92+soZM9R9lf}QBz_}Is7A|B=&f~i~w7ly9}rgFf3avL(H z%3XUl$6YJ`;MchC(!;qg#U|K#nsQ#Ig>RspmfOjB|Nd>E3Hykb`vv2Mzw&^_bDY^O zn`Xp4q%rbr#gzA0N5ubGoL&3*Zr8qg{;DoxuSI?1Ux;VqX_nhh&mYN2RaH5;vTFI{ zu=}>QimIl`IxAjTwQF)@#o)$@s&^(^xKSsvzGBJ|?6iNa@$dLsj=%k2M$?p|MK^>Z zS1?xaMs8W(+7{?tokQ#d{({JgTN`&0BhczzTNhc;uQ9S79=RVM3VK<~A)|fT##=s| z(Q(Iz2d}M|5-A!OI(p5Z(2-f>HMwSR=*V<(<6Sc(bmab&Q2RZpp|j9v^=`BF2xHVX zhcj@ahv`0?9f$XtJI-uyFF6b!&Nw=7@n5I*%{zV9(9rSQ)A8%~&3o~X8~5-

{kbKDbFjBE#sdq;2hG_X z&XkS9u5eRf|X7Yl~umx_soVWtceXYh0df#IU7dLtb)YM}K zO)B7=5o>jS`k}KvisvD2eD5>FGIhq}6U(%@$cZC+zwWS^6dphflVb9L-5JBAIFO^* zk>-EB3uZDfX8>~*Fm=aEJj~@YyTg2je7AzBm?**Bhz(XS{|wCcuI+;PkMNxP_3kk5 zV`46~Bltq@8%!y;|cx({IK=b}8$B>|Ny2+p9Bop%We3uOgS>%4<0D1igs6 zvuHNJOs@G2=c3~KRrs|o96mGqb)H{PzqChHpI)K;QD*Yq67;VAz?g##u;^xE_I%DU zB%veozBbqFw>OtEiq|*{Zo*^CO|03J&t@;Q*`3-J(%D4WyLC1UJ1#z*eb4@z%_kb` zzhd)t#`yg2 zf{Toq(Q#ArTCSOzH{bi-aBTC7b}9Z9eZ<3^b610BV%^isn~8$ST$E2GHHrMb_j4Bp zJY}pvmqCuyub9+$5c#p${rqI%vl(1$`8Xnc-QC98%X+{mT2!3pF~NsY`S;&4h^*qTV zkT)jMF`jv`sGu)Cg&f8vZq9zj{_sZT*h0gYu#-9KqW#NLINPcDbdhnZ_-`5gKLG#r zWgM#Wy5`hPss3(rvv86#p0WAvwdWD`Z0p{HCh8wvUu4~Py8KTavvZjjiqA@dH*YlE zvP0&rMh-bK1!dSS6tkGy*O)MXoZ{Br5S}LQXS*}D8du%zDtZ)O>e--ukiC!e>{UK4 z#-t8AIk}Hq|{~3_{nMG|{-w z7<`q^*HBL}kNLf@sX&X(;B~}p?mY@VN5F?ZlG|=D_jU{l=Q9Rn*xO|%{pL9C#^nx4 z?OU$BDp&0Dgm?O1@ox2x=)JoJ-(GFC z>9*uU!^rU=@%kpWd*5p@UjG?!9^&`R(fIsW1IGpRGXq)%FZXPyj?pqYy*RI!`>{^lkN(1$vNKy|gl4(PF%R!Zr)xpa zl>b*}oRs?>zMK8|RD-x2JKjjS??<5j9lw@5anzUXSh>ihHy^@Aa^PRlA$uF!f5F%a z=9$yv=Q>`nn6W!lNpT5?cJD_fKRLW?56}vFa^&CJkfh7bu1d+k4`|{_~_-wf8 zt&uL#!FZyBS!ZIj1}uf|GZ`1@#F_spo1d+3Pkz~0`zUnM-a$ID@ru6Ldh-OnJs=wL zO}g%%8oIuX z@N(u5d#wO=c87nk#cnH&14qp06Mv@xEdpP$oD-J4X%oRA+^G(B{UG5cQp?ZR;yfA0+)QPU5&@@cNZpSjxP zt}y0*1$N~|~)b9iMkX!m1?GIZrz~%BM$&(W z?>zi3`8%BTTztWuXtr^bxt9c<<^Y<@aQ@-a3aN zbH%Txb*kI_OetMH*Lre3tR`Q!^d057`6~A_1#gJm&y;NEx*lfvU5B++721B+J@kF= z&|x+24>f9j3{RtjS$UdVy#EdF?faS5O;%p&AH&o2@N_9W?c{a37oL_LhmO&8KU42z zjrG01j=!Ee!0^4k-n+2dy-eb1&1vNzTaY%eW?^jZl`{8UW$rCx?s5N!o#zR9Yduw- zr}+Dsj=?9g1;`gFnrSVH?Do`NiYz&R|5J7=<$NCIBJSt&zs}&QKL&Ze%7(nIZhi+ zJ}1qWQ{(Ksm!UXc>sQG~okLCuCzo`hueAz4Y)|>9e+@mHoK9aQA9YKv?Q5MjiWrkv zKI)g0N6L*Zz<%N;a#8<*`TrI&sOSn~_F3lEPVOga=KsHfUAUR4cF3gUay?eeJt^hFQ6l4zTt`$?K#TxY3f&%Ik#gHeGr#b0s$| zt>$yqOFOTV?p_*3UMKtRr5CSyu`91r%-_1-HO#tm$-1xs|E>>cJ!aHnk-%Ve%D zKh)cj|3P%)2Ik=ME!^>B?J+M5AEMr;>FC)mW9?h54|7+ledo&o=DzapJxm`p2Tl6) z$3LT&Fb|J|uk=3ejouBT=*w_qSN=I8G>SQ|^>-V71*guhd#G~_y5e}Ra-Ne5@7|*p@AL9C)4ZfeyHxK$iHT_g<=Rx=( 
zNMBvdd97jUzw%Tw_wDx=46BjNDvF+R5ZYSj9-vnuZGL9550B9$Lftn%emJDNKQ(?< zjJx8Mp^^N2&(Ys~jPXfmr0>eT*TP!$2i9uoVY-vDxbJZ54yzQ-l-WAx(3lRN68`&U zdmS$&ucwnYw)Q1s?XSV}t=~Qz65KQDCv=*9yJy2GbXKkHAEE#4hxZNW(A07f(zM7z}1 zT>6qmUxu=FzQ}z1M0qeRz1iu*u<%am%Fj^mRv(60xo%78!v^q-*CV7aNI#fFJKFcf z>muWLryS!Gt+No)acuan0Q=AMLpeI%)ZB?{yPj_+>7R7ycR%^(I`mq~3m@{>r)U4- zf=)mGG;n606bEs2# zhy5-465CIyZ#jIsgKrg6V)cFytA{P*LcS?iZ#*9vjXUj_@Wroqml?~eyp0&RL_QhHxPOy; zuhtxzf<6 zz!Z*xM>1;W2l=PiqkcQqhHMBH9*U*moA^*=AH%17JAd}NK0GX1_vcUYT<6Ah4na?q zO+)^ivif1~r6pf>(XRAAeV3l5b%On?wZ{28w(L{8yNDmup80E()f$p^s}~uT8Yjcj zT1)9$Pcp0_u5ayD`?elXNc)@O+K*00r_=Ww^g#9il4mc)e@8d4zN7aYNU?p{wQ=9I zKe;5<7so%WHQJGzJ>_?!B^4m<~niH}8 zeNc3a+owMjlh+l}kD;CVS4JJJ7vt*4w+xR+K3hBukA8$Lz<{O=2|ViLQl3d&*#bJS z!=1UOFM04ip;NbeLnm%d3Y}Y;96E3j!=gb+L1%hV|%!Hmr zX6@$>q6aRxJ`@?zG(|SN0@lhi>{BD|%E?2G@BXk2emk9Q)HV!2C;Yg;IwvE4uw;#L zada#*L(&1-sFPUkAbS@NZE63WJ6dazw{;8eF1PA__lJ*IygM|kJ>o_5NBf8fbd&C{ zcTMX#gTQ$EnD@CF|5se07Wl6v#o$aFdE(_m1m6PS2?xnOt>^M>=0el9KN%tUP80E( z$5|%fqu9r3^d%RbO9HNmjXzRl)ZUW;P2e%dM=9B$4A?GUqL@2$@m1*E2c!T zmqf=wvqMJhA!us*SJT6Hqn}05rA5~RjC*wCb)f*Zlw5Ev$Az&#O< zYvb_PM1K?UX#0~t^d>sNigwp2L>4J%jo~PrRXR(m%z8v;NN2IahOUT7Streb;12kBNbUAs? zGTncu{6*@^W%y*j%eRU?g^iKEM&rgOd)Y8&0;{lh+s1DjJF2sburGUCn`l36H96=m z#+C$LZ+`o_-2E>EChay{y+E-2^ztUrT2;W9Ia}&>pQ)xR$ z9pwd{06m9#(Kq_|+poFxx+RltyKcz^*djP5W33HZTQu+4FU`E2Kj&LBv~||XiXpcA z&V|mR1+;Gzt$dVU@(nZe2sTITN=p`h1G6ehlXFAa{*Z79bGOhc`cOk3R?!E^ zRrwZ^z+Jv)2dDCEGc+H37J$#f7(N;o|4&k1E@fQKEiHy8rotOHz$4egqeM^@xhx*- zs517QTWW^R6)85|443^i^-9&9i)??LxID&^_(G%h!AFg?=av?PC~ws}w{$1`Wz^Q2 z#_x>Gi)%gbm|^CBq`DP9OL^INs$%lFrMnt558CGZoVHDHaKV>uGQW8)ilFlw^!41*BI*_bqX7KhwP3PN{ci2%3v1U|^;5(3 zs*fL5^;Ew!yRW5JYybEo^fIkiDx>jZ{^f0_{&v?zwQpJV)4~zHrP7Drv&X31?1xWH z+s8w_E$H+?#-f_Cx#U83@Fm}?)kbEn5n_F)B`>NKm+A2vc}^KGFk@}Ddsg||G7O`= zy4}9pM}4+Ne&$2W4P0{nLZ z*`vKWem?n!pt0RY(E~eSyZWO(Vg;Z59&4~uKlKIVJ$1nw8j!0*{8nJ(<(u}tE&^Wg z>>7^|u>56ZF+4v^H&Qj$9lOnaXYV)RF&AUyX3TmqcD)(*KB03}W^J3hH|u#Xa^<

3fJ9XgM-KNE}7EK=VFUhFPT_iih=fJFZ$EYoUPKEs4p!Ot-g$KGo zz-?~bLOIWVdro+JRm}VpT1EeyYK{9e+K=$t_m|XLgikrYwMX>!Hh#if|IeK+|1l3a zIqza^- zr@%}5r|Kks-b&7ojk;*ixbjP$p4rm7{(Eyw?}ADP_T(SEpi=bmTWq(XrcxHPd4DL@{Tfx|>z5MI64l}O64}8-ME&YZov}~pkQVxk~+Fil( zV3xD58yYsj;ov%SqaNhSY540T{B<1uIsq?fTz_|?%|~i0${Z%werqA^rC4*J;scF` zY^^=btEh{+YN_Ltk-bLrBe-L&_^AngkuNmrBBv~6H<3>PnPv4~`QPuOpX}4N=V=C_r^sEp=JqLZ&=elO%v z<_+ZeA@>vZPsNLu^3?h;x_thmt(+&7eLsWmmDpp(Gd8|}zr7ipRo9y})vCK2Ijg!A zJe|6Gz1{0-uibz@-;-(NIr2>Nefn(V+a*)6!6mm!r(Ou$X_pk_S^jsO{q91wIl3SOf?yFb@kIOPCKIKexeIE^MhReXOG5AOL;oQ))`7 z>(B_Be>Z$I_tk!U+s--}$TZfL!tc(SXf(yv+5YzU?@Go`z1CR!(0cTAw`)SKh{l_Xe zTldVf)_qVj%MbeV%00#eFF2k=CPj0MyufDiK1#+57Baq)vC;Rao19EJ*A2-2 z>(RemwYLm4QWM`{=if%FT{AcX-r0QCGEZ(iKp9iaq+A?I>%Z$r) z;Ofw{9)8J#rlp_vy+-`ekZt6B!q{tH)dIYx(>6SJo(<=z%I@%jz|-Edc#8c#7Z}lz zL~}25hZ^eaxgK5CE3|8}{ayRH`=}#3m;c8$-)#QezSm5AGm3A~rC)|s{9+fZvep8H0G{=m?=SD` zSg-w!^_x2P@Lh4?&iA!^*WO0I6jhdYTWCqpJV#cWk?4eW6ji zg0kD;lbwBxpJXD-8i-#CF&BKj-R;}S0a7u~E?YslO!k=d%cg{kKDOR8_6Hs7ODPk$ z*zU7|Psr#+{EqU|rs4>&K6UZ_4M0s!Ubk;@fN&o9*8`kT#!2L0}1%HE4zaZDOz(_&=@J7vGqS#|+>i~Okl(c8vw zcgDRJ#LA|rtTCkdg1bA*PG{V9QMM5t8gmh4FN&4Dz$yEU&a$K7*EcD9Bf8p{EXroZ z%3kD@y}7e&GCK5D${t~lJ?2u%UK%Tle712Y?kw9%*?P(nyRvOmE^*4a#=cQwpzo#r zy!otibIzI8J?38MTWl7*v6tss$}2yvvks53#?W5Nqx>&jA^u%4`MJqOuqaDwd!|2> zq*y-wM+ST_`uqO2Tpw#TZCX5|@2t(8YjF?j{Alv>#@7*i_F+~ohah@Wb+N6NC+bb3 zE+K~+G%1D`@hj~cbqR5AG5#7wyI$6xL*P|>sekd8zJZfl-)Q?w-{?;^y0H2)?OgGP zB^MQcsPmKZ6F|2Q^uFXm{~Fl@au2-sy!g62D=92JNPO#!=|RQNA|AF6d`HJ<;pk1) znjI^z`;%9K&lqSohdi&+ZG*sTAqJHA^j5_vSYz}$tuvo$a0kgrzymGJV$^s)Hw z%jnNRzN^hB<19SmzxTU}+@)8+Q?%@Z3*-tMKEmX6}q-?u9hm z;oq_F?=#F1>TnNO|L{O^r0kFFt1VhIb?=MXO=4W0qYp_}uN?k&b71n|i;W@qu0ab9 zFs82~XRdM$O3r%HGpo#HT$6t>b_(7%B8z^5EXty6k$2bd`3>fXt6WLb=a|N#!|vMQ zw-THD8}6%{OP{QEw{>sV!+iFzN00AEqdB<71;6*(W`+i&8tns{j6Bb~^!Gc)l0m;S z@?=A>=E#-)(68a+q!@1vfEJ^PwYryQBk}N8um{+G&W`gHeO=)DcVy3??BCRRT;#)W zO`7vg+9P1AAN@G_(D02~xPn7GU;5C;nP1%XHj4W+`qlWDH_fzV@peMlt0>Eu z)jnk=?XAJjHWprZmbunJJEh=}49>(R&m8OORim<3ky9q)3mez`B=W&RuS*Xa 
zkUNQeC*?(t2zY8u5?#ZTJHlFmPrY^^=P`55fi z8I4>A?*y8Oj@yaybtReGBV?=J9HRcO$r(zcfA=(e%%#vctX0; z+527CKHS(qdZ7<@-$!WwFcBHoOl~*XgFj|o3}aj-mKvdtd3Ta?X3_@=Xt$*fy@oqr zWiukr>P*G=MfMw^BOM!OcHon^gZ+qPhjN+2gEJK;v3Gu;`~69rJJx6en%NsjcJfQet`XqtF+3bgj&GZnPDV_2o z*RYxw@CgW};Q0g>n2{}(?eM71?J5>TF&^447V`Zzcq(2OZkux*yjp~vuF%@INv?@* ztiiRik6ZsU9#$Lpo&E=%L-R!Q<1GI>y!zkIId|b2wOc>1eIoF znN>d77&3}`D(0*+76qYKd0$&ssKLfE2D$0vzPg;b;$aQjO{|*MD)DMbq-tiqnY5sk zdZRwI|3APQYw0#w@D%HHuGXTF++oRD?%~YD7x0_x{xPA%hyo zcNiwW>?+Fbq@1JAk;l{PY6w z#80iP{a=-zHY10-@l$%w{N#g%(kC_FzC%00!6b&%;prmozz`10_@?po7QpA+v$mC7 zAr4QBhOVM%GB8BfYaBSpXa|m){{?3i-$e6%>^sg!OZnBT`Tt||N#OOw;|@O;A;TRw z@%${=tTN2`qdC}?R3;g@G8$e^Vqe(b^e?#)d%D$!i^!Yol-In$#-;L|ywCg)-@M8i zjBfCO=b~Y=I5#<@#Oyz+*z}B&UcYcNYX?5!^WjPk|D%V{Z!MV6%b}g;F?}HL)ARTLa%d?S+7CHA{VKGQUFrzyT|E3>^Io}8tg%r4Si5?{V+MFA|Atd; zo_U^f@c1v|%Dn@;a>l+K8k}#u4%4qM;Y)k30w=`$!uCDHJXByW*8kvs6uZN0`ILg+@P{h!2~LA1)vW0{lP%b? zA2;+dw#$BIyj&j zVXxqKH~j7G%=@&`k|jf}b%9)_?)7zLz*KBe$&2JP!6uS>Psx%fK9>lw2UAymn^f=^3LesB=okL) z^l&=w)4^+}i~hLQ))jDeNw|yz7sa-M*~v%0?hf^X7Gv_=$hSEGEr`_X64-v95w?amjY@UJ9shvrpH zg`u&7{O!Lr2i1%oNHE3^iVDfYiiY4-D&83$%*M0Zh6w8KtiQk{MVwe5@ zEXG)KBXzg^`wy}AF6``M=)2a?bk@8(;5A<UsSCi5!;>dX9F}1WJ`KI| z*e@tXNP9W-uG9cB6h5-;H#wZer@fCC2e@k|-Ma5F0k@;VEy6kyr0o*z@sJlmbbj&k zqrjq#TP@ttrG)#>=07^g!F};~@}?%#8%sSO{ntK?yF_Pe9bAu2bO*4<&{hU+E_vWHYzwk@qyj z8e`Q%j^Cj&s+YrB#XYM1SX)a9I_tH6#@>AuHprbudaHRYYid92Ajdkk7Xt6F`+~hV zB|_LOS`U%?&htdC4GY=pxPe2`9E%>ltI$1=S1&@7mmYg`G1+Hp491gv+BU5@9`-4h z089PN;XG`Rb&@@iO<&Gr%YUgp73sVOcF+>+pcd^1```YHzJd2Vbj{$Ue7m&sTiZu` zL$^Je124teS%NJpb))o4beq^arabSYKXcbZ>>c;?^&?)u6aE!2g2+<#ZQP-Zua9+R zHszzgvw2ziO9p?5bMx%|zp~;+RL6_WMe#zz$bBmPNqQ%IJhPm?v1Za<&5dU{U-mb6 zDI?x~g50$2C(U)quH^8|gY5e~w7<__Pbxkh1Kc~fd+`4UZXvQd9_|(71Mpy%iHAEB zxG%PKSh{q29KM&w@O_3gM!6Lne5ErlXT7#{=H|N7W_oz+5F4(ggS+kA3KlJ)S0i*4 zZ8V>Au~~6m^o^4LW7BjL-#ysce9*<)%TR7^?qupN`yBd~Gw)beZY7uRq9FZh{Gkow z8F=kA?%Mc`cVJDEneowL?)ugIn+8AlSgY%gDj$PCuN$u^UjuuDc$iW8^!|pPVSd8? 
zDt=B2-s|EGZsC0szr|NaxN~~|{eGwO z_rUN#eoNP}zk9;U#U);e(wAGz{(HUHkklXgwJ)D@brH_s#OOM659K~%E**hi24F*a zmOY;{ULD%=F$WwzNOC2GuNZ2N7kiId=^6RjH^5`sv;U(u{r@B1s=a^dsm(cYJiQ!! z-fr_f+a{Q1?+X&!Y%mAZJVIYr!DpiJP3SRuBy&6an%r7(sHNyHp2RwB^rM zownfh)VZGbT5D?kZOc^$IVpcZJnsr8=^2uf@)yM8$G5j7_arB&Z|Tb7!)*E)j}ztE z4u4|Ld23$`ZKO~ChB+_Z*%?2Mv|*3q`<85aHZGUTZ>YCF#yiqOd(nUmF@_UZ6~#%DL@C;P|x6QB*zVBx*z=?`mdfc^-E+83T`e^Csk+U>-b zSVzh(StmTeZJzymN3DKK-yl~d-`jQOQ+u+W_G3CxdahQoNuMPUT%> z7W-A6Iq35h?A2tOe476w#I)cqZw;Y8th3_yjBo|}DSdb1%N0{L5O~TJ=V!m=oF90- z#`f2Lx?1Ot{5y(i*GG=BD7IR)nO|)zvf}>c8MSZi?O1<;wiF}az`ge={Q9%}#PHf} zodu5n^N#Jm>j9o%HF2gWmp00?jnMDu!}eG|9C(TQ9_@J|Ydg}d++K10`O{kZa+hL< zo2FPX^VlE)oiX#>_fF2f$)W3m^jZ5MH~gok@P~d!bzYFY;cV@X)`P#(r)==uNqc%< z%X|4i&gcIa*UR{cY+>40i`FL@Pu0_SHUgufzhV*%#wDFN)YNdZ)lPaC9|-Z8so`Jp zTYE=4pGQhqZP!yzxyi^|$iA>E4#}}?D;`O9?Nh{#IO8+15Aj1*?tR0`rE=j{K4P&v zC0#K>9y7O1u^UTc{n5FVKk>h4Qg8Jy-J(f=z7eP%&a}?7*zb~otyn^*&*+1xdC)=o zZk_W`jO1g$+VO28^cFN}&UV)_FW1;@4h_G?H!FVRl;WW6*h|sze6%Zuzl>km{oMSQ zbZbAWJ@wyK+IwY<-GB3cQ!HLeyb-wd%d~2~zl&A}Zn3|MR_U=hrg@x*>r=pT(La5& za88JM$+Gy4oVKfo2fg{1*lp}s`cAvemHs8(*qzm%p$~h!>~C{ji+qR9DbWAGDW~m3 zI4Z*)eqRQAS`RrUs;8ygqBU>jnU3y#r2ZlCXAI^h+7-+_J>aJPKm)5kW$dGajD4U_ zcl=cb-7KqrsFUp=+=3u34v zLyd`L*~C!_r-X5E`nC%gYRh|`w(zs8_27>J&qZbIdCP_vOPYxvN`xo8>M83_vD*xu zr_6fFWDc>*6!(Ca;IJQCYYdLzejYfxC?g))g^eYuGCk1zamq9eu;KX5)7FEOsqb%> z@tmiv`IM>X7c0{PoM4Pk%CN79meuE>;(zu!((i-We#~t=hxnJsz7kCG@6h_HHk*yMw_4!wMEtcbYTb9_#LU4u zuf+Uj->2AgpY+1fvH2|-78jdtVyDbEdL516zt`GlX8QBG{T}3B((Sj$zoXl4>YHD5 zys21v)lY1-T1&=oKY0mvH~GzskHS~`Lt}0*wkaRrYuI$jH?*(uYeuLQ-|`h- zV`h(>NEeD4}329Sc(7QPU!dQ zwwUhlXES>CSrW6DD%e}E`s%QkUzq^Aq-WTV09)|_%%w$}$W_{KneE%pnQhE=W2^mr z-1kcIsdj#!Wq&`e?{RU7(OaO;&^R87qmSyAldIOvIOQ@sD5o`3T2{3*O9>3+o@MXljJYKqH6z$ys9-14UjkJ6iGqC|% zG%Jev`orRN-rW={_bPD=9^n})cVk>R`T7sx>mLQp@EKaZew|}}=R}vkzX^Uj#2!fY zNQa-yv0ePKrNHKw`=PmbdHRU2&db@@_T%Tt!bv^zGJEmVV#YiJ`&$`yefgV&=hY`W z*55_DqQ}CF?){9XN0|O4#l}y(=4N~xdc^aZ${9{M@hS7h;zxWU7C&0!F=&>`oub_k 
zJf6f_*TXa^l)p=LF0MwoaEMV=-|-3h&`t0?#vSa+4BUQ z<{9D58k6yT>4__^dXJ2Vl6_*j%NC$SPS_c92&XsSp}f&I?7sWD)yWB@$5EFpqrn% zqPgbD?(Fl-6UuS^-!6yTRcw;RKbiGbo$S@4)oMVUSGNf>z9z_ajU z4}U)VR>DuiD5w4e!ba%t4s5L-jB!tMT6L1+aLr-A_Em5_gltkdcf#BkywkCE6$`V4 z{g_n-86D4K^XbzO$_dv`cm}B#V810;n9*uM$5B?RMC(2F?0& ztgU?JKoMgZrhGE~*j#+ATWOQB?c!;t?cc@93%A|u$r5q9GX6WUT#4TWs|4JCNx4GS zUq{yNJl?e?6fh5*7%=%BG`?z2F;+doqwnFka&F?XdMfuG?XpiyZB{Jj7e;6?|MxV% zC07(nrgh2bw>7VM{~Yf{bKM!JJV8$UiW6Vc0L}J5LwM|iIeb@qjRV7x9~$E)&+|>Z z`Y7MZS)Y?wt5s(cwu(c<`dEDVjMiV`Y4KOgLw{%`Hs@Y+1m%a^GSLVfLf0*^@`^ff zHU0LM^Idp3c=X2R-VzeW1G!k!5{`I}#6Hxv3n)*6J6v=vwyR$av23!OL9lWaC>~33-v`{4b;_mY*bjPw z?FV+`Jg~+PJRso@{2kUgCI2z-~X;(2~!KW07+-TUwdCV6Y<#l`sG<&HuwSGxBW1}sW<t$2NX*1q0=J>Oex2eo9 zwofcI*2>nV^Bg}1W@hL4TjjA!wP4G?P=+mzerziR?^2%fiFmCx(!zgwnz3YV%0A@S zg~*v5_C7bjntSyl^F!5X_PC6qUdw3W_BcZvDl+!9jCOIilR;H);EjKx_`^!oY<`kIhWp}4a|;@{15JijQ-Yd-Ay7nJArnTgHbll zQ#vza*({Z#7d4o&>Wk*jGMABuF%f^0Y?^k?v7fHC^UR07DE)9Jd8drQ8;5%aR}VMv zuiAFcFR!O3fY%eOjlhyEWf9|)d3gB;O@rPzmg)ZEv8EsV{g}qB;$L6>pnggCSjG7N zI_5uOziTR4`9bM#e|D_?)ShD%KRjud8(;N7Q{7LGMV|fRG0c+IJCnXCuTFd(9m(ZD zAOD)r(?cGx{3{9e*XVx@xl-@#oI0NHX6Q7Tv5)k1hvsoszU*duF8`>H%`=hRM(uLS z-1r^xX#Qo$Evox&?At-!AA)W-@J+FH0qBMQd57=M-P2ETU^g^bif2>8!fRx#4(FnF2-jOLHP#;Z z*v9iIcdR_`RbKdxp#1-F>X5%q^D07aE@xhiv&Kt#BkY_(PL805h|7BL>qc#JQc5@a zy#vh80yh1Q<6|$o-p2KkZ*SlFvBu`z#ofznrp$55s2@LYXaU}d zxRgU@?s{D$yEAib=KM$Pv94jPXAn#0oKezzx`lD^;TO1xXEDELXqA=Gnj# zzeZ{^PuUEkJPUbxSSS5FeLTO;vzX^(o&la;;~C^RiDv`Pi9DNm%KsbXsn`<_YvGkV zeLTnVEao|er}V8+JcB%aJR5jQhi&Gm_|hoPY@QzU&`h2_o)_~h=6NB{0MFq(gFJ`v zY~VSRXEV=Ko>894lj*@fGl-{;r{q8}&we}uJf%kmd3ty@@RS^H=Glv9l&6cQ2f9Uz zjJ%eK_LxR3K2km&uf=8@OT9_F;Sw||XtTg)%F^@qf^=O}`6ka#?D8SX>;7Kf>-O(zzH9GV{F+_9oZk=X`;WWI->>?O zG5FM9px>|B-&-hukA82paZ`Kq^gC>q*M2(C5W@u?{T9D{|7QR8lBYp?*XWOISh>sr z$>fGD_V*Urk}PQ6Z2uOnk_Dd4vGV*b=C^N?-S#woOBMvK?xLGyL2#q}Tks?cd_T0y zt3Anr;vd-Xgu7%x;N@6f`7K#cyutomO?}CNh6_>#X-)C+-KX*|+4akTCs`19v5OXx z1%b=E@R2NNej(N#@GM!dF4i8uB@3F@#^3>8vLN_;3?APl3mVqg^a%0Yqw=-(_i}zq 
z7BoL+!z-t}a`9*`?8&lo2u5E)$N3L?zG&~*IbWD((UDzqWyU?RbuD3jYEGPQ4%OD! z@?hRm%1hc^7duY=GWq?ab9g?%4`dGAn_;?Ice5S@(Gd-EDEAAbq3fiCe@OjiS4vF~ zJ(qpl2ll+{u3t$%vG)oo;s1HeSo=Z}cS*7bJaf;#bB4u4cXM^~J1Xa{lV_*+v&TQo zGd!GQGPX6aMtY9;*JNOuz}`4pXK{u$Tt=YJ}4Q{__!ZYnX&TG_<1@Ev9H&UpLi zx0TZucH3xgqI`-Kop<6O| z+Vs=?40gQIP0(+3 zpyOxIQSX(v_c`AE;cUltdmM%$tGo2bwmAiy>9NluWI_w^##bHpEj^5TX2Q59>f7;S zK0h|*e*;(HrnVFHO6kg4hf=U>Nd8|9{Nv<8jKb?JMMmv7Y^G866%~E_;mJ=V4dvkftW77a#zRt6mZerXgF=D z%|ty&{4ZDyu{uvsN9(rUt6o2=UTSzvmG+-*;x?1u{^ks9{(|THUx^>a z7yAyqi3{MaiP7wX$tzXgcS>V~T+Q|FmnR=*@1|VIo2txxoL^jTc#ZAnmflEzXVCBI z%;gf!qS-l!IV&=A*<62KV7bQ6tes$uU++*N%&ea}<7d`x2JY+Vr_n3Ovu!edv2Pkb zALG~H9^O_@nGuY`X~wsI=Xj=thw^SG-#JHjjYETa`gnv~vZaP`jSqYRbB#RF>W9#( z0{T4<{fsjm>@SHCjO7)rBsN#MzJHshc?CDtH1p# z?Y+CkUV5jVqLIy)Y2lBXy7)4&|BXHL|J(FmeGbI(*=axP}4qqy32oX520wl$}_+mzzZOCCoC>uhD=4{ZFjrhQal>&NHIYc2VOuWws1 zHUq6iT4QqIhf&C__prGz?`m^dAK+PQU6Ab~!*I27R&Mq;45PJ_^9G_#Dzup}$f!xL zsF?ZT^LG2vA4H!OJ<#XMIQo=f!_s)%34L0jlk$sf#SgmQHR$L&SFay#(S9HUby zbZP-E_TEqhW5n1i4(5Zstfvd;gU-IZ%DTClJ~ZO@*a_~ex3kr^RrE!^809)KS?Bb> z{K|W=2hU@iwh!^tlv1_@+KpLF&Iu!F?@G#Q-5X^Z#Gd)vefR(vBkaxGCq>=dGacJ? 
z2B2$<&YvgR8Kb$++BCB7$QT-4$T$_p_7b*kgzb)4TDz9>kx9j@%j{?wfsHt%osZ z?AtZ&8=!sQZ~iqGLSyMO%bA0=K9d$sgAamz{cAS%^&hK-=Oz29$uHA@-hx5pf2Fsa zU@hG?B(>(t`b(`fyLZG8Pi!h?jn~tJS*CYAIqUd zF0>%9z4icW>vVW=0erIz9ZI}tFt1ABMe*K7w_)+3{*SO%9uJL+Snul5o3?ZRmw0g$ zyhsk0y_}D4-Q5E(M%h=&p1X_OH4*so>750JAHFvqA>Uh2W{irV@cGA$lfE5GCYYU(3v zZ~K5)-qFZQ%=7{XY@?H8fT`S*(7 zdeq)4YTs}$wm&dE+8=nr?cd?9r-XNZoQJ)-hjErZqJHNau0?YhFUh*jyOhxp#=XbI-vi`_1xrM|76+Qnq?*pK&dewd!1Jnfo;h*5B`Q+($Q=9ON?{`^DuO>Ih`clS8I%@^A@Srcompc$E zXI|XBUEe=z7upP0AD=Gx(s*3(Rr?yFjWY(x32i!g#IGJ?%h33oW+^|j{R@FF>^)^O zx~*i{v|kRsQZ~0l8*Ai{{a$h9R{>N7K|S2uo-yI&TZK>yX=J(aSI%Qo@P@1Ev< zjXrihP1;-}J{A08{sLcYY)*u!b^_=5X7Jv3zHk3`@B^a9^6PAwZDRNT_7l*vNHzm6 zx1@}%Z#ctqs*Cx+kj{C`}aDjd2ddKnJZL(EG+^$CX5sX2*CYyuanH=fsYV-~x zj)U)!zTAJ4QaL#(LYzEt!O8*V{nVe=FY3>0<@}%g3)Zay9p6U|;dl5S zBk!|T?${uv#YE@BF0kmx$c@8qq91DeeRK8j=gG~em>1T++3&lIiK35USMK9%@BUxl zyM$(q=xximV^wyr0em}xOkE9)bHG)xB$s24yVo_KwTgJP1)S$^y~YR~XCJYPeaQus zQJu>vcRy#0JAlcWG+Qwy%cvumYVWVW{3|f`BTqKb=6#&K-M_=m-KsX4h*!Cnec(J| z(-xRUXB%0xp?$_i`jCa4qm?yxVI?{@{Zp*Vo93#z1-p#ee%QO$b5}qoxG zh9Pp(Xq|qM)+}S+)56&s{F}ru8?`8~YojBX+nh_u;(av7-~JF~wy@um&aJxG z+^xFDk+&Q9<|AfC{-`(Di#c#M$H4(^C!Bf2rU{PfZew3NjCvMdP)4wCiNh>HN z69=yhdO3JWw|DR==m9V0BYZdmew+?pmZ0NK!)I^XbUxWQ_!jZ8+P#{(Ek*vkW@xW@ zp1h#E^L~hCWSxh2NbP6!e+z&4=$$QntLtzd%ka+3DG>&91KBk?Hxt)X~YdV zeY-ubZh#S`KOI^rU$$$EJ>O)Lj$jKbjQy70s&{|mog?48%$p?U9C$ABd#`^? 
z`!UUF=iUVLZuBP3dH3GcFbCd=x`TD~j6o87*hIO}>uvpP6|}gQ9D=L7{c9>rd`@20 z=NK)BtA#G`3bcs!4b~OYHzP)i9>2wC;Rk*`dz{^pjfQz7T@0d)ZociX0 z)}De1HAX>ZO(lCu^?x37sv2Ih`ip+STt=r$Zp{RTG4y#AaeC|__sO22SiFt&Q#OlC zY!Vjzm`gs&SvDKm7zNBt&_w%f*5eQKTkFppbbQIXImjAfLuMy~|1#*O|AU3}A33S~ zurq+6xfnFb2Wt|mj*R)v1^&l2!5734TKn3L6l{!^>^2%-rM&#e4m^`Kk~#OZiuX72 zr#9~6xpi=`Zq?vt>O4G?D6j9W_!<;HrT<6sC*P6&pU$7j|Kxap@K6J5eIm4X&o@ky6FDA~*EzFx9zrl+y^G)-zj_2lqHqRu%Gp<4Q|9!GNFoknPHg>^-6Y@eT5F~3dM)gCsn>Lrs02LT{~)kQ$D=COZ*w*Y3qM@P`p=W^X*LW;7ID`r*qzp_2R-l#stL&foqwbQ$SEf1^!5b0C*FdKB*GSN+o(ptW0V2f6bhvF&2&-#~jRyBA(t#v0Muw}$nS ztOw3|+0(bUH9~7cGkn$zteSZ?A1BIM!S7s~`TsrInnPQM`>v!dV%p={`qH;pTf%h| zx!trLX?+e7L%$4Klar+V;kia$#SrdyTvIu@V#qr@mro9|Hw!Sg2hzWTf4A>BIg5{^ z@TmR2WZc|ivH$T`yc7Fh`}|{+dnZ;WlXgw`>j>{Uc1zB9kuyoinPha8e&{Ut+?Ec& zZtJd%qBEUDCcHhMVA7lahO8OEJ&*2nbq!(kM&v`U3yL0lXGrm5(wSP=gK(bEl0BJ~ z%O_`6R`ORdxqJvnCWH3j56VqI*2 z2i2z5;Qhq-9Y*fVag#@!=eJDnD9MNjYopeT1L(!i!xxede?Hf-elmKthklZOyT-%% zC;bQC_iP{d1P>c|h0tpw&*2vu6V!%ejq1Kn-O|3s#0cMW-S(QhaF)%JE8)o+WQLRL z!H50Rk}q*>1ew>$b4~uLMLF?R6dvWOx>EQ9gSHvTS{g8u41UeMc=ND=FQ*elSjR_5`JUI*Um(U~lG~aWng{lL zSUy>E>WO$zRBX~X&9#fwRz{R31Ym?Kb zvai^uD)w+u&=x%V}%aWB4&c%5sH7`5M`PRA@{ z1aeWj{r+QK%XZS_>V=s?vG9eBO?`SbMzMo5^Nl>M--{xXkIzaDwYlASO?|Fw^b@bK z=h(EiX~)gB=oWWg`z!9evs>MH4YU=Z-Kp?tguX_xp*_(4q?}lf-1@GJi4}B9}x|8+t4q`+4 zT4%$0xep+egD62=Wb>6MOM<>2h{$x9zYH0Wj zy1}F0G(wM{i*UwuTeB;@#>2i)e5!F4U9<=IJ#eIlYHm93W>Y>4PfM>p0PhTgZi|7_ z>>7qY6MH*jDqBA~Pf`>;U%o;orvIu%#@a_VbH5X|y+_e2Tju+8ze#Pk`oNUF+eV)p zmnaT(g))6rsR?jB!ly2vcda)g#GbHwm$4J?DJ~AOn;sy)5sIOI^U67ggwxb zK>Ry$jCef8j$qt7!EVs+7<~P1PWT;-`|Y&v!A9KK7VT$XchDFL4`19j!7JA9IQW5t z-@%064GF)S6Mjb%etWPL#>4j|{4P%T9Z2{cO!(c9@VhzTcQoO*hj`U)?I-*$PWT;2 z_#I66-H`CRIqtVZ|ES(2jK7EVJn@~chj+z2ybJX3F4)7nhWK|e9&3(!r}2*Jx5KY7 z-t;i{JPG*vdU#jd!@EG-JHZd?cN{-8#QpX&$23nI+oAlBO|xwKUm4}@!Kap3rfH`A zee+xPTC8$+Q_g|mVQw1uqTzAHzHJMvwaXKC-{|UJ(CF%CTK7U`WNJUMtKpl&@P*uB zms$Qy7tCT{Dz-B5{%g-A%<0OOHpRoXus6!b4v>Xz?>YJ8u?pncc;+9rw>64&P|o{P 
z;E-}B=bgZ3oX*CwuSq?((NjwN$(6%7XXtv^^6hOxM|FIAgWEgSA7!0#;wW_=wsXFv zsU+6_Rb9AV1g^Gyz+^qFm{R$wU1s-6yG$eppJtI!`zK_gZb4WZcwh- za_^qH;!8anN?o3z_iESwE6;UIDD7|4 zDEmd5Mp?|!y^oMb=XzVN&wa{%Z^B>i@V;ZWLHtP9IlB~W1X^=U##{P;x#3y8@%VHeK^0Q=7aeu+m0RX{qeDt zp2)G>C*IqTd2afLUg$Orx_RooBa)u*&RXz5^~@Th{(B`)Y~4_D?w$_|HhD)Rw(a!) z9P4^~|5ws~&)JR%6}|1g7e3e3_f_g+Q7?4X{^%P6&^rdAs}91C)~i-AQ?@G^I(ew^ECcaz)PHyheD9J*stv-A_rouMyn(|MpWp7MDbJyB*IxtV|67;ueFr9WwXnz{Ia51qzw#_Iy z0$)lm?x{xK#J4p5R0nyz;Y;x(V>P=HTx8>~;2A|u(!X#=hZzb&SDjb3^>^f^`}>n+ zZ!PBi&SjHB4OJuBEV&6jTXM{Oy?Eb;KVr4l*1Omnuy0fhU@p3;w>Nu7;M0HmH?qC~ ze57aD{-N{Wo&e(mtvT*q(77M|>`#BO!7XJT?sj+5*#{0@pFiXyj-wcQH*g=g@Ra?B zfOjiqU;cmO^X%wu;>@ewnS7vX_hh3teXM$SvN5<~N~CHR@0$2?H%5z~6>;pD>~U1y zklk$teT2_t_tQS&Huj&Rp<$%&RBP_saX0M{H!T|1BfC%DbwlW3e6Wg1)HpEaUg3yO%zkTY5A7yovta$hgj6T=Gko|pSA+m@6S^K#obY-0*`t)M~P=XO{pIK{ms|qt{Cgz@!u)I;gNY}?LqK6WQ}uLcn&xQm`euq z(>TQQ;rVEiSneA7-KjJ6@`qf=1I_yf$!n~0ji+w!!?<*vZIrC>F~>FL2cXOE(`&)Y++iyTlx@zAoB`+^F!QK2pbIS&z z`w`~S&O8er`IpnejqHi*@!v#?jM^RO9_*VfJPd4rI#+^Sa$5`QNy$C-x|aNSvL(l( zBcb~hy~y8X)Rr&m9edY16uilnH5#wzNITxyI;+9C$^*0^*}3OV(Wfh(TKGy=HNb89 zqqdF{AH!o1*JVPBeovCufi++B3i@q);4}L!^4^k$lZkPSh((ZNkQb*@R;_Fm=G3bH+$K1QeM^&Bs|9fT#Xsd{|DtagYk)O`~<7L{LI?% z{%6fN_~=cDgS;s>{^B(I4(+t7hB7rjDURfj+c5IMtuJqMp5cGpOg}Oh;+t$ZFXE1$ z8gv<%4$Ia+>3m?LdgRke;^qxEZT??2D$nqz65s16eDBdNtE{X%gS&|ge?58=n{K;2 zvrhBR@ZS@!lh2|z+G6^hPYYV_*bN#6cZ@MsSfkV==&;6zUrG`7j7`;KFW~X=zB)a_D^9Tw>2&{oNsjj1iWGiMaX({1XT&g(VrZO(nrvS+o^$u2eT+yPSqq&4%{n>k~CJdYRp z?PY_^v?uZ*y_wI!%o(-ue4g!-Pv13+WRz@kRe#y$cA~#j{VScE`^FCT zci0uGy}T*L#%%h8c(5gb2eu8b%9RbTcwnd97EfE5Y;cr-wD4gISkUH!oz|vbcD-gk z*y)N7c0RAg^T~1<8&BF&Tei+NHnsWFD^K{NJWu3LzGnWcU&J3f{~z1=GruI-ZU+*2!y3!NuU#dEzwUa?xhH+w{G;?mebO)e7t?o)>RtYVKI#8o z?DHqVw|~|?{~mbu1LU6U^ZP4Pax5K8`H#*84Eh(!y&<7o&7&(1_O@M@j~b1WclTRz zpdCD$I7S*?={rVF-eI8e@nd9jqO4UJ{mI)!_*MMB$lI^=P4|D0x3l}?VUMMMRo>?J z$w&2F$G)MBv-N%myCrKq+hO`yO8mBt^oidR`@h;he3~(|#^~LLUn%c@4E2|XWdLhq)+elsLg+EHHV9li=mhR> zS%BZH1@ZIltoK{u^qH9d1~^oZ#IpHwf4*m1 
zS=2h$Y?otz2NyC%<+rcUYwT}fK4Gpl&+%cy9(3O$y!J9St?urHe53DJJ^>Q&jdX!- z@K#|KdTL*NquO}%J=6`|1N)5Fov&t}@udFtFH5bpP2V$K*1n>3{-*+3N|~Q3jP&kY z=5*CxYv;am!&l<;RX|@PPG8?RT2t9HRzPD>oJQ^8eFC3^Q&q3hFJc!ThUQVM73D9k zw)RiH4WVU-@e|b&fh6_7P9fqrlU&?l8Z~Bb)w^s84+Ye%L(TexeS& z@rO-+B9Ao}{wjg~D(F81{TImwhPsLFdrW%8w?6r8RsTMDc6QqbR{yj5D(lr4=X0l9 zxHznSd{3&`@Aoa>jPW{jo%H>EIS&6^>>75csBXTAHqbYjiK8B9NxF2nBE9^Ut+S?TPsB*J`qtn?Y| zJ7mTGr}JNDg%j)Nj(_V_zY7xT_w9dBzyJP{`mOI>zeS1l3!c7y@4no-emi+z-=}{6 zO<4OMtn_>CVl5y4|0n*deqVMDx>Eirvd&~pBKg94bh+2u1FEEdjeVavV3f|5TRIMn zYpwevYYu=_OE4|;$yJSu-eomF|MeIH0;>&IuzU*@GWw#Aq zcA1RDdE~irf;S*su5`0WXXETIz(#l)kN|B|LC`0~aY@a=m68fzC zThTuw=!oU3=U(~am{65pihftT*ft?oYwquFEFRx$&7H`{-G4yVVw)G&AZw(9GDs7&(hh#I&QWjrS+ZSD3?(0K$)?@Um~$T-Gx> z(>Rv*)$_giJ(IDkt2O*b7dY~Z|0mwpw&E9#G-L6haiR=(KAtjY_k;fzoHzknd*vY-kPMwLiA%V)9NS4*Wjt z9^X;oXiXDF-`L`GPa?nRid%omeqeg=ZQ`EpoVI;NyYhCcEsM8U^_Fhw=$+`T#tyq6 z6TOhD)e66CiS^r6;_!E%C!4_-@r|+K?2V7lv+n90=X~PGmZm@2UVygB3@eYV0V{3N zcY5b>CV6DVeRWQQFFE8VU2kHZ?aW`YCw9cYy^5__BJZ-%5n23A3$CY3t>cu3{>m>1|V3pSi(C z%pWE4yAbz3#lCeaI@QVBlPrvcUNmxka-eFP^}a7>Kww*&#lMx%m#7zAXui7w*Pn%6 zfbl7r(M&&;+@U{XcZi~9@T%_H&4w|I5W{_nD`*s+5Ep2J%1ptgE{!1 zGI629UkqP9Usgt&l(J^#oJF0bM^=ARy3pHJx*eob{jL)&=oAul%Zm56tBhxgUb5(U z<%oTro_ppAf1UTxJ16}Cryh;1I-L$*bA$TqtDLQW)oE<3M}BKBOgz(Gpvi0PiJ`9! zt-wyg!(L&AufXw~Rjx2^*H**-+;z~(K5nzqS0H@bf{yZ4^70L^_7-!=Py4v_1IWwP z|J}!UMJ};_!d%r}VEBA9i{kU|Y4z;!bfNE@)FIGH+1MG5Acs=@<9Hvz8~ko z0(eko_-_&qbT_7W06kUkpkrM;4m8-d8@Ans%IpOHtubk2UCpm_Z4Q&pwBT;&Ogyul zcC9yHV7nyU>5=bI7viT~i;w*^RvE(V_I-hNyl3Innr7>}iL24S^z{iRpLE)@^`IAa zr(1hshA=SmvElI^UBh@qmhD>jfi-p{x7=|)d9ty~%aPqjda#IoXUaeR7xVuE>gQN( z*B>4sljL(2d&y_C=OsTIN;fqj-9>h~=TA+CeV-}2%`pPsnyG)>AG67~SH%aUjr@Cm zY|){$%zM;@yYI|ax=16s zmWi&3?31{RX?a)np^LE_avA==wt~x{t5o-<&MVK~#vWSza~6-4ukJ7vHu?FskvH8! 
zyVkRhs`*oR8JQOl4~Aijti7&!&Re&Vm*O9JbL35Za z-ur~F%U|{8cYounnep|P_sn@=qp>|czHYVZ^^;8N7u|=68P`*Pk7N;JjQQT&&pC7> z^6=lScxwCe5@bdT^O0y(yR5-Bql5WE^5TAImrPMuO)R!yE$bJ>TYax3PnN{LhjOs< zL*J_O(v>)pd04y4u1V?bFwR;q=Xj$|k2m^6JdaB9VBgK0Q}#Vz(dXg5d0}+PWqq^% z%dW<@rQys@Kv?S8v*2?kcIZp7EqT`3%MRXe<p{E_vRzALf-xq$z?<{V>Z=Q_jx!N=4YyDa%l`UB(nz);$Ct1#rL zzPobayYSmQ)*Ba<7X`Fem^Y}TNApG)TlF&VRQmN^bZWwB)_%a-(xK&8={iXlfW3g!e3;h(6m7^eKOkT5apM#vbJt!1Us*V=df11f%Cupojo%y1~Y7wUN|?!@K=GUeheSH^tnG^ zbJH@yYG+$!#;V?+ zmgR7+WRR6_VBN)mQqt_pNeQ^)d08}D`Pq3bRz8DH(&}J76RpiRSmj5qW?$tR3!`I4 z=2yP;w^?O~UW=}jK<+3j{v^>kxpx_&_d3d{^cd4ur(wgSITJpW!5`MtOWIkZc-U_Y zrGaNNv6teUiNX0L2WOYmSB#pvnb@60kC&i#t}vL7XhYI(Kj<*$x|SDa(4SAX%d8Dn zdx)pO7p4U!#M^IM#mPA0vD&WXhrQcwqUdD4%OcIcN>ACRC+Vq*rJ1ye=b~eCY+9gY zFLsI%bpI?>ZUNH;})#)M}f1Y0H+Q7F`R=Pv7stDPPv_ zMg{j=3r;+iV$!|s`cwHi?FoyYhpRn-Beapm!!FYJqNKrBP7(BUVZ$2ApsjPj`JB0z zZwbl$${s76X102QubbHT|bq#ae zhPu(4#8;Ewr0*}?Pf6E)D0Z>pRvkFmUUuJ59Yp_H<-aFon*8kbT`TI$y5^^Jd?OVV z#ChiuM)7bbwr@@wX%B)6=8v=Lpm|dm=22|^bf3)HIIgn8mlLl4@9}Hv)hW|#Z|35B zPxa8dgpc~3gUP+bDmw%{oy>bG`>t!*r@q!|j}zgnaTXE2ZJ?#gvBM`2ejDNSsl%>S z{&sj5`Fw-$QjcYOlo);!;cj>De2U2jhwACu2K1&!`CmzROLFh-zU8E@4flZ1)aYo zJonSy;kCqnn{f9hr-pA+{vY)&x0rmkD4(2D>HnqT@9Uk<7V>|M@RmsLaFzQq;Z@<@ z_3a`&sQmxdyWIJd`xC<5e?7InKPEif)h9mr{80JC*X?$@uO<8`<+JzHd>&K$KKZOg zrahwkL%r!#K0d6bkM$P(yL!kg^*T?}Otp_sda zvP*24kB`usjlgKi=#M^~5juK@lljoayqLuNm<$$VZ@SLmm)^nFw`4%ywxiX%rJsNI zX!Wb)>CePReHZh?W@lQr6MOBJIT=j`Iu_5|jHc|fy@4_K9&%1u+4xK!`n47x`RcQc zO1A~DuJ@ zo$T$$+vWJ3KZn;2+RI5_<=n+J+)uxZ{#wF$^OBUEA6#R!b!EZ>=4{Z!#*)>MA@Hdw3%EP{4z%{x@Yy9wB=`WoA zH7@7QuD#g5*E#)5@sCx{IBg+Lz0>-@%2Pm__P?0@4yPznC|(7M)_RI@-e>WF}~B|^5HS|6>WK{{udtH^Sbt#G`=;? zTc9OCyJaCWD=EK zcfTD+f2J`|wZPr9%*=mW_XOlw8a#EZ`S)IVUYC$(Ky@Yk!Y^X4D?L1m)s^^`(4#e}nsEG#+T<>%xq!0`j!KHQ&a%YJbW! 
z_My|G=-r-Ujn6@V^uwf8pHh3*a_4;Mt!Yi|&@MTx`utq=q225{Zl~QOv%R!+fOfR& z*MYq1sBNElDf-7w+EMee&fz!HZc}N~-?C@zTU#B-a4`2wdAji!GTQD#VfdPhtQn2I z;Th(@YS-v)ox@Oj^-$+d_@O<0{VIGraWY-wy4_Qo*vC%}Ttxno1>#fgt;kVxY>Wx& z9@7fP=Zqm7lv-aMl` zy1*GY&YGnHS?r#~{yOzj9n*Nm@R)r99c?%LbNYGirhR*iz^(iahOTMQI0PH)p?b*I zo1<}Xb5Ab)L*pc$x~u-WM<$GoRL0i_H)XoUcZV~ccz@it`9EaP{yKc;RJk&C4Ewxf zx8CQ&+sHx3D+{lG_JI4L?M?2_FWas3k%{8Vxz1fmd{L<hBfF0r^FB!~cVIPJfiKJr-GB#d)o) zbsa6~_@Q@B`GMKyM_faLU#HDK)%cI&K~sJq2bX?P%Dq5igE4&KqjxP=zlf5b`oG#w zedH5#Fn_Q3YCt@49Ep{ix>=RUqp*3LA^1+5CiyoLO}`}leG818>Cl86&o|b6(K5Cd zO+$h+py`tYnuH&okBOdi=t=L+j?=S=yb|dVEuTVD-?qXQe&cuGkIGh>Q@-zyZM>cD zYV-N%=BmD^nBk#Ks-RPCCSNu|m({kFb$^+;sIH<%?V|QkzYWlD7uIF=RM!1?<}udV z$_HOi<&lA2_TxP}68q#q#>27^r45o<9bkx#THnl0*-l3vr)c=7R)-MXhWGM#%ezy{&L#)cb__uS90vzj(> zBG)auy>Y=VaKK*nbU#2w{((K0qm1Qa%qfTA{dmS?m|wNy7Uc2DSOtAB&+O9%Wv3?a z#O6^dJTaJK#giaBv3aCCT(sFQP5CPyC*!TM>0c`Q=aem)!=f2{Rqe#*C;#DHs&I}i z=Z{0HSh-}8WVLi&@|SuMbGGIXt8VGR*RY4GcBPxX1i@B*`yPP9Hs}K0uhS)>>oM#><$8uLvjMv8~d9kE9Nx zs7nS|ax_?S3|Mk3>z)DrF52-(Tn}YqNZsc(-o%jBeyr9(>5St``OW6(LEcMNJYQ;D zgnoKj9=LH5xUm$!PQr$H$iT_;p>r9x=gmuRdXe^Tt*PkQ;~3oiQuY&#f6sZgaY}YW z^-jv50cbxr1>=xNnnR%rT_8w`Cz`pIuN`PFCF|HJ&;@%*$-Q5zjKb!g0=-_-m#Hu!vsaa~{jaQ~@ zndce%*YYbIZe!po7yf#{zTbkbqv#V8!*dD0op52|yanl};Nk8C;HdLY#lxZV+Y7oE zV3#C$mWYR4mz==DX1VC?PUOd&{~vJAJM^hf_@;C@#2b>A-qeo#Qh)n2PG2+cFY*2k zJZN{Y-vE7|K;MhJH}kHtY+APBbM3Fl2ZOK>{?rU)uZMbF0yf<`1zUs4ba3g|U>sbP8kIaTl2w)6wmjTGQBl@S#__zoL3(1i@)L3&E$`X_Id) zTHLc3p06g)Vt6eX9i^Y@y9<0+=yG)Df*UnXhVaPG>QElfkl*6xNhkwax=#nzJgEH<~%WEK(=#r<9QdQH|;^bbY^`2{Ykm$O__rR?RR0fQREoB zBa3-Z?_0reijOTtx#Ifhr#0Za`Hms@Qg(J1`Dz+#*hB7ec=LGBbl+d}`!e(=qv?Bkd17VTZ&R?)tf-(tfxDaUZ`tUz}+Z&*#EzF%;` z_Zz2NSkstuyl8i};o8BsyN8XaX`C{m&iq~k&b$sCo#vyDwi^CV84q2qLBXPQV@3{l zgmlu6bcW%vDYcEC(g!-xGZZ>q-JQ(YpAuhX3(E=Lnd#$U9r&sIemq~Ciute3HgRU`(Hhf+& zWAVl5P1n-jDj5skp$;#`+ss?@mzh^H?!UuWsAMdtAFZb!&92Gp@z(r!=04Z3pz;w$ z^Ef>_@*aA#N&AXp)5;nuS<7iH9tQVfpk4kMx|Lf`GyW=FjK_hej=zP-f=XmU1$b;! 
zO=IJcFG_ar;odjlnU)d8hJB3pNQTi?xxm6?6<{Oj4nuJpa?LMY;GDhjHQ5KO%aS;= z^ck2yy0hcV(+9yw!e@V{9o05n;InVRkB?mE1|LJd8I1kGjQiF&KYZ}#ariTsdKr(P z1A!l{@Z)j#Enuyi z!CW_iy>0}9-2jff9vz>x2fOxO8^>9=@h9L(=Ta+7@^r#IiQmCneBbh2D@^l3nD^ql z?KoqCJ-o}-W9i}uczR}0l};|UX&hKHq`0Zm-oht1A-6sgBKU}#f!`}tu7cVjTawEZyLP#+dZ4^c4@b|_~pu;%%{_JbS4$W;?JlK-H^-|9Ip&WF?CMi1Xo7ccC24E$I@y=>e#3mp89 zaAOtt5WBNeaAOO0ap;&$+~}#}{zvfJEaZ4ra&>Eor@A%AQ`qWFE^M6)f3D;?_hDzx z?1#^get2it+^abiCte@NiFiCM7ao)?n6O4BST`q*6Af%SQd#%zTMC{@8PGb3J^w81 zo>S3d8?qNlxx97XQqm0vmnIvn!i4BmqMjVEBDzrrK6K=Jv_G6!k%$x5`~#f0MmX{7 zCQej36DNW<+o9(j==wBH<4gQ+Hp9S)&?Fi^fyNi1v6=V&aAK-Ubu4aGJ%khY_Q8o= z;Ka+~I8pw7BnOVrcXBG#cSf0G@2_AjFZghA93TGG#D@dAYi)dp&*_EOQbyvmdZ5*S z)`j@lsvKa^8i7_1v>MPM9Jq{jsYFLPuU5Qtv~~!`2n!;w!(gyf+Dy8=`Eh*chL1Kr z{E)dw7*Uu&|CL5~PyZjBAbq3*ePm7JTMyEo;BS=rBx1$Iix%}Lyqf$^VnyjrdSk^@ z@|7=s8#B5mvgY8C-cWe)Bjme@6;mE;ZDvl(!jDE)lF=%hh@)H+yBJ`_K^LSqZKnT) zGM;?jb5?p&;i&ZeIbg*s$DkdAlp6N?oM1brh3$x|a*N`4(MoHDtpP7S z8P9W{%jG|h_U{B=*m*WT{B<+WS=m;eG45&?Ui_H2ecK@f9$d-%tb9)U9i3kd-&NPO zwC5u5(X1)yP3KdlmvM0w@k8T*dhCGCrE=PeJ8YnOO@X`aAVlv-VuTC0r)Om^Vp2ihGppULMcV9z7(T%n=!WA1AaULHZt*JpYgdc zjj@+<>iArkM*EW9#*gpPH}}N#d2aBdi5-!J(&q^~)}9tSW}wec2D>E((cP{((g!=9 z!8rYt_EbAbw>KN!bh{=87oVNp6oNM&!JD7P`}DW?UulL>w>V#Z3SYhjpDKC()Re8M zCYJuiDfrQ%A!Oo5OTU+hA2laxo%|WHP546ivA7R@+{>7(T5Iey@gw*{*zqy2U4uis zzPz;tyFefQ&4nLL`2=?K!jp5&INs=E$n|RGN+fNsTP~Uf`+ei4cdJ*|ojXp-Wv6=79*dMOJ_V7jGym+<|ZKW?s-|*kegMaLU71vh? 
zD}D`}bt`!57I4?i;IEs&iZ|kW%km#xe7240EUY*Ne7QE;3KLe`G9mH%fZ%Yx&p*ow z(;V@2-n+8wIAem}=3Q8^_(Yh66_*h%tZ0OWm^O&c94n2m;wR`K68r6Nuw&vm7;Mb& zKV<269E>B|-=KWl8R%H5u+gvL9OgUk8OHMZ$Xuo4@D3;KzlMITB$my=jtHhV*Md0C`qp1XMd zndHx7=Fc2g!#xeUe{pm_`6CN^_2yB%`_g{=&R^2*z3*v_Z5_7Lyf-Mzn_rJ# z#Y*N$$8z7CU0}pg@@9`FN*r|6C(q9xzw-@KN3HV!BWZ_R;adR@ z{by;;=;pfdJeEb(LM7I-u{DS&ae+e&azAaDSTi-fJH}MW*QvF{3DtaoC zXpdyZ<4M02vePUb?aiM(-kU#*HgVG?)$9w(hSq(5#qJ9JmpR&Zxj7&0W?bn#m-jrQ zeV6=D=)Kb8eZZh^!M|PQJim3eAHeV6MM3PwXP-sB=X&!wH=MqRyzO`0<+G3eC);Sr zVmH5b+G5@nPv3UnpKf-pH@`u@A)wIqCGPp1F>|YcBS)$K${M zar9L53GK`5X*PY|pUCgpczy=$pAPRvKUlf@hww@KP~XI6c9QW><*Sx&iz{fm%W1=} zB1`d6r?#{V+6gj~`Y>Kpbe|cvrhnc648*UtLDOvi1dSp56qF zZu)c?`FiMslAYmmjDX6$_jqihxd#xZIRm~(&M;Ou3v2lO^mDC+^lrvCX|d?k{H$<5 z_l6B7{U>Ss2K1}nTtL}6mvr=w6#Dw9K7YgXjlY#)+rnP5`0QXjc28w3y}}##bX`%P z8l9HG+UBFl=={JZKF8n&tzXu1htB>>FAMygJ#wdG_>R4XTkH*0QCn1ouAb z9NAFnXwM&bhB2bv=$WoN?#JLiLTASp4mU=Wa#x#sq%orUAs4dCD8JIf{eip>8aZ}q zb%$$731Ov!tvL1Pjh&k788USYvLT)Hxx>d!ZA1RaKf$@f)2D{PvVYK>NJW<|2@PTI ztjN2>GyR8+nU1U_kqcMy>s+#p-;vxe9mz58z8O(?#$5+EdCZ=KB`dY3rZwhc4tIlO zh|RMt+<#TJuyV%sv$r>TVky%yoddf)wGE9Sho@mVPcnO7A5ML^@nQI~8XgxDe?Yo- z#uW>1o3WgA(tjiwJNFnViw*Fn^8c2R)DSkY;;BD< zV%DA?P;MD_U9C1ei<^;=k2!`nbR&bLm+X$W?QOKJ%H0CLKAGr^enfj||79gMDn8oY z%op7Dcj~?0F{q)Kr_SNo!CuNv&1tGD=l0SU$Lnq9=LDaXQ3usKk9retm)96bod-5l z8=m!*l=raVY1mBJs>9if7S0$@wNUp~`ZpT`7KfmBG5qO<)-uw3KpVXQk18EW4OV^W zkKo4;Wt>gefBd~d{DR)(#qDO=VGXhYyPtN>gGQZ$7Omzu9Ecxt^b?;i>+RS1=pCHz zXFaGr{aos)zP9#lOC~CQ5tw{J{Qckf|0!{<XW?I|cAG`h8PITUG`3+&{9WfX zMichNn>NjsY_QLAYfX`e%_X}X%(0T$Rp59qw0Zu=VWjP_e1e`lqt2O;Q+ygoRy|F+ z5MxLGi{S55yc-FAaI1OGiu=KRnEy65B|nqTrKGX_-Ws!p+?B~$k&=3g{-obnzg-I~ zzqezdzpvlh%2F%Mx<{84TSBcL_J=4W^{pVY-1^l7+%F8*I#Tr|(y%OB9arXm(i_AI1 z!W_Hrh^-Rdu;ykf?A~t}&q!`t_(F4WdN2aL_s%f__ptAic=w-u-(Tr3Exu`dwM@Ju zu(|ruKuc(_iH%zD!QSx;>w7bF{h0il>)vPY=tnc#FNyPiS)Bjzcx$&BJB2-(iThdrL;uf&OTB#G#=YLP$TrC(Z@e$vOg>|TnXwB8)1Sc1n$v}qM$iu<$PeXt zHEF;^?Ay!&vy_2ZO2G$LpjW`3H~XfNdu_9=I!CEj^IcXuZCz}Iz45a3+eQ86u@^TD 
zo(zMQ;xa2-II5JefrNFEKJ;x1S8iQseNTF^clvXPt8pfrD4zZ8M;bGn`G!udL+tQQ z!ZqH^cj_@-^&sBiKJqLKU$qu5zM#I*3tt{3p2m{KhWOYHAFBr#KkK{J{+N7()g-?h zV71f5NtBxv#<2ZvpKlK3j8!>H|H*Q8{}bh8k)PVZU=Gpm>Fx-#eF9yKkA#i*y?zZn zyxO{5zNZZ9jBpn7t9O85jzw_quC0`9$I%?f+?_v-^zY&K`gdmfQKs)I=?E@noxP@) z-|LaVEvsXzTF?yyh%3L>`2H~cTeq09;iSHyr^3=V81xU}@lCfg?vOoNBe;1#={MRc zPqd9R41Cad+26r$G`cd~y=kYvmND1gnWd@FzZClKHR-o}gWLYXHsJH5-!nh>&+7c- z!QE$#_8xTzL(r$hJJp>z5MHJSIZrdk3B4Bpl>@Pi0m6UGQKfSY*G z8)XfY?;8-8Z&z4(To#YpMO@88Ul2bF`NUY6bC9ykxt(v9HCl7~d&n>OrL^y7iMfnG zRu+CI;I-yz^@m*bdFdv(2O{x4mgXm|GTSLb=cxKSL#45M6vpLI?ddJ;@ur zhc&MH>wAN&I%rL&z9wBquM*m!7OoJag4tec|?@O5{GH(&GnJb0kB_P%0dg439e zkCP>xnlHF#559KSk!BKDSfw{l-jwaeI_gbX;2|BFAKX>9P4{zv_!kC0yJ zZYCXTJ~N&CW`u9$yw!Lx2Wg+^C+++3v^S7;53)yPnrUw|qNB#KM+PRS?kDYzc-nH( zu1-ul*N9#;#@t_J0Q?996powqJGl)`b*2%C-f8ei%a$2 zh`*Z9RK`F04%N%7Qx11q4za%9uJ051&RY1ycg`Mc!wz2QFVJ_+*m1Yf$?y1F+lH@7 zeIL&EO8!T3xaTNmP#}B-d+IrZ1K}602t;y*@OwGGLj&Q7U*&gLAOam>*4B}n;eiNc z$}dGEXM}miPWqWhPFf(8k{&p=bR=nJ2ckKn0_ev$tCtZ7&&3~g&gejF2N-n0fc#54 zi}JB&Uviv%kQ(lG4yDY-58N2+46fvNEWc&^X7D?U-_iVD!S6_ZOZgqe@8$faw;E?! 
zbNBY!@Bupozg>(m<<&Lx%2tPaK&xv8eijCnweC%u#eIB9tw~05YiQsVyqEIsY)wjX zwRR4?ywzzqS|zIubZ-WG&a9#RBk)-i&M7iwlX0F=eweUB6N{oflZ&ETYPZg8$CjX@ z79T4sRL1buo)kxvJtO#16m=|NFZ?WHgYwKZa(mogVGjeIfBsXQ`?K<#m!tXI?Z4r< z*s5!>b&$N*T1stGWSeBxe-E={PU84I!Wg`daW7eQcR%TV6;F4HtU}&A$GDfQTF_70m*QzpkyXf> z*BJMbRptGpZH}isMP}evV;ke%pbfs-Pg;LG?J08Z3L|=mai2xnY5kd&yUR&0&E~~;Ht-k;Dw|r->pcC8C zF!qTp18Af8*nevQxE*_eqm2DXPH7-Li#y*6vH!_(nlj9jg^%ACz8dgkaX-_<%lJ(W z==`SSmnUn0CBM@2KP3=Bj)lP>o-ED|Q;x8SC(9EU{k2q6mW|087>KMZ3yjJ_Ue4CH z!GX-XjA&-FEB}Q2knVnm&O=%LbM3vxnmE1<-Rg~g?N6LRLKaFU+|tC_8{hpJ`x^JB z8}p^KPYd!u@vkPn@~;oYHh2>DG9BENF8r+gFC%~5c|M9YOB%Q)d`(d_GPfw&Ui&68 z>o>@(5v}dD+h&GVX-JC|Qa>ygZ&=n+$wpApRc zA#I|)AJS>vu#j|Nw*y_t!ti0YW>MqdTV@A8AK@bIfeoR#j~KeEBHlD7u3S?#!+ zJuv{O8Q)H)_J12pZ?->c-(KwxF1FkM4btjvC6y&w^*fXLA4kWW$V;$qx#p6#_&VWr z^LJ$MEpv}BnKf4$SU4R$%t+4eC*$K6UwyVdiM2_LyU58KJa72VU<{jjj}-K9l+%n3 z=|{|MtMN@_pksgLi^|>npVG|w%+wjyx$%{j!ZUz}d3iGPXq}V1(u40)zVu=Az49<& zM$NDBiS;J7EH3G)1UuTZi&!%GwNT1gpo%YE}cs?M%WWpj= zo?kFeUqc@X>FNeD?=?Aid%n%<^ync7hIes^M z2^T^qX~L{~LQ9P{>BjJNGQ5X8V$M;Eo3n*^GJ-|o0sZ%w`jh^H^A$dW|LQ}Pe5-eQ z+afowo)h@<=DTHg;-wF5Pk8U7@79g-w#~as{VzROD?J~$`fSS7nQZN|t8MgOd&~Gy zH_vP9?DlHb7+uJV@av_}tFz9|ZYMkqQ5VCP46Yd+bkAY$_7|nporn7TiTw+W9Zw26 zsT@z!_-`74G1oZ)BNsXYX=9A2&K3QdwYN!=&!Wkh#5o^mI!>Lkn4`2ter5#c^u?py zgQA+JG9NMm>7yJMi9X#e8++Pl8^d?$CXfE{%4l@=mC@X^u??ZgoWa7L{}#$Q`o}@h z=Ut;3FoL@7qG8x_OyF!NXKH_t;%%#Tj_O{=-dw%Q+qRJZ2dA#P)B*;XmmBCn+h#^x{M`z+5hUBrkym+ z@ifkO8XswDQ88?PmN(*7Ed~RNBmECXq_SeHpy=`;J<8}7dIP2>F*2mRPl?@-%a8;3E z+P}W!sOWjwfjv03pTnH)9WOaDdtOd<_PmXLEgOBI>lTZ0&EIQzQ3jY)S^@|?B z9y6o6nLQot72lGRWacA%YoU?R?b(A46xzZQJWXlPp6>Bt=PCK4ev}3;(u0{_FxSHi z>Bq8PH_DH~lh5c|C*yI3in03%cLg6N-fJ6;4cj;WXiizI?e1-iHOWmpdX|Ss*Gamq z$V5p2@PPE$$ZVzk8*!L77-MPOnjefqw!SVch%diq)IYv^aKuf`w ze&$C_qxXKSc4ATF0%{WWG?#*Y8%#18|ecl=0O82;;6yr z>VoA3op&t9hvyr-;~RD{?|15*GaX=-@YYO!T7Wr|KFTEJ4^uvOF!(bVgJYP>LbnpX zVtK(aFinWKHhn|eDfd`OG!iQ@Wyx8d;-l@@qDE3KW9@QTbl=o#@2|uLJ2bbhp!3GM zg2i1$Jp=bTdMd!q9mvX-Y3z;s8}k^l7XACKkFXc$<`L$7G#OdI*ezo&yfXR9hR3tV 
z#Lu>mGS7;1W82`M|0=H1UP-^)3!m$Wx0*5Hp0c`eKJOm`gxd2Ei2*w)nO;oqd< zw;#GVqY?RIYBaou2c46tgBI_NQMB_o(~t5c@N+9;UvsZ?`to}a{!M9g8*;=!o7G63 zY%RsM$2VtoWo*?gKStKkj_yIFJ*7VBfkp+7(8ojc%l2w)F`2hjPGHjPKz_EnA(J{@ zGIe!h*S{O(v-TF}R}40mIG8^JSL6TU25a9)adN58mWR+|U*!!PfyN^fru$!I%**G~ z%0*Q(3jf1cl7ugN?L7!rtX%ZqjKVe8N`B2K=j^;c(}8ag_+|GS^>OK)^RN>?F|Oy+ zc8t$sj8796j2jS*W%5kq$r%vsf+rEilFq4^`M@XHPweMz=FY9NqQ}5a?4hD-^0wJy zU*oa!vG zY-lOZ_js1^RP)p=FNh8FL^r#JZ;HJ#a8t-Npf!!Q2wlp&nt8d|FWt%R2DQ~~W3744 zN52s6Rh#{#g8EOqJX)`IoP2q-^Zd)BNsKqOe<$;Wga5*p<9N#H3xDE#UM}sH-dUWF zepvmAd#%0xSv9Olz@~@5rrQbgY#o-bdU%K9n`3zG%-9Zhw6xR6cYA$vO8u_;%rOR5 zwb~=u|3}tIW;=}J{*dL&XCCZw*t?j|e>;wgz41^TeTedebHksVY`Y@$OWH2Nw>)Uc z0t4qkM;Lk{(Dn5n#8wSUV*L$&-9_&`&0X}-@TII<=#RPJpl$zl?W>ZRcHXIseGhr; zd%+V>pRnq9j(=7WcCdRL(Fk&2U(V3{yzB9k#oEU%cQWs0-0bQ8SxWy>Qowwtoo9c@ zy+YV;%;I<7`OZM3O;Nb$v2jTI<(0mwdtvk#Ou(p*?@(H>B z_Jxc&)(jntw-B<)%igW9>Yh>Tn~}!?`olK%qdY&W*!0%JkIh*$Y5m>!VVdRMUoj)- zVco!g*&J`fzV9{a#F#K;mDZ|bs2BbEsjB$>NSqV5*7GTsM?c|ii06@q+Jl_OxdOX> zUDSCmbsPO>c*0zyIX=AL0+Z)k?y>feTrSR!@x7BaGvy|{QhA?$Lpn9aUdH7o@|%*- zw}r{ox7C+5Z)*)0LjT|LEcb>n#zI5u3PNc-89d{7GI?@%Ch|lxa55xyKw_QA?CPxisAOv#mt zd-cH&Z%pWed{ZA#pV~vX*#{*<<9+ab!ou{y@ENoz^WZCvgsLPK}EH z2U#1d4fg(R{{1$6U6Z}h*U`DD4Z3W6X3FXG;ETtg8#%4Ca`^^63p2X?(K5$}1xd-z0Zn}q!Gfc4y9 zJ$KRerzaI$`)VW;Ip^@a6+y<8@k?J1ZX1YQwfJiC16<46Y%lGxn|hdY5puJB-W*G} z$GSDN^w$NQ-}`kz=;;*&ovT(b=kUL2MM2~kJiBygG_-*4cMw*`_Z56!%=?|ZH}L-C zih_NtBg?46tSR12-ITi+nYd_*Yg6Rd8Ev7be?y+XDQI4CNo%NyU)QRckxQ?^-`(h* zP#xb^=-Z6ePy_#;)c^d}s*a@m@UgGAMbePj4x_DVO39|&DaIyq3{*c1eLUaeS;})K z&tjfCc#tdVrNDZ{W0=bo%$-=#!sb z9j%w1kTINom8VI#Yw3K;r=bDn1Lp+_+3QVW9q9#UXk4ezhmIgCO!`(m0v~yvdpt{d?&MkgNP*_?aQD^!aQ^<}>(G1<+BAP3xjNc;?CNN`#@31}0weg9 z9MgVbuA|Dd?d#}_%d*Y0qQ79@F%|k$zYm4+nlA5kCMWyP@wkM)OxzFA)k-#NKC%0e z&H`<|f$|u`%b)od<&k8OWKuu!s4qRpOiKQwOJJ>0pScDZ%W5^|LF z><01w1$d*mAW^oj6~B<pyvW?#MZH-6S?&1G%G!TWx4!b` zLFnq+Ryq%B>XB)yYoW{ZW!CT}T+pexU@`A^@=iTEHD@g4{d>GKcXTp$bg%aY7BXg8ceE{J+?FxEOl-s0&0!_8a1VprakFCJJp=zk7uzoPR1SiQTlA_0T{i(_`g=dlbME1P5f 
z{{h1bYYW4Nmc2oHzCnA|(w?=nC;unba{pLaG(0yc8d}WvI|&=g_cXpQ<^6lSXYf9b zcXTzK6}-o4@Y~7z0^aZ7J%#TBYvl)aQ`mK8TlkgPo5HWSHkteRDeUuK3Le|xD=1}6 zR%N(Z(~S*gF%CkRTkt;!T{-+t%gxdzb@WW_)Xz=Aio3o#TQrkUT1#T^^LaDrw2k$$)?iHM%zty4#J+{rdIC? z_9u4G?+Seds~PuU*K>{1Z5XT$4t}|0g8|oK~HGOcWp>MMMJ99r0_rkm5{wb^Qjar4jS^3Q>mH!Fya-%2G zT(EM1$Mi?G4c(m6;b}P0w<+J*Wc6#eTi6tN9-mwO6}?1yM{j+Et&8aCql@_H7nIdU z7ZDP+Mh9WeW5`a%rKtmuJ|K*aB7%;huRb7*J|J>heZXi_A25}AbZM^mC-ebVd@Z(W z_Zi0Y0D6uV^c=ZyJ;y-!rgPBpqoDQ(CgbZYuIsoe`KpE&&mI`}yJzV-KEgjvqORjA zNn8qh2WoT?cNkG_7?S; z4?4hJrrag`z;9!#YKe0K3#BlQ2G&?uXlRXvg>sxHvCxV$TTkGi6`8Fo{a0+-7rVZ# zdvA(=(a(^jE@xXPrGq>=7>mfLr+{1F+x;u|M)MxhlIt)W|qS5a3G z-aC!4QQYQ!I%QMU&%B$$n+Lc3)7Ynz^DDO8)tm+vw{g#u!`OTCEsr(zMc^KdEyso6 zo^h;4axVXW(?@7eajLICGP-^l`{cA)U;N~odV2iy`mGjz>V-MbFA8(4<(?^Fjys;x z-mH1Q>MoNPE@YPr`z6;{o^d?mTSMKxg4iD&(bzF(TX^q4Kfb=vlZWLKZ| zKY<4bznk_K#^@Z)_z@;shc4&^o|kym(^e)=r8BkkzxbYgrjxPJuO5gs>t6KccDh5v zKY_jDdZ2miuUT~&2K~c%M)0KZq_>9FQ~~_nbgB*Jas10ia(?qI*&Uylf569)p-bZ63`NtY&_nsjT@u}Rm) zd|gVPs81X#jFT?(A?8@2@7=C-IlPg6IBnis6MssVdvG0NWqo|CxW2sIBD7nf`e5CV z))4j=RR>N#E)4p?>DCqdZ?VQj$i{*x^b6rY`bHQW7^Z)mz=0|B9UB9t(1&dNmjaD8 z_DdPwYR^q>KR7^rtKycnL(o}&A#~2&ivD^lb2aqd$}jzF0y0r`zKyvcl=3y~NuZ&- z-J;=-MHUSyw-8SGDYx=aHgkAL<)+-sLz%~l+sv|PdpB5O^H1F8E+2VEnTJCu<-|#4ToC70>VGR`|3d5ZIfP@^fW1s8#lv$g53(rbI-V5D zE^3Q(JIwx3e<8B8+R+wD!7gUSpw`eb^19PkaBy#O{!#qgYOmtyuZ*~xe*^zZ`M;e15A(l_|Bvv01^+MS zzmNYl{J(<#wfwJ}d4xGov{&VhxJmmk!o7Aor=w%6oOp6iCd%1f?VI)8kGT)}t^E$_ zg=|or?jj7m`_EMOx~Z}9`QwM1XGRR8o+iLTo0pXjSk^w0QS z%!xkA*zD98wfiY!QhP5a`YK~pdpWuEQ|>s}q<$Kf-g_v0G);Y!HqPMpQeQz2^TP#T z^0lKZ`m{IuXZD^nujL;F!=SUvJloj6V4_jJ{Mq> z-vdKi`;lNd>8`bQdxyQ0BQC};Z4={OiTSk6Pcpx}ZLUpd8}C7Ud`PF)m*yMmESg_M zpUE5^W)6=auM5$m8#(N^YVCQJ=6H*yHH1l*DVn%Ez86hfCK%;OG!n$^|KLk&Y)s=DY`MBWil*!qF^eqpcyn{-17=HTi z=Y%`^gf|e5Jz%=}_%+zeXfIsjku&#ZzZO0@lQ^FcC$a1r&VP3uDT?krR&=85D=7N| zbdjNOY{Q4x$*4W~tA&KNe8vfE0(UioU|n~(0wTOT;E{gF2hM3O`1+B;kJ z=kb1URUe*D$lyHm&@bot+ifSusIm7X&x=N$%JVho_4@MMLz(j5K|S-KGupRas_R?W 
zSgL;LR)a0c=p5qoW|LNDUht3H)^FNy!<@Gs`A_XbSm|}&WIX+F(rdh3=CXnGNWJZ?1N+MM9l)>3TkfBEdm08T-?fJP1@a+eTl@DJB=Q0j;S5>`p zd;>y(dmYB_7{~sc_RHNxtWVLybRjEG$WGEPj<|ba@dmMG4lah}6DRkUn`gA+=bg54>S8ESh(0qW;VLkW_SwdhPZoBs zU7eOJ$RbSh>;r_$mNA`nypeM5Uk)>h7F?w)Y@|-cgjiU z@C>?p-Tz>KiF~Kg-H#?u>F&++efH6p$M@0ImfAZn#s5nua(kL^5BuuE80J0~?J|IT z6vxfI)As)Qxx`caN%8nQCA;J8BEJ~+|3mS82b=a$y=WiRFYS>V|f5Qn(x|kwsDN?Nfcl0{*v+luO#a7>F*Y8gx68FGoDA^HGU^+D_h5NKk+8V;~k2x z>0gj+0{g`6E>6dDpJ+!8w9qd@$O5gw|C4@T*V)GD1~`4o6R}lU&^Ibh zqrDDlH2Yavu!vvrP4>3>eGnfTZ z?f7f>E<55d<*O{mL!PD@(MEZSn?u~tRMu+9YQ1Obo$pIl=J-lQ$o z940v{-K`s&#t^o>kJn)9S!3+~ci+*)#xmgD&_d3eG?w=?(uPDz$ol1Fv+y}Ie`V=}wc6B8vrZ(U%fqUtbUw#fbWoLvlda^wa6{0s7#2Rsx0bazmznb$* zIjMo1bZ=W1bC>+vc4BAdq>UGlzwG>*i|E(1d+d8g+k@B+m^woAs~cQNJEdc_@);Hg zS7W=**@T%z#zneECNkA9_r11yxtF4PKtSok>zw{nWJ?QaJ^V(78qq+GYiFMRPfiX5 zwmQxK?s?$5iAkmoB;-$hJ~Yr*u$y+M;J&Gnf0K<3dxiWzD%*DLnS#e;+g@`i;|AOI zlFQ_eHZAyj`h`JXkv+fc+eZ$-zCF*fZ=YX^{tWqFI3Am(Jfj>xYUu|#XHh-hSW;4K zly8|(ls68!5&~oHWo-mLG<~`0Z1w!KqNBM*M)~o$P5HYMiu1`c4MFwe5Fo`~wKU9)Rgk9^}d*L{Cx zCca{Q$b+Yg4ZrNAj=~$!caZg&?p|qT+?A#jokHJ7=!tbkqc43`eQ0Zkwg;fCrS96E zOzzr~9Y_Z^BQVxuL^;dg-@v_t)%cci zAaj0BTvO+Ojh6bF?@a00ywT&>w!6VTLo`OKrw7K;kM%v_+lJ7M-e@~A*q#gRxuN}f zZ1Xu=>N~T&z_#(~WM0o5*8V={iP~6?FX@mRU5D=-_~&8Wb(X_DdoB5E{&Uf;+q=jE ze4k65!u08b?D6RCn=eaq^@rv@ySWdRN*?m_JqWsgT9?+-J=q(r<_ySBT_bx264qVq z3Up7j!gN>RF~;Lj#($VTta@_aeRqhx$I(TGf8cO0do$b(Rzsh~{-fHD9XET~)qcm4 z5bdpd?IQHueYAJ=bB-l7w3FLaaY{SiO*_jbUv#M-<}&^VIq(^fkiHptpZI<*?;oLy z3#|BV0DpPGLc`N-wmD;1^HU}Gp~ZDWkJ?}7HUqBfd*qJ=x$NIJ5MQ`?MxeztswXke z^WdrMenm%O_^*VCCDTtsdkM5B#{HP`yoX~ObU%F!eIONoCt8EGl82jo#s6DA(|0#~ zaR)cE?(4wTE}eKSuJWEv`mX$!nRP)n9i<*?>dJe5S~s^xxK4iOl#gVh@*0T$jgQ6| z(a9GKY+8Dq5qRL+jsSZrQTdI|)}1r-^EGwpJ*wMd(9m4Bd#3ub@?3-6QM2pDo)-FB z3-|7+AD-xQTOB*q-)8@fF;1Fh*Y{_(kVbc`im#Cz`XJ-RH_Ey{@zuIHJ<%23=zZ)l ztAA>oe3vp;a{eG(r@iAboO2p&`r8Zbb+LCe20Mt+LDyH|AL%01*e1v}n!8N4fEDIb zSLuFi8Ck=)aF0VK4fNjckxs?M9ktS_petAzMpsb!)`9JAOQ#YdZkX|e%v{ompTa~L 
zyn*&|ldq4qG0z!|S3i|6v5a8-U}IzZ1c!g#1jo*IieD(gPrhUFMaq|_*zx} z4C6PT+Ue+ytTOM;n*^R)5=rwlMwovJ!FCb8qbo7P!)ebphM9|$zt(70j|Jlo0~w9l0voW+?J&5?ySbB3-a37_jO9w(232OAdbN-yqo=4bgEW@h;x<9VFt ziJ65D8PPmPl7ANcQD;ru+|SEc**TiEit8W8g?tw_2-Uqc(>Ds4UctP99MFD3bIJ_U2E><& zzPj!mbDSKaZ*E|mwA9^b%`NmTjTxQwY-b(g`15_cpQioha@JhES&8i zEHcY@<_5~O)2VEeJ`d$kZWg}IMMv%!`1L(=ei@NnvwWuPQL<_tp4q*=EHD{)wGSMB zah@ZPwUaaV?AOIyUw+@>m+?XAH4Yc9gjU+;%i3ndKWLj&eZQ5xZe+6NJ})C`WNFjlYk!(-3DtJ#mq>{~vqr9v@Y8?*H#S z6Ua<1K*$9MY9>J>35r@E7sX~0P?MlkAhp%@Gzr)?Azm7LQED-h;3XmGX)InE+9p6- znW5O)f|d515}@q?@lvqWQ`=J}LED6QLq&;7^Lu~xo=qkpvHhKXf4{%Jf6QxU_Fk7~ zJ?mM|dT#4k6R0CS&uJ?z(#B_a@94^E^Hcnv6)7iOW=7<9c4U8q?_07*aNXd;$~&FS z-MMLI%%QEUagRBrHHCGiacp3Xzu<~F-YNONr=+l%^({Hu9a-Zq0JF|fTky>IlBG8~ zcvZ}$HSkFqyO6P1fOgjI#bpLoY3Sv7yH2snwGT~owvf9 z<6KW_uid<2brK`^MBzFZN}PeP@YzyqbAb zd(=HVR!+45y8Vie`F9Z?J-}rl)_&4Wu9C6JXZ^}q15>TFu>_ir&avm@xYOF{onvh1 zBA=A4t0@PSotLyYUp|uL(9tp8S6T1v9H>s6*9*ua_=#U(8^AuWP__Xxq?~c;=h-pZ znHA~hz!>kZ7^NQ;Z;EWwo)~{nxwGDxGj_i}8Kd1_=Go)hHpcsxtoOsm*FwJr`*BI^uGuyCT45Z?BvCdzDhYs2d zlfz&Gd&~1Z_S`>#ZeryscSGMR4dR%RL+8%6;{niHjw?<~IS{gW7EMR=XXO!49|nFb zS0cRG$aq2Heq+{fygz=S&NWdESU$OU=yx^0hvNs^JLLjdB|&WbRy>*RnlQ)tt@9lL z?wN3T{R4L|Bfk@8Ax(03YhQHes$rRA7vde@F_M8jsY7*rNp{_@ji`Go|3%Y_;Qxxh ziH4_Cx$lcj?nHpEoO0J!BL_)0vh%PhckxheUR!QP&O94?&G9u4u*VFP-R(cA@6pOD zcUoD3FqyHZ~RAF7?;f8eQpn_u*QI5^z53tm!aFBiTh`8a2l8DFIYUy|3F>l zf%dPMDIxX0YOpBZM>)Mund!gX;ywC(x&KM!?v2RzbnQ*JnP}-hvOV z{A?qsZg4Qaj-1~fV6QaI`d4Rlq}Q40lhIus2vM$M+TvB^NgeI;@PjGez1>gXcEv$j zzb_9wsk*i4G0TgR7#sHpiEs5mTRGDfJs`wX; zuCz|S4h+uk8h$I6{fD=VCr|C@`RvJQQg5P{zwM3z2X1l#nxU7~5C5y_|0M9?)!Ws* zT=^rCL)Y<)oJhN*-$>^<%kYrzI`SOYdbh*`KU_<`qZwu!38XBad+(#>57-t*PA){W?HTBJ-I-_F{r!5{3bJ(Zn;j+23owLNsHH>nXmoBTgY9EJShIz zO4-#Tl_%fvkg~&@7(5gui@hCvYX#6?R|6Cw(DNs z%*>8yM!)xw-v-Y2@o1a|-Q=_OzC@r#)lPC(nzF z-NmH6IO1C>u~G5c z;&G?A_s7YrUinRE8yb@Tt#X4=KZ7#tYlRt<&qP00e?IUOUFVqFm0Ow3T`0pp+2$8kS{ z@ZJeb<-k`t6I}Ba!2 zhw;$#Us?--E0?vM^X8fd 
za`3HGqA$rlwV3=SMP__|4Zj2Ax_p)XU#0&>e!E_d`NRxt?JLjn-F(Ac-AmSZnM*I92Ie!1F_G_RM>+FZ ziH~qmQt8*>J0TPOx!80aJeRS=F{iIGr;FDlY|Q2NReZk&PG1A3OPJ%Q%=o81!M!EN z%-nU)G2d_Ud!p;Yb>-&7P$hHS$vmtAAKTdfo@TCf=Sc^B1?XFI>gW4ao71hAw%=;3<-CEAPmox9rWg3Tc&+C5LxdwS9$rb;U&z0Dk zWL&&%k}KgUgWr>ki`HG_niR^|YrX|9sZc%D(cJ2;-M!S+c-8)5=C*;pZ=ml^a0o2c z{A!O89Gc&T5%XKl{5tip%ac3EQ)lR#c@EP@IrDt_J@aY3*EjPWW?svg@6+#@SL?mL znRm@^IrDz{J@ae5*SF>$8Z2l2PrqlLt@rwd4#N1Z%b|nQ?>RH5_nADu7kZGs7_BQp ztB+^lE0YW>-Vg`hgf{xi8e7`q;JHPa#_@TL>)Q9jlWUPf`?i=t=`Oufi~-?haL^1Q z-y!dDAJK( z+z;M?UIbsIq5CEq+e_fRRnTIXZ{4|D4czd3;?Yty(!b{$2Nj?W{-kp1|REuo+cRAHH%%yn=F2N_Xty44d-h$6r(&?0|P{29I7~ z$xTgY_0B5^exlK6TLO(9F%$duvX3o6XDEXH7D9hs+E@>*C46XFb&rvk8=A=)5}Pb^c}X+?k30jCPu+o5H z`aSRqM+RNbJalpHP;$NYNy%@IvX?fZ-)w^Z0*s4#)|uoc%09Hj2!6|ozc}*_1maAN z9|aekFW(9IFEZSox&U)7yYET%*EGJ5MEVWW?{$%Wk3{+n(=UNH z!}{%w>emNvP&>ogr_W}_e}vf4J-}((;nG4A&~F5v;5@l1uiypjIS+vgmE}wgezD|! z?f+p{TC0Q03UIj}zee*IICI(--s=__8=Ur)>))B9gFM$vP!G0X@lg-M<8c~vyeWbVe|nIn1b$D``WUhMGH7c*?0`Z?xov~g_y zC&uBOz+EnPdN;P*d0?Y_`Ge?{J;|+%XI(qI(Hc)2Wjx4$qm5}w6m0P1e|k*tuG9NR z^q4kB#+1XD{?&cr)~o;D?Grap{$JTAGS-maZ;dPH1&?0hBSc>x8RxVx;gIYp`OtDV z`MEr-1^My!ieItLw~`k>Nd6Y&$DOO-J=!Cs^VMKGZ6;2Tm=-Hu5c}dl+~>rU<_?&39g`S!Ms>+2+oAzKkUP;Vgjge0ebI5=+=apYQmp2WCT&v#3# z^C+*u557X%Da1$C(DsG&-AUeKod@eo$B(*@T*%6c7CUbHCRZ_cXN})p1H6L0(c)pr zq0hpXj6rtAck!)cvj??INNb(PSUkv`k9?tgb==!)p3eae za(L`${d(<5LMmTi;o!G#GQna_Sx6S<<}z-OzQe9?T9AE0wG`1}_6^3#jA|A60*%3rb=8uQdLVQOm* zyt*&h7-$Cu(SouxM;(tsM4h~%F-6(i^=5}3{*<#08gzvqA@m~oZh(Wm9+}_L> zZX;I9KXGd7pIp-pu3%38jj?^|j>3~41($G)&*I|jdO1{W}Q)9HU8**}0Dkz%z^eXD)yTkTWdYM=V=PBaGIo1(i;iMOF0 z)y*dN)k7=FSL+@f>kba<%r5=2POLjPI5*qvUd(6m_EdP|?m0&NmNfRK2pwmNj%Sqz z-&YKR=yiq>G=66Do1Vq^_H%4GHJYBMK+jVm^qdPlYdv{CHGQKmQMC2Y0$-B8A70=~ z)%PO{eAD#(?FBw;6rJvGE%1%g_l5<^3DfC*aKRw--E*E%F)-T|9HMbz3C}euz736M zLf>7`W)C#}Ka$Jn&!TbGr*P17=k$#)@?3N-90p?HtISbgdceYAAQryL95r&s;fsth z41d5jPd*glUbVJnfDd#sJD;?HU6xp8^fmFsKH}sukEXPB^`x{Z=jeaZj|bj+U;_Sq 
zcx#?%3}jcwtyXNGcP95{!uOT$v55bD$S8r>5^LQBrfaONZ^O1IX?2H>^o-*L&wvFZz##b5NW2JLqA-{8N#wO)i{t?fqID*fX0(`(gatxDf= z)~buOYOq!Zh$Z+X_$rN89?faiK3)w>eZZ6j4yz`5T5lFytUuk=PB6>yOBf1p`3%s5{pr>~zJ;VT-v z@ck*FMZC9gKHg>HJUxPQa-nWyjMeV5&^uUP-0@N~F|E}omXCRAD35x_X8U)22G~Ai zzc>j!Y5o7BHQMc7wq0w}Pz+N%cQ+8jh4%?5rBHz}VOmx=8yz}!e{5|_!Q{K?KMZ7EIorCvF_&=Zj zJ#!tq`IJyKa?+1@E?Lrf-pI2@d6uiX5m|3G|Ig#U>tjycG^_4+dG6zRxINN; z=Ob=RGI-}g_n~i|#eKz;)qlaI?;SjsPhP*n(DH&0ky|-iWHPoCo=ZM7Bku*1Q!dH* zjjSfvoacqqmv2D-)n~TVURuaxpOV~J2z|e^)`MKeJ+s!kq&vjs+-7pZ5Z3P zBG2oNt+2b1J3NpP`TsWKs{t0RyJPI147U!Ny^L+F+?8=+<-X|aa(7T?=&KfjB>i0kBtJ*mEbH({t@gaQUH$pD= z_OGCRMuIV*`SqA)t0(f?w$)%ixQw_Hudy`af6V%3#$ER3ILr6Hj#!b|wKWCh-uPC( znel@3D7BGYz&R4m$%Kg^(EsSTkLb9vf|uf&57Gy{L|KJbeyl+p|DcAhms(>1Q?ty(K(S2BNYbU*wpbI)e)zS77mM25J7 zSVyPr?7t7@#Z#s%$(US2IZrBnz%}mr1I$OA*I!u2|M=AOxs`j|^=|wWY7ZNpRj%fO z^tsvI33D@w7u8n-3u`kaJ1Kpx1Is>as$T(?KcW{mxKjG}En>aZtuB}ZELDpXUpN(; z@3c_I`TW1!$XA@i6xu(5tWk|l-Qh~NWImlE3anaP;G$mF6-M5Ju4%2E*k}I^zYUbr z4)}c%y!yEp9eY|z4YHMFt?J7){`A&Vb6Ts){FpKUk1?-rhMyQ%?n1z?navv!4U6#$fn|GS8Fm~^a z;KwL3@;(b~aaRs`4$==Mb5`Rdd)-;sj4T}MTn~9FGInu~0=9~GDR&lqtWCmZ%p9zv z4gaM^-n-2I{&Tu4+TTCpWWMNIb$qB5fUKcVq?P6n0ImQ{BN?@tN7MkPs9y}kvbsy`hl0Lea%Od>aRXu|h1Kbm! 
zI|Wt|H+MDbu$;B>Bi0Feo;9DlnO`q+Ih%EIGxNKI`Q5E`!n0h~i01q$=64-yq>?q# zb!4z&b`pL``YvWI#RJQ1uhF)Sxz)2xtQYwvW+xhL_cO1@+!HJ~iG8oQj=f1bL7!rb zSPPu@+u*yvrhS{YObfj{1N@!G9Rj@1=e^E$_{SUhcY*_*hq3U(TwTPws*8-g_rX7Q zhePkra0dsu2Tyxb?`+NvTxgsMuVf$J#~lt+X@@;c{A3SzmPtS7{@TZVxkkmMtOKWA zKQu*Pm-?UA;Kud@9>c5dC`jd5=S*YXrOK zbAMAq^#6Dl>*g%?^!}je1lV4m4jzaDxRAd0%%m*5im@H&NBXh z-|7%?0>UFYvgqtD)CczbsnjoJ{F|<~<;RRfv%ZU*E2@+EJ7Q{~Lt=mqc?=UelPqK3 zH**a7THI$WE&r~$wBZ8d;*xJ)c}Xxre_gDPW6<7W#cV=*t*E6cO*9N9&iEfR@cU@vv^7s^sWD89{aza zI0ftfQ_Z%Yjx(tG-_l~g_c2HM-`8ya7k{zddn4^(1GVZ$+TVn4y!h()YchyQb?8X+ zlM4L^roc!3xnrF#bGJrt8_*Jn1#lL~DFoeZd1x-EkE%)?#F>a%8SDWUtcT zhqvf%-z#oHCbM*q8vG>M@3kL=BWEx+KW*RjE`4tkcP1F@Q`M~R??;|%47#(rvewIo?fS|IrE}MI`RSoN%6q<1`h4#`qp)k4Jsx+zU3Sqy`@07^SHDlu z@5pvs@Ta4U>s;FDdd@DN%x?$2mk&hRV4oFy>AZ{io&C(tU18^>P#dau!?Vsi8&2h1 zI|{rBy!&;eeP;~fsd_h#cQ5a^>tP2;`Snxw@3Z*rz;VNG?PoT;>7kR1<*vvxwWqoI zb(hn(Wajh;j?zP~QTB%aa^Bf+m00+m8u}~me*K%_?`+&o4ISa#NBhP+ha~6Q`sPG( zdSYjGC-TA0j7>P~CYPa`e1&du7rL1JmvTP z;4$R!GGZXp&@q3`GsQ>BAOEV`XnT$I6~|a#cTWnv=AK9lo9np2x+kwd+x@F&KiN>2qI)UmrnMsG4R}Y=>W~zfBQYVLN>Lb@;ZP>70^eh3)X~0ozMi#EWy%9} zDc=Fg{SCQMd?ei%PmYE5_Dc5l>+kJoudL&HZd?0jkvl#_?i)1X`vdTfWA0ds4nCxw zo_&Bn@NdYk8&AaMU5Y&Y4dnZdI$Q6$6dC;v{x{4<{wI#>((8<+wHf{gE?x6SVp6B~ zcN4pH>6&R9e_%}C)}(kneCRdo!Ct|GZ2a+tmo^qQkURUE2G?^2mM!2j=su^v6It(@ z@MVuVZrfh=sw(UlUT;_XdeghDg#Y)Ps(StiaZC3sO`Ux2nroi#@m%xuUw^as>o-ig z`hEYK++()M*!WlIRJk%5u#uFpZ()NVUea}ZFZ#~k+~Y$7Pgg#PPPXf>j90QQ`IRa% zc)x=8Pp{5?vf)GXc<*b9n~S%2>iN)_^&b65@AbSBIVI@%b^8opTWNS(8+@_HS28~G zL2&lN9h$Lb@5YrMUjF=DjBnPB&}Rv zOy1(zmoA&fJ;eV_MHhO5HGqvNvO(dV9Ay{Ky-^&a-UB0`3Pt{mf%M zJBMd`gLXNUxs(5c`&^vE@~}U7u>r)9`^MFle2qVNg0<2#(cT-d17JVVno9}IU<|Jy z`*a>PfXQh4RjRRJ>)Ov+cS{7o9kvbYUczQq>cNa+<9IF44DfX7j&k}aV-GA1(x`I1wmA1#LF6^Fi({<5e$QXZAAJ5nB%t~;7MT1Bk6c>nK-pU7iB0I#k1QyvJRGnPS%MfjLSS6=uKKI_7A_;9WVKMeSoP2ajRwGzBk zC%` zdS+ak@;Pa4^J#ZZm=3&w#((;7xU&0gt^~}GSOP%&_ zUh_2kbV7eMzKc5IlIzS1wz>E%*mSl*cdtA%&aPVm?LPBWFOYaE(m@Oiil zdJry5+I`JxH_;lm@Z{h}I1t?k505eK=U7{Vj9vQLV~qV-{vTodvbQ|O__a14V_eSj 
zCSZDuvBlBPO%_fP`vsf&u<q_-h9{I_F zb>y5k0*81+czM;&G`}9^%b61|a1{Y-6J^AojHQt>j6?a0!kp8|;C(haq3%+2-hY&A zXRW~se=wVQ&^he8F7dw)T~}E56QW0p{0Ci=nS~ks&1~Drb_~?E$XeV_R#c%?#n6|4w`V?MmM!+!fHae2FXQ zYd8jppC=jc6VrnJ&%J2Tb^);4s;nA9fKxM4ZqcT?V=zcR|9#nvcX;cKJ&8 z^le>?IoF)tT23si?8udjt(bb(@=mcv!lmGI@!d>a1Bdno`tPFu^MFzO=Qi#w?~B;_ zYyWNrl`AKWzGR=%eIt`7cL(`hzs)#yli#F|F{v%=nwBj)Kzp6Yrm~+kV2jFRoCDy$ z4;#@=*R+E>$%|w4^O6y)eZdTFqy2Ng=8RqXh{i5mC^fYA67-s{41Z_qJZYg{@=m@A z@7?}_qH#vOCpFL>gl8*{te4nVogG;OzcA>(ob^#@X4Y*2R%@Mj@vkvf%^7RN`X1b8 z;DZ~UCod(%@9+#Dj15eE5SMd8fvFp^a_WQ1<;w=4sOJp23e94BnX9oT!c*=#q zIx^lf;x{YAc2{2vj4SXfyCxbtx(>xz_K!^Zbo^uTjg|PYVP_QwYw@FHFt&3wHqAG_ z7|ru0+CY9i9LHK(H39zwa;5C(weZB6Eb@)u1J5`VTMs|1kj-cg^V34?o!Vk-VX$U(FLIE!YiYX-zPJ{-LG21pS?sm@ z|6L`KeWQuHgOmm`TJv>J2%EyaE_(>#~YjnMMkQIzO22U{8XLI(L8UsPJZ%iVhlpu^*CWjM-CvvBp?HJ zoQ=vUr$mOol{4(;&|BX=|rOZYDE@4dg|WD|GW z`o3e&MI$)>A84Rz0zO9S%)iTiehD~H`Gb^?r#!j;kTLv)rK|<*Q#H&h`N0;~d<-Z7ttD;n+b*D_9>_sC!zk_Q8lZ;=O!QqB5Kd~GMtarV0W1Lz~z zmQzC2wEGjm>^A0$j%RQ;gF!wD!M77XP4lV2c`vfIEEtg&1>-ym7CR?JIcG#^TKT<{FaSC^1kdBlJhmE zvd^myXFDHXi;Y$^v>ATcwSH(#kz+?WDe~;Gjy)$4d2#hvbEA1f&bDIP?qSUF* zvC3;M-?`PEOW6gOr;29gO8R^Tb6CZgN1H?Op3e92-!g|kjYWUtx8{(0Fy@Q)HJ1+k z%MPRSf`TJE0^3;dXdX8oBWK7>HcXn!bA_8@v3a{_W2`mmVU0?Uan|T&;voWcW3SPP zTBASo2UkJ6j$H?uwCy^qQIp@Y>(nuCvg@qCPwd!r_OQQrz?1($Z&LNAY5j->wSJ;)Q(8Z&Qv;s2;7d8Zer_FFKURDZ>&G7B@O{X^ zk?=RPenu~&w(-a8=(=u*{_YYE-ipoZgZ@UIFBIKnv&NJ!1o}QKy4GIf=;uTE?2VAh z%mapZueIS0vQD;+!8^Oa)!VGY&A`K&gs-Jp^HD8WKGeEP4QU>iv-Ya^|7GTKE9;Nk zVOsBF!l8HK4_m?KSg^`(@!>7wuq%znz7&hRH{6FIKgs@k`~_Ph;|8zvr*rcf_mD5* zWaX8#aRs)i%TN0yBzFi8y=ysd#yNveU1Q@y^Y$nB^3)~v|6Nzx_-|&7u8Ufu+FvZ& zE4(SdU7}xR4K;xy>ErUxi(gCyN80W7ctA7na-!D9ee5ld$zF@SVjpGhV{h5c|K02@ zF6Q+<_Lg(*h3Qf_s~b(n(3D9dIi6=27G)cMC`NbOLhy{v+iR*I^7mFllxdt z(QS26E{k${ruEVT9|H%Eck&$u=d%6(@O|>AuC;Mfg>2`?W)SAwPdL`x-UyzG*_%{$ zCU>^`#u@p-olS#NEc|v6TS7f(kQj3C{ZsPI8*ke979Dcu;=DV+tLp8Ab_1UrTBnaw zH-N8MG^4fHjO?g7^3T0{ufI@r;1fG~mq*sf0$W!)G9KHC8D(4H_xETk%>5;zohs_9 
z%~Iw{^TAnC>wlz=!iTW0sGn=q&&b?u|BE&JLO}$+k66QMY-I9)7*WTZuXCvFD$xLi~r_zywrWyInnX|VPeid3fz#1GWUo=Lw z5neE~CSIitY}V0jtlmZaRMx*EgZwDYT-xz(*qAL}f*JY>X9;dfwDq2wPYq7`G5W)^ z-ifVU=*8kw$d~Z2I?G4JdahUupK@fG9?SP*`=~Oxd$*Jrks8*FWQa<5gK*ZvIJ;8p zan56GTIYL#vud&(^YK&WHJoJR@6IzVeOq<3)~oXEaWzu@3Ced(vdf$Ne*oE?vsL+$ z$<>#55wWWecTOb6(t4H>nmNEZkOU*YoAaBJCoP!{KCkv&*ownv4b9{CXkYuu%~9>G zrVn4Dk)MVv;J{=|u*Xph3~ki$(Y`ar&cxwkY~j5hc}?)mp{=esW2aNLD#0F`gO58Y zD;tL;_r}?B_TtDm<}r>Fz?MpEo-@{E8o$RbU&G$%v}<^W;HwpU)N#gc9JBAGIU-&R z&vl;r`{YxSy@lL{@ZpQ;uh&f;V)naE z4*fafJpn$P@or+TJP})5D7f|>#kZ4X_kSSHhG+LEd&fl09WugX+O*_a*1-SP_qw8c z@C>Tk$>OHN$Ivy`X#TZ#Y}1b5pMEnwLGV z+VfGwd>HS>=2!8|$FoL!{Net=`TE}&V~<@ib2U8gdMDPJQ^jk~B0kIij$N+UZR1Ae zKBriNw`21a>tf-MbsRSBcB*;*>uJ1yD>nb+i$<_x0y-i5q+&3diPM1HOizF*2-NkkS4?FLsWR z{y_G8+aGuVGAzF@L{0 zg5TQ5`Z(jzjs7QEF&K}|JvuNJvtAUdV9^L;@w;pql#OzV<|yKKQ!J|LI=Y`TF7cRu z)}Bz3#lGJcJqN>WejiNb&JA!L40Q#!#tqrSoHe4o(SOQ2DFZ*D;Cq8L=ZBw*$2oA> z{BU@?f-w(!nipKj)^Ok+JGKJ;TXF2Rj%Dj^>%6sm=7IEM>?^g%F3f*YExa<9c8ic* zK6UrH^geJY95rDR@%4BuU!`Q1XT>+heYDN#z&)h zw#VnJuP11~YQVM;{ZM-;a_*7wwx3vagcIKp8=hj|4N<-mo~-(!U(xzTH+Xt4vW8R9 zCH}`Q|6S%Z3U`IKA#;s|KlN3@eVo~6ulZKaF&jqZa%#CJ(V7qCcp7OtxtIBL#^rx{ zINt~ePWa)G64p&bHrdRX@vi^2%k2T~2Hq2&t&xo@xJ2WP>8+g{nRW<#dN4q}wu-cN0b5QUR zpLsz9-c$7D64hchYfyf{GNYcixoXb%R6Jyvm=8w zK^I&1*)&l5l0AmeWUk)>PdFSK&1)L}O zTlYn|IdgT!zH60dzx$+Z-*xuN)3k{_MR1JtX;iac>Hbf_EE?_l-4N}s5pK`2W&E3% zYsvgCQ&zByY)5;&B z;#y5J?qDB&w}rqc*uMH>yT3DhV(ej#pTPk4I%}$NY!>|hS22Ch6n@b&#RF2oZ-C#L zFJk2^U(wCd34llZ;Z5Yd-i5XfRE)j;ohxmfK6pEnwoIh`WKS_sWt&Vpiu5%;heteTrpI3+?a{)9b#@iW~0+$6Fs=9VEZX zK%qOXHC=PV8BY04RX2M)V^~;H*f-0e?F141kQ$N7U)EZGsNo zS0@X;e+EDJG#~u);Et*YZbUPJ<6*V=r`SBLXVKEBJC?%3u7Q_*0-m-6-gY&1>8pb9 zIxBw8!Lintm%fAU>raZlbw8Q#)d#K=!|a>p|9T_x=*cL2e>w`^e(W>CYYq1T^?_Hl z(E@GsT_|6$$2!099=PqJKKI7A67MnDX}1d7?K9BUyOe#fJ2pR=-&Gwp-ERJvEvr-$ zr+$L-lFHXHG7dwwuaWVH2S(99d=4L+Y1SiyR#bYiWs5gK-_iOL9?88>PVhg-PbhHS%C_@)hyrK(# zS+pJ$9k194-d>NyD@qq~;uTl078S23UC4<)+{;=UE&h;L@e1)FCx%gamlMOd2l~_6 
z5^sI*E}K5i*lXy^@xzCK6?@p}wO3)`%jU1oaK@zSZO-0;_p6bQjEFB@{8e}@#UA}F z=r9=JePz(ETUn)LV^w(}$nst6#I4nUgD-n=9bm_!HOK=bX4V(X#t#k1uVq z@kpLXOUE5;k52+7KRT4!c4$qpw|}+fe{`NIAFt%lYDbU0m^rN(;)f&o*~t{_TIe;vTuK&c{_obJ?!~H$>ha;Qd!vtE%^k%C6*0?OV|ON_)=t5RY}@i#CmwkUt(nl2ztw(Pb3p6e%M* z^xSOEE4ex?IvmaZd<%G}Iysh3cIJGLq{M>pMfw+Rv&d!XTQ*$Ry`DCG=;=L?GMn*t z`#w3m%u>p{3ogRgMX!yNQw(o-`S5aAM#^=ff8XM?$Jv0+5q(}3DOZIa{UnTnl}-;zfx8p7t|`@4Im9%i5U+|c;Qce|nUZ^Lb1 zFQNBT9f{3fN&DH~n6#X;pY_e0MaaG;b+VUp&gdO=WaqTj#}ss+PV5crPs3p8T4?v- z!15gZbp6@p&5LEDd3n(CbCs}PRh8IutK1IJ5 z^}IdKpBwg=IcGJ`nP=aKG2Bl1@FCp`I@0Dd%DQG;5n0pnaeWm1HF57#AkW6vChC2T ze*7=Q=4@a2y$H79ib3y1d13u$lUzA0O`@;5Z% z+ZG=_n>Fq5Va4OU4Ubh@;<3FYHvGZa_IMjc(dX0XL%vjlF>0@GMy~u8d){dMp7=~j z@Wh{Ne)Pbb&>4Nm2fcy*elI!&9_=ADioZghngPA(-8<;TzFd1;!lm*coSY23AV*6E zv(MV5gx=h-7O(IP z&FNTj+K$QOcF~+JB<>8{_Aa#dR`K|Eu8fl3B4hQQZO`dw^4liHjDB@`PHUJ`$#3(S z51o1HMaFptyGzZdv zF0k=0x$h>?%9wttkC1y=V^46lLiUQ=`CMn%ZOYfLSkAG|Pt5*^ z^AqByrL-m8LuWBXtKHB$V;bsZbCVgEqM!kO=0~00dz}neJA)4Ul5Lz zuk7dW4z+&)wsh9zDCaZMlo$Ti-pP-QI6o1~`H7fd_^8o#4mL=gpD@sex@lYezCVez z#rcUJ(SBE&&BNS3vFUtKi~Ze0UC}%_jX4vkSfbJMHA;q;P2~h<5wqfL{3>6=K5%pg zdW7gr_D>U9c@>)t_RnDd4DS6wN5;2N@hf&&GX|#?B|)pE>zry79P=IQKzUO8?$O&SI>( zt>C|in-5?ExnYYvCJX1_vj_W}?xRa#JuBY*1(&DwbpI*j(8OPRVCI4LoWa<>d|7 zV#L;UVe3*KWky0D@BhRoF!N|OR&oKFz(mD*Xz3%29-x=GjMkD{!833wv^9i?(C}L zzicY@SnYaQ;7NV-G1fB1>IKdj@PqF&#!ADxt%~v8OucXAN7`Ljn;0rZ&%>Hg; zd|lXr%iH=YFlh{XDP!X&F?1L9^aJ1qn-zIe%zoWBPz!E8Z^X3z;LXZ{yBK$oInKh* z3$6*R$}O;mySOxene@w<6wYX;^!H#lui}h0!Q>mN{6?PS$V(YVjg^CAN~`RaO21gH;p(`%4AXgU&*=QCoas&xe;k|2DSz9E$m@dKDfn^=kTzK zml)HqoEwpMYnk`TJ@);Um3>CNTmMbuE#y=AEK-<<>(Rfu$z|EW9Y`g#g+1o@rd;JE zC9m7T7E7LG@74Xv4^*($v_|x2DG`Wk9+6K4hC

?UL<0r8U=`-r9?OybnA1R%EN$ z0AoK0TE3&;uO;kV%209=CgkIY{-`9aSoH`Ht7 zZ`qw{*8A{f?`AKo=38^K7JLb>eT(f}dD!~(pS$kucn_5cvd*kB;K`Yra&jg+bJIH= z-voR)uSj`P(?Z`C?&sLJw_*!EZ`9Y)&mVwKbWojc_n~=LKi%ZHvg%uTR?|W))DO=> zHexKN>ofk}P~R$dq1_MX{x<|DmmO8EnVgGW=udO9nD+Po(PPO$%Dv>Hjoi9E>&(tN z=0)XX-&MW_az`BALHmPlqi~?#2o53_4$d%k>5jWW@)w5T-6ri_kM8B0iNA(ikJ3-r z`77T0lRtP8c|>=EY=$0t$+zHPtz;nc$sQd}_McL_?;(4hq}^~byV;0#7twB2)=;~C zV)>kQpF}4;!a1li_+*&2i-lk8<}a^w_66lLP`;O`>~@Ge*6$ z;^9os!4;AJi}4daCVTcw>_O;D;`z!yqVx2<#OGM=au^Tvao?JQ#Iz^&>pr;k-h|fm zuQz}0-@>omw8^*a0NGtm5xq518n2J>Er))UE}vmF{9jO4GJL_V}h z(oTMkoXtQj(@T+H_dGRO@aecqDXnc~jznqT^ zN4^Hpdo?`iHJ>=T*; z<)eMdHTB?Z=7nUaI}*5)vV**g)zE2z8P|FU`DG68-iQqxe#7i~>rHTZ^*)=PmwE~? zy`H|?{WJ}0p0oGbWww362>#+cGx)P*uHe>O@*ItlN72cjn8ccYd|W}Ek9>6|xlXfE z&{@b=*8=^T@J?(plZ#W*=N6H#PCPaV9;+O77oVR#H_0<$ZV@tRfW2hWWTVaZ_4LU` zjJY&(udy_vhWOCz@~>~EPB!pPA`hx-Qu^HE-0e78IeZNj=cwES*4v~M%9*2<+d?^y z%3)t0t(`64(0330FR)|Pd^fWm*JD#b{_V=_YR@nx9+ND5@iAj5=UlBb8yPOnX}mXB z@l)m~8yQ0MzxVVS^M=mhtnq%ydB_UBKy&+wYJ2@n<@spqt(RPY9kg}9yysfwe6NJ=s|0@gLmIwIQ*UBHzn^q!n>Ff`<>*58|Dr#R}v|AKjo@|o-aus zS{UhPQKZey$f27!gLAI>xoCL(6_NUP@@|oT^mn)M?uHTdnj-bS#5?&cqtA`M&3GkG zXAoZm&m6$7a=ekbX*kEeGoY%eX+c#}BcH$m_AdE_$Y){exu^Y%5#LTYwwSyZ=Jt{A zJuxNZ81#f>TNqsEqW@TE9lwrSJ{|iUv`2Rb(04I?H^!-aa_HNM>>sgC-i}iI4_9(O zK*Z;uGsTqILK)%F_?727VeU3~8$DC;6FD~<*1m!ql4svjPri0yD4MUd@s`6_j%~Bo zVehP%;IUcbf?nn9 z)PFbkU_6}7z6?H#gPv=IlM?#nTtSL#3XTuW!(9ULlklnIKW$=tu{SRx2h^>{fGdtU zC^O?*y*?w@z+6;k7!_B+^A@uotYhz~WPdqIopn+9+{(%4R*5aN8VynV2e*umw>KaH(1)(CDrY8-ld zqI5Ak_g)wF{mvO{f?@7wG!0K{FF5W)FU)4|il@D1mvWHDhZK|8VUoWcpFu~XyIyiK zGQ=|R@T>Wt&!#Y5^lKyZfXcF;NhV;wU6#w|CO(o!SFsO*D=TlXWWHMTd9TM`cs_ex zLPX{}syk)46WEhrmup7mD`hYIhT83R*JokpRJ-c`K0eW7<#*ew`9?4SJRilccLDoR z;zdUALhwBYe7^=R6~`-lzXmS1-_4mb^5<;_m%A?`S3L5^bCem3lzEmi&r;@D$~;Gz zLCS1JhU|_d=7IZ<_eqCKO3ZzoHU7*vqcEOzflt=L^S7h$+|Aie@vaWw$^v)7mDaB8 zN{+wJ%RS1a?42c)JD>fO^X-Qc&NqT{kP|LIE_gbM4xR?C^}w|rxYmmffUB3ee2g+j zBCv0d8sB4-A;)a}e&)}Dox7P=qQ^g-l#u%xu>Z(w6qa+&S@byrT`6}O_@8DRii2iz 
zleniml{5CVoe^ofl(t9a*(YA1B9}6ikuo`y$pOY3V9cfeO7+j&S^ab0$5CXe(j-rA zAN^-18im?_yxbLi6*5gPa=Y|T>@(!t>MC3}{$!qLyb{`*K^v9Om+H@EPL-GXd~md$ z?;P+{cXi70R%mH_UE9RIU6w-;v<{WY(DV0 zS=f5`pB4FkJ^!opKGlAIE&q)cn}?L7*w3nwVMkgMz;2~87H&- zhQ&`=H*LW!3xd5XiBl=EpKX2gvf$Pk#-XyX%PO<=(T@lBN7`wjfAQ48LAOO~wbuWb z;GSf=pYRr=?X&bF{_oB42OXbLx#{ZfpM@XEjL-FQr*UnBXKD{I@WW3|5MMLkYvk%$ z2n|qw_r8oLiwsx2_}Zp@83o#xXM)2b!&fg_&iTgp7hv7wgWSoNr8Co|Aai@Un{%tg@%; zQQoTec1D5r;*T09KDEf+P|SKWthgwRcN4i#|G*xjxOwNfaz>rHgB&pMe~lly9`JD| zQ21W%B=lexofx>RI6@2I8SGQwsnV-97O{_-R@>Hoag)7Q2@kKu87a`+!J+_m0q|Hq zi8VfvJHc-G1wMZh-~XApXJnr%_-t{okiDe`J*X$^y7!qch$9up%I^5wBWbIP-)K+h8bpiBH=VxtYV*9GZPE?Ie z&Y(;;@~QllUDUaPK8h)yF#%bCXY9ch#XM6v{qF>p%lPkOeEN@m#=bYTbt*c@BXI_L zA-V8oT=)I~o+I=3_sm6hgtz>Tb?*G%f}ZlP!YLby;5;FjB?9M^5pXKz&Vf@l&1g8! z0?xI-r8_1{`Sx3Px!HL{yV(25OFaJ@uYKg{@QTthOOlgB1HU*YEQk+q1g zq>;KmInUVe{uh7AT${vuxRAS~o7i;b*kiMqOUv#GUwY^~V&(AlDW{0i{g@ckO|uJ#Q0Hh0stze2qXV_d%h?7wx5YdvBBZ?aMGLuhW+ zs?QgEy5{o*uaJ{@(3oo36Q{byw>|MA8 zuwQ$yzX~?}K5tc3f#5n~cr5$t{j?{2u~i^dMWfjb+U}M&X0t?xD|- zyBMGDAh>Q}f9WDm?M$7|z#nrilAYu}*?8>xMpCgiObOk}d_HL5Ej9Gt$V~&-A!Msn z-o`4{3ik~??&7;h`6yXmx|?K@+Qf!`0-KnNvc#GlR{qJYO^>uU)7IC>51GxJ*t9it zPu9uGV)(>wto${aD~-j^JasTX4cEiRp+_h8o$mXJE^lI|+)W+~ct*t|_~yjBpCS)# zDf_Bqyc%qBlJQvBmQAjdJymxA-kLQ z?*e4@uQHwnYn_Zf7VW`}z@;%aA2SMdww-+2R-PI?lU$!+`mFz@{Ffavqu5_xqj&Q; zGjFE0Hj^{*tDB9W`ky>F2FF}tVpeJS?98MZEQ zExyj^{m%J);^o090rWBL5!!Ps-PEo9)Xq0u+G~$N@>lOH?2Qd(aO;wYzS!Uj?)jUu z*0A3)Cu&Q0(7vR$oO|4yXGh6V<~;iXGL@|hOBWyFtvNBq<5}ne+E2mzKpb|vN@6L> z8e7`qu>Tch+IQIR$9`Q4kJY{IqCMSJFM1w%|Ga$VS-i*pN$xb8Ux%ULUUY(*Wd=Om zrgcXaaPFtSkn-pT15W_+$UFw`Fh0RoX&A>(xEtF`nC~jlAKy9)T@Bog{FZNAxmyP> zAm#zR_N#}Z`9%pdR?ObA&*Bp&C~y1eQ$oinpF!L2x*u$>A{UN_xv^q8q2;7&dR+O+O246n;Gzn-0C9EvVK zxKHaY2Apv(u{{x$yt9Z5Ef;@huMPCjA% zKAUfBtZny=qs%z}TlYgB&zSeM-)ANsOh`&>ov_Jh6U-)`=NgSeJ@@&q`ZXW_UEw8- zcXcm$)->C=^TNu3@8g;AC34TX1;=UxW@af$0|JpwlpRiZ1j&nfZ}iRCO(Unmq(x(Lg}4 z$H=+?-MNme+x={0K3V&cb-xc^=_2m`)E!rVyUU?BQ!S)S!DI+`@Qg7jcl_C_^ViRReWnrm5Q#RyL9$e_uJ| 
ze_Fu*6El#Vkayp~=h}<^dOyGObRG^Ksc~OGKUInR{}Vipb>2b#=mW?piiI)3o7UNu z8LU(MvtfK3l9^I??zzyW^P2Gn*U}m(|0(7nuIlQ%z&_ z6m<4^=*zv2`h)LmaRvLZ!|5!sT_>g0DZ>S?ZST;Bg`a)=MxK8c{Ja1@PJo~H!H?ka zm?;M(NA*PD5TDa|lWJgjCjyJlox(V5+4w{F^9JMywS5WkTc_J#EWOCl8jIS{c&>@G zA>W+N0m<&!OPh+P6|c2<(3$y_oI4+!dHW*mn2b@pGJLP@LN$)ZOeJ4BFpGYkJYV#a z99jo#qveTo56u=HoHfXK5c=BhN^WhYFZK;=#>4xwWj)b3aoNE7Py`Ro{$gAYz|1haK?dy_%)RzJ$vBx=Hoxh?ZuIBz=$@XymFG79h`fOd!WZs4>r0MY=PMMikSnC;`?`UFM;;X?M8h!G@mKmU3)0BExi~!D*J?; zvkKpK$Y;tfsdET6PcNc;CgoL5Yt`$seBfu4tIRg)#Y1J6b;@<)|4&Hq7qns*Y>3ze zTd@l|Htag=f(@ptZZq;(`I^leE7w%LRc*s~sN9U7?I!jEIZH5}ZnGF(9#5NfYLmKq zDeu(PI%a%UUB!)9WAl#=ug2`*Q@*C^`O(I$x}!a3J+=i4BR08NMZubf?K!n&TJ{s< z*sTk>V`r8-Xhz;CPV9E*NHh{Hn>v0Pl=2PCNq*J%l3;e^zHPfbTSjh98j`Pi+{9_c zSbOk1?KdOk%>eO;cD}kHIrIVSOd4p%+*i?=%8J@8iGdneu!jrn{fL3bjuWwO1$ ztUw;FGPrj*^1M3P-p}_&o=59Q4lljxv)s*iE_PpX8rMK4Es;FKt?$QvNwUCwoUOQ- zGdJGb#1=2Ur&MOQ8H@)6i^cG-NdHv)NO#Mntsy)nk0zdoXSolra>XGX&(Ei53mdQiIrWTKAZfOU!u6&9@mzu?C;Mn zv%gihM0KYImi;yl--7&0oCCdpx_!V@3chR3GjdivCOO~M>4eATk58ZM{qu%pWd<>p z#MAho=jO-!OFibSoe_H&B(5^v^jfivqm2WfWyovzclq#7#Jv%FjplD;P7IA#wCOdCT><7!|BZ3} zUC5xj^xM1CU#R-IuSM$reQ?rM#H-a2Gf`z`ypXZfDD0eQ*NaQ?@A4wQ&i0* zID7UPZGUl#mtS#O7It)e^=GgrTHgtg?||QeMLxul=kavTe(p6T!DeiIn+9SdwkQ0Z ztU<#pdI3G?^gVR)A35j896!=l_A2InhxVV|8Ae-KgjbQDJ6QTPdkkgP7$Uych1~U` z|DPEjl+V+a0a8L7!nShxNw;|-i~STnq5WDgWFKofynoWq-pY3W@vZ&eee*5F5sGiC zKW~AJ!*{oE*9mrtt+)R3ecnf(+@_wd+@W4{p17s>!RFiT^^#k>H_`sBIXTANy`?ur z&&kkuMiVVQo z-kAg5TOw;H`dtlsYNzFoFt>NgA2B6xS?^k-?ZE5^o*296GMMH3j}7)N6rFm4+}+uB z;0D8@soq)M;Qm^B3@!A%b=GCU-3=cP_OkD9Wnb&ur@Pw5hn`{_-oDt2#0z?(U_{2> zseZlLPCs_Pedjp;hx%=<8NDB^rRctXY~jpA?;G7G(nPKS$$8p)(!>6s?n5eh#rTBe z&fY)8{za%+?5AXO2=5r6{r@cIDcDY-v z+*9zcPHf5Wt+pb=Yw7HlX-;{cd$!S5etgXX<=CORsH=L!lJ@%o>)O}xez9VRRiAxC z^)n*%wa#kc^OI@EppM$|)rJ?CXJy@-dDqb;zO$;980%kNVq|Zc5Vs&PJKJpoV@z9vEtwCcjt>X_l<3nuApj9_x zcj1FDLtjG=*-zf;+Vk+gxF_XrJ!J2Z>kipG0KXjf6*aeOy}MGI+q<$#EWRuJ1dw5z z|GJk~FzR_1K8n-NpRr}G66#wxMRpM{wZ~=i?%CiYhkFp5I@QnEYkNhcj-PWTf-#5x 
zeGTOM%qnZ#vUD%0wXT{|lTWJ<5m!Eh}*;O{K6xU<(P#ey8 zbfp0DZTEf~KD`r+<9Ocn|B&}4a8+H`;{Q4KqTYJ}#Sw7`7fd7I062otZ~;vKjdPl@ zWxRT?AQu^;#3-m4P)sT|tuNTtAx$f!=9xBWleVu;ZK}0NYkW=nM!|Fd$)FiAiQ)HM zXMh7js!3ko`+xo<*R#(#`|Q2;+H0@9*4k^YjV(!FY=)NzoLXC%;{=B}(ModV4HEyV z_xQ`5_JMJ8H9STATF0Z={tJffvgetH4*IEu__}5tW{fOTxA)OM<FHWHy`Q3RUUbM zDlua4(J;$akLa`${V8Qz<#mldulFP?v>o|i8a5NH{bJkl(N^bCPjaggHdBexX>QZM zbm3Bb0?p^CKfkhGY|Cq!@Qby|#3I}D6>q@dA)Y?CU55=&_W7($!*0Ua2Vz&#`?^c6 zk$v%|Z1bMM`|r*3Gzg9Q=$D$G=mlNu0Kx>6*!P@XW}1mxbP)E{zap^)KI_=y`RNqTTVaiA@-@>*WGOoc*DK_|&~!{gtCczG{_eO&=Z`bYS}M zi0{jj_@_$wrteD%K0W)%6N68W*fS5{(bNrteOP(%l9pw!Lh2Dp>I+&-%jQ@*eeHEe>z)`Q2y0;q)G5d{!Ke>rsvsheBh$ z*)sPH?v6+N*Sp&?(HD;R2Hc}WD3|?=0cW$4jEz9nIxZ>wYz;!co9^h)bP-8g*BE2a zw-wQGh~2M<$61E0dCaBM)#UPARvdc= zO8w_=@HCvqCX4UJ#eK-kuEEaQENmay>c}=^Ak9{Jp85a$4OIG!wC zs>B5Xe)IjdKFWdLXyu4M#+Bcy4XvBM+U}rUI(3>vFILI?TWeLD1?xKwj$$7uD<&KeE3{t4DByo}tzAs?>#_ zf}9;IviUIPRphM($KYYLdu+}&IX72)Or-t2y%YY_M2rp9>1l{D@XoxY$%~iQ-vSNX z3@yyV2ZnV?yYAPnfV;lli7n{kfQo;Ojc@FH+jC90AL|N1jqe+Byv((!$WK1x52xzc zxe9yX1ngZRyRTEXjY?(@)(JIi#@WsI`oR;$<}d@_qO;Ow?t$f%7G(DuXy4=ba!5JP zQ%21v;o4a`vR)|in9SWx=zmG%lbA-b?#8pnWS_{%536>zum?|DQx3Al3vHyK_sU+% zHu4tX+p|R-e_c^Ou46+M=1!dkPO_1y&h zwBj%3!v8xP-Aeh_YW%sJZJW^lB9w?)6Rup^;X`&CtwywAn_KQfw#eu=a;EPb>kXlD zUM&8${gl%8;J(eJe7W?n7hhGqM+CGvgb1WrpZc z5`WLS&fK1zPCum)-;OzpF)n-~2YE*3KXcvc)AGx_cpv(U1N)+HhVsJwz$^7sp3`*< zD;$ia#}wp6Yn$Y(U1?VncALK1Bz7AIeRyAKTElS0N+tC@N8bx>YglJ(V!vcF?UFI! 
zVgK%n%>60pbJu)m-(8=>d3n11P1l9E~vfz6@8rqh`To9yE`n>KgN2f$X19^MH9 z@S89kA?B0@17(W;oQLr#FzDs!XR=FqCJd2kc-uJ63X1^m_i25gBtJwwvX3-U9o2RT z|K!cw_mp?vVf=}ojMzt0*c&hP>=m30)_Ly{WGy#k_FT)G2CtNSI*o|Eq5By|Inad8 zAG*u4efesd_9uZu=0;f)g%&PeACdZ@iVtE6X^C(86m!Zp))<}i&wTjk>-3qd&p*Q0 zi&aBT#j?lyb$k;G$)AG_W)FJ@(eandp4F!re{Zrc?^X)&Ico{X1y!tAEPkSFbSk{opr69jf^aY+KcugdHpK zH^w|9Z5N+Cc`rQVJoRVetMZs_TYZ^rYrWttmp1^ z58(?Ac%9SWbq;u4JG@Tfl3q!N`Wl_pXH)1=_{WdGn${2j|9FluiZ7|gKU%bLEq1y3BxecrtVhN!y7_6D zuUoJ^MIy5_F;303M=ShNp5ywohH>yJk=LX>R@qM8xeeLQ-9xr(X8zi)$)a8Io#O~L z3#VE z$LHeSG$mQ$a6P*bJI&3azjo&tp9V+jR9)XTb zvI4%-c(@+ZjM!^p=kVd@^AYP`qXz4`YKwg|e0Ox? zyUgcedp^q=tPfr*KDcM`fypEtlPGE1nph(p&YAU>puuPXX@%tfm z^YT`nNxXnn@VHuhkl&;4-^V{ag7P!jC-xNkaqoo(WgGjYpJG3*@T5~CMol&OR3rOw zzs$3H@k5>=e2;P99?ty3eq7 ztlu^hYdO~*Ui%R^iB`i;y~}(Xqpe8{)y91_XOMi5;(4JRcv|LrUchenLJM$p(0)bs z)KShD`a%5FFF{8__vc0vvl)7jcnsCV!f&;YA#TH%IElHuiFMpn7dyh{7y6U3ID=Te z@zCD;dAE->R>xS+PMNQtQgpmHFLZ>hBK}yc(^cyxpCkCXWv$~qFT@bv$-x?~tOc~P zj_X3s%1qokDoS+(W8XV1wz+orVGLzO^s5;q?X8qLShtnBBu|D zzRbM|?*~`3E4=YYOULv*!PuvmFZT>07Akw^t!;qDmp=7j|HYB$g{`fhzYXC=8`e^; z*#?P;B6Z%*Pud`7zRLYBe$s|B-2c&_l{}3W^mQrr?=PSGxJ%QAn;2_4|JGyP+|JoC zQOuu06Ps<}r>wLfardNuQ}IubKK`_G3Wc8(e51EIMmF9=9ry@RudI{WkdqvZhs6$p ze*JO4hTLsOKe|%S7JfMue-WF;v+y4iIMA_$*1C94cfIVxmFdXv$cu)4wuyKoVvmV- zS1(;jJh%6=Q)Ay_-|xGlq3tq{M%SsVrF{TRjfAF>!R3dq>-{LSF6-BV+mB3|u$#0# zx>4cW*0aRL6`D9ZkTVn6Cu!33Xue}TEG(J)wZGQKqiDyCy!)1MFYOq? zeYY{&*^Y?DBk#$W9T7K!JW^I8a0mAouUg%f@!HDyY1=-fZ8AnPr|RR=+P=yGyS4V6 z1zs78Qm(X5Xj`j;_EqvsXj{gnJTqxq(_b_i5L)cJ-?h1IOV?gQp}kn-={V$R6(1Cl zqvs-1{zbE#3&aIBVK`#PXXZ2PX*A3FuO4NIOz`d^;y15%;kT~u_m9Fyb6Ns%eV9Ln zVSm_a8#^TiyXWck6c!h#r{KFiHlA6pr2qYHM6d+sJ6PuAe(0AV87QXDNAC% z2l4!vF+PPJqM(m-eoqqnyooh*fuWqbO-*d`VTRt1t)M2Y9vh?p*k0h6mVX?HfFZd?C7nJZn9! 
z{4jHabGIAYNh|&Xt@sZ_k+!kcKhD=QJAaJ3xB&nBEo1T*=5If@FrRgGKkMp+tgHKp z&qR4W*41roC!iIFV$)=4nbTQstu10bAN{US^vC(BTkBQM=#jF5!BHVNDpZwI6W`SN%p1YgLk8kEDl(Afiva#M zV-AlsnEj-%?^ z=mX1BC}%%w?@~_Sa>q=qZ2S~N&%1~3-$G{C;SVH54pG)r({A>WuGuU45xiJs-(o!d z(;=|ns~YBeD=D(Snuu<^d>P*+NZ;8gpZmeS3rQX5rEb3MIBj1ewx^UA)x=xTnKLYT zIZKSZB}pBd>}wSC+m7e#Yf`x$-jrAv!rURg7xBl`!5v4HHTl4kEHKVd2A@Ozj0fKd z{IVVFQAZzrW-yTUEoZErUn6LhxiPGSKC zgL~7)IT3g^Gd5pGN3gSxW_9z~r0-(mTz!!%HqO=9GoF-tY@SbY{{S}62e5fQfX(ws z(od3pQnP>ld13woe_2?FZDc#J9fgK-j6FAE2hFBkqDRALYU3<+P>EM3v@P&STO=;; z@7~+EGgKW^>*y8EK^mN4z!_v4e_a@G23;4jFbp{R$vtrPF5Q{5a2wk4${^Q z4gjP0!EFD-_WHwyt=4xr{R|5w&k8S?#2%ujsI!VZZGYS|Dx3KL|BK~MQ~z0>75<^N zp8e|=J@U+Hc@~Pk%UF9l)bb2`De`Qv<=JrTSM)hHtFH1Q*h3Bu8zYVo6JM`GcuFvM z%TYsWUHI3wZBc&c{feo=y+(EJzsWS+MdBck!^h+{J$jsti2V zzcNq}bYEaZ|N8=e8}r=4$FBSF!oQ7ib9FClW?tC*(7A;VlYg2e|2N1#<5Ti~pZv=$ z`JW^Imp>){ugPC-$$y;u1>}Ei%o7VAxbC|PnWGjyz%N(edEnf_M4PQHim@dzz*2b@ zg?&E}dg)NZ+M0+%@+f$XPnGqb_{OM@Z#fe`a#7=3N0pt;_5|%dVdUb*uw%;3x75M4 z@cY_?d8y>>de>}E)bb^cT%zSmB(LO4lYb6E%c+Rv+v?=QyQH=8%XmIQ0j5D!ZwexJCR|m3!awMZxcz~6MiTu$Df4qW+DXv-v zaa~LvHUS=%2M=pAcvuL$;Q@G72)y9|cvcAG^g+32Y(97uJWO~4e5@M}b69woOHpgZ z#v)_i@rN4Xp}I64CUUN{p)VWv`(Z3h|IszUKc-^Bcx3W z=*L~`A^he>wPDXjTf;X-v9HU`npUY&xrcq-ecA~rLuf(PujH%;DND+%Muv>U*YDi* zoTs^Jb<)f1>krU37oe3eo8wd+*A#3?7YA&we?5GA{dRcVVRc|(th%j!l5J1@qy1{@ z2dlO9&M8~#U!VF&{i!c~t=^GXSHGGv4r7b9L!SbFC_kB3WGBuwen{R_TR(EswtBI(pMOAEJp>*}_pFp_cG%_2&l&pAKQIqkEll)|A75w}$)z zuN9_E>>(7GgZW82gg*4Pgm$(iL=8SAe4>qY6p=0V7;QPwtu2!pi)qUdqbhe#pCRa>q&er6gNf)`oD<-KP zP5znGkE`&GF;N4bza$UbxD{<(PG+`kl8kJ-CW_@>0e?Ua9brh1fOWz4>TPWVR^ z{j1QAr&#}xdT$->NqQverFs_KS-%O^c!;b`&_6rrpPjNMFxf(Taik}F@W0!J6nn5H z#|d360REz=-_~!{;2)*IFSON$ZmRLEg=&Meb9Ve7`AhHu36Yp9*lcU+BV zyQz;2ZTti0ZRA<*PjkQDxEEe4e&WC7Sz3qFuA7C8km&iDAVn12EQP=z4MDH7f-nS#We~IY+;uBItpSLmY z(M8)fqk~2H+medtM|9*FiDNuF_xsl*i4G*XRunpSb_jMe(%E6U-7K1MEZ?Rj?5Iyu zjwDN5OpzN?Ua_y)#5MbuYT_jL$F0K)qlTogK7gP6@*?Er|3*IO zUY;j8rJC~iKIK`;<9c>ew5Gd>uD0ZeIymK+9)Eedls{V;oQO`9f<76;__6hGpFu36 
z#AE1YQhuNHQ*IHuTVL;+I0I~~)aMvIBxSR0jmW!VGl;rh87w$SNziq;4q(GiNAtx` z92Gev<)`*F&D<{;qYQmU6A!u;e163M{+9#U${RrG}gejUnb5^BHs6Dd!}m z!Ih|LzEZ*m7?TI3&&6k@H(wQ5n?`?pR@Q{@oocAK3!h?aGRXUN_u@||u_#pZf^)1T ze84(Pwlb_O2Yj5+)(rJEMRcE;Yl#tYLR)Jb-ng%0kLF8$4zctCbY|5Vp*f_y^P!Ody?OkeptC3;@zKWfOvIwPO3Ye#iU|MbF$ze@;?}IT%EIS7fso@O~KEjEc-BvfmTX zQY7(=Sg%;_$L{f4bk#}l58^wVKCHS6MF$kQRmu`MU>~vsXCx4dp?|GwHg+o7_d0Rp zA3^SV3fsa#*0%n_m}5U{;XLRegm3pykHAqB!8#XkJP91N5hJHco(DADQ(1Z#TZ-tW z^!bo3+mQJ6cil@}*83wj>v%b4oMD`cU(sIhviWZ&-_dPN#I=3w++WRS!k>Z~9XyM~ z)+scbTGW5B@Qf|R`gPt~{c4lFk=N_{DxZXABdhiNN2~fLNd9q~_3K|A)~{)j-`Gp} zIQbp3yYG{fv*nyW@GOaCpJd8dC9#z?W#kK&ld~&+!(PcO_EV8&XGt7;CbKsrB%as4cezrwhIf@!olf~dOIERWvXQ-$T6tzY?4LZz{z>-N6iVF+ zb!+3xF8g$C*jM$qHRe8D*%vEo%DFpqp1R-IlP7Drw59$X_7J{1NV#-$YCnz7KXBH> zG5#0%&?Ws9c!!v<<2}hu<8)tM`fr+>=Te65SFOhyF#Q`_SOe(ko4?Ov&_CZ_q(A*LyEJ#$8K3VXz>>Gu@&h(|$}$cYyf#(}~f@lfbBmU?6j5SwJ2 zGIrgM1V>_5Mh9Bn+dG+CGr>(WdXA61h{RG!Lcdy){Uu@%Q~reePZCQzxJ|yF;81G| ziOEBqr&p!MJte*_Qf5Xkbvk=_CS#yC82WlHv2x8gxek5KOg<0KF!!C_ikvu^HO%-8 zLCel!1K+_O)Pc5v!A~1D@D$=$wqjEg8@OrzJ_hbI`#0ru+P|mieWC5+2S%}f|Alzx z5`Wf=w|nfzI(>+}dl=)iPuQ@{%bwUT3bu3cSHkv-9x3cJXTHEiLLw z*fM*-mi9y!Z0kM^ww`$VoUrAMxe~VM94&-4Yw5bGHq(?`^c z#LHbSXH3O0$HZ=MjgH$8vTQ47C}!+)j&?@q^$E|4sr0RDS!P?e6&axgek8Kv=N9uh{@f?}9A)w+JG}5;dem{OuB%U8=gcE350E}_^mEU24AA=Ik`o$9 z`IYnH7S?m%)32_3hwu4=gSE+UBK?&_{0q>syiETKdnw z*jsuiScml@@|0u4Z+1H_&p)I5DtNSQwryG3{UM{%?jO9&Qyn^5neEwm9QbW_<*mz0 zu&q<>%UPGR*tQNm_H>GEP@6;Mjc;RjfHqDrcZto{=;zNo-YosBm(jDIuVGw7BTx41 z=l%98`}v+8eb7pONt;@kV_b7=%L>6^)`sBGSsMl~tJUZ|c(h}VXD4UboSv`h<@uob zHgN3o$F38ahgP(;7O{_8X+?ab8aw)FvYvdO!FL87@m+U%$$R5UdP(d7Yg%;|z2peJ zAV--plsRst9es`4%#U+s6UE@|{_$z3Bd>768FX`WEG|zV$&GX($^J#y(vJHyX zhAw|YE6gGzAs;c%#l~fA@O)i-{3krQt-gqUKmVRy-yUdw9Ah{HnqLjgzXZ*{1kE3W z<_ALa0s3nP{d5snnxOf;(0nsCtHKR0?Jk7Y^Bx>HIvrZizkkr^d}uwdCTw&Xw0^xU zq&EG|{B>KP^|Z>ob(^7e>@}xTh=D4!z8zYxf-Vlg17r{9>(GhZH-O*O&`B}%9^5c+ z^g-z4QR;u{;K0#^(8+e8lgh$%ZvxLL|3v5{w6V&d57BW}!AshBx5_^;No)p^Ka2dv 
zvvur2shf+=be{U=p|7@ro2U(u%kuuy#I2)Icu6C2ZufmUU9=!LjoUEy%!xJ~huOb! zt`QuH+*k+>Pl3Zz;P5GM7y=Gg(-vvNThx6493BFP7qKVCawbbGICTFi#NZd92EQ1n z@r&261KRF1_(i48FS0a#5vJiV3mm3ca42n(d%@uZa47t$a6`!GLU6d9`u$IajE)6| zS>VK3>0kFc@ND(Rt!t$}Qw$ttfx{SZ*vh*Yf1HLx$?qV)@oXLbdeEiI;Ki;}n_n?; zN6aVWM8V%(pN_vvanRt2=1%-+H2D8p{GGTGf36<*Gh-S>UEjY%bk+UfIY<0!d(rja zRhQV?lbnJtx7f90&y%_LVc)NUG~G^gIYng;M!J&xP8D&QZc&m&_lv$==Ye8#Y0g*b zej(>VvS;I+bWPuTb}D;^rt&TR#J%c!QjhFa*mSI`9_#*cf%#o@LeanWL8mR8GwDNz zlzr;5zqeZUMDYE--}Ot7c1m5QKA3|qT<80jb=ooC$-bt(U{b(``U(0waWSJQTWp%* zS7qHJDL8YmPpt(X;ihrKpY7pCWxP+0{gDpWHsV4}z=u@)O+9K?%&FAj-TTMvx8~vf zwRuY7;0O-P=SPR>`SW@{AFAufS3IAhKbQWKa-?lu&CjumKMAZ7^GE4t>_K#P?LquA zXSZ7Un#>WZLyzZM_KvPsiybzK{b_=yVxGx<%APTPO`F1SH~a7myF&Az{#u_!_hDCv z8l>YO=*BJ@6T5=6@y@q8c1c{|+<|%--@T`M9w~D%c{*(mJz@a#W@pI#7kyl%fh((> zA?oMdX{XC}(Y3#THf#H{BCD~DU|Vs1tS(7;5Zzg~v&4xF!SwIa6tUV(aW>)&2pSVOBf~@nr|>E02iJv>F;LHmC!# z{sqicv`yfb7~IzSqZjD?mrI%dLd?!{S!RDa8dY$^`Dl+Fn{PgIYR`Vev#X`GsF6A? z9RqenO?kDa4J$WXhyA=+^oSY-@hmo6A07iCgX9owlce zxkvcIdmmZgl)0i0ID5vp{+s2yTX{xfbptm=KT6I9dGqPGx$vPMl*klx4?M~xc)cGcUD|qY|&|d zT_5c@b~Uu`8di(n;TadUo^2a{vpX0%@36bt&JC^A#(~cq2k>lsgnMn*GZy%+g@&H6 zd}nRf&y04ZeV%spH`-oIZm( z(0!JQ9I=VLq~h1Tiv7TjvC1=>DN}sK)1+*luYLZ%DL;%GyJ{P2ZMAW}n0A?ie?2|dPazUM$*J6B^kR|ot)qDK5}UecDfpH<@Dd1l){-CQOF>z_8E<{qQZB7WFK+r zo-Npq#)a=zn`K|b@W%DyI(BU(4kT;d+k*Y8+Kcc5&jDufWpe^kD19X9Lj0TxRp+)4 ze_{KkNo>~+FuyDz9}$j6muvd40+5k8IQ?GqZafHBlxm{c6a8RWdz+c-gM33v84K*4bPeq~pw-7&TX(1tr)pe!oOpB3 z!~3^qu%GG{_Ep`?UMS)%AJ=15e3>;l@%vK2=a%8{PDOc6=s@-;+IV&&^l(3QDx^m! 
zqiXMCU!AWb{frYD1m;uOjHkPazo+?*K?@QmZWr^l=z_|-wr~0D5vQ7H`-gWEPm`Dz zLeou*A@;_-@;ErS@ipc280cs?ed+}7E^wTsj$*uws6DwVCMgqKwAyDj#(Kx>Zb5c- z+&N~q8$Br+oW_D%C%93;4e-rrf!;cxH`%Lq1U)*2eUZe?p4sxrnBBQ{N3GD+U!ps9 zIh0Y`POkE`$3iPA<5XlE2lf#M^~&=s=t$lP9mPT?vCxSlG}CzBl63OJj+rM!4uXyx zyq9t2pf9wurtC^xwe83p+k;0ABL5#m$Gs#zk|#Lp+(|pn2XFYHGuA0%wO0L-@{GhA zbj%*f*%HoEg70zk!PmjJ@H)}yJ0{B>7S3FOKbp^+@KSm9x-l1+@k0N~x_UElKXbHr zoyx%+EONd^g@b-?6JVdxY$nJ2l!hksJkha- zk0F2h0QTpFLq{W_rwHPgj3NfgXx6UB;0rOJPU4mj8~oA-=&4c;ylW}?X>#M{uf3Ah z`p<92&HUZl=U;s1?V?@(BSnqV{3)J?h8jjFGfzf1W@ciWh$CMX`P%qy z+r~CWl^R^ZhoEgbFaV+-oICZR%_o>djQRq$L3)5upcd*GG#Lv#p z#!o$%0n9Dwwq%JL(!zKby@o?v8XZSclZiXDrWw9oO`8<==sNTL4`+4kk~~U3<++yh zG%epIcznw^d)-Sn@(!G|d>=o&i@%%Q@;ATHVr56ly5jzZ@JykC27$F4lC z0t4}DFU_ERV>-^BxF4SIkxhB*W8kilJ(70E{Pj1SsNaNqUu9?C3}=_q&#!!ge*5XD z(Y0T$YB|CFkc+~vCH{zm7zz&F?ZJ=K!+0a_&J4S+-KkdXlsVQN>-q2~c$U21Ca=Vf zfX5|ACgKMM4pZ`N$tmf|rD-Y3rIh(Pt;{~ICdd3bHTiwY%^;@Z>|d+N2dG>8Wa1w2 zv@;(+>vVc9#j3WtWL3Eo;goTBWxcruj8)Vt{kWYm6>~gQi`O{IF7b)=_$eEV=QSN2 z(^ijaNh(`MP+8@D+_$Y>}}E zuh=4E5MHsx;1yeJ$({T_;u6qz?a~&5U#QHBJ>P#1yh{rjIeTI~yyAkYJSH(Bd11c5yvL%u=Vt7R) zykZ2rA`)H^4xezqCxl0Y!y{^_vo{`*1dkXAkLX#SK87P2*HWMGaIwMU;tSA(5Ar7H z)d8<~SL_^4Wm*LMLhP2pA0)O@iSfS1p-gMqi9aMbNx=uw#-4DAFEjY-9lJi{S@kC! 
zKlzCHvWY%rY`+rEcdY?z+pkr>;M#y}LOeJ}>W#Ir>ZqsQSPNaJJiaum?EA#!ceTkL zD|DD=Ug!C(#E&XeLtMdJ+sHFt%M;v|d30+%-|TGM{`}i6h7 z>Xdk+eYJlk?N4URnKXD5J!G`j){%`VMmt-G6)89{%SG!bKDIM)~8=fr1QnnG-vz3|SBl!GjJ@d$H|JU^-N%VCWp zXoK|4Sibow^mrfixB!|GJdaLcu7D;RkVP_)MPiXfT9HNkRZe6}$9y+>QYw8Z@v3dc zoR$flCPye&&?#|)G&;?OPB#IMgR&#l(Y?~_Q8}Lon)oZ<$ewnA?`_V39R}@~>36Y@ z$5QvneiGkkWLu8Nue3{K9@YRarBHU;fIXu!Q$3d&@Tv8w4wvK;Tdp&mGrv=nOB0~a zDxRg%H*v^MF~qOKk10L|Cx*l%^{m4jH>_!+QSD`(`%w+Eo34Cp8ZI%qJAHGPY3&rkFgJgdF++v;detvhu5y(sLM_J z=)Vl;PR^EWBJHzv=YhHS>5BZn6x_Cei^quBCUa_Ub;#Z$wY|ks|2XjC0a?>=0)y}+DfeMZxhE-C=1qxZ$d2w^zTfX&>dQY+>bU;^K0l>r zoEbGsTk*xUZE}v5F(`a@9p8g*uktFbAB%F_LQDtRt# z5ZcS+{Z`V%Y+~(lc;g4iS7zHp-c1KDH8#hL!^hO*y*BuzoV!WBzS?=7b33K~`g;F+ z>Xi2{)4y%d!?C-~aTDH{q~ z*VGR-zP*Vybljk<$>91qRw=adsw>y-|Szor@X4kqK5o-lzugz z=ODMqJLu_IT_z0b)EBXLHA=a@K?9PkGNQztJ@F#(p86@%mN5^UIH$@QW5XHfNcMjZ zD(l$wIPHQnu)U zO%G=r{PUtCbDFA9iRbN1j*Kpdk>E#OyCkIZQ$hL#(9iqd>!w!67{cE>HY zJFUGAi9EE#t8b(5U#S<_wxF?ppiSF zl{=uB+o7E=Gk4sEU8}#Q->BeAQ|Ey5%L8Vvf|tU8o^ui3)mmN-_>-~(9$9;C)!N-PZsPbp1AIBc zeC;x({)@I$!BcX;@hQ^QarSRc?8oW>!V{Ieq@2qVM?|W?{$O^aMz7M#2u3x^N)zx0>+QveoU4P&V)825u|0>!g zHp6Wjox8VgJZY>$IgNFwZS~mawe?sRc|=F(ZGG)`7TB7up-o3E-&xxfVYF#)A8ksz ziZ*Ti?{#N7Uh89>@W0mex}NrJ3q04-j;~q1v$o@J%{t8$_tB24qPZ_%J#D20zH4dM zotE#c?Rv&&7mnGT@r%Ot?sp2rJ)&niO7+T zkI$ZHhgS}>SN(9Sq8y4nI=1dc)pKbV=@0V8*1ZpZCWhzES@0g2Lw)crk$Xh{J`t`= zlX+HTpax^EZDRkiWI`s)+n>f_6cFmrFJ}C2+=-U7EaK|pu2^H3eRb;kL26AQ&coExgpQW7FN?316XYKLwS;qn5x=P*10iAUVj?KC&srvvryU@ys zH#?py=6jJf-W%7iLFh(ep|(7sa3%$E2==^|Egmf%vz+B4wluBmcx3K$wQlC4VgsEI zjb}mYnb3R&w0{dS0_VHf?V60R_f?zb|7N>hmzR$c=WaDI&yUpV^3(A<_2)re{d$ga zRa^Dar^(|+=QMHj74BP>>Um-XHgMiiWp8jC*2`(B3TntT-ir)EJc-Bl4z-s=y6u0+ zTdaQ<1rL|=S+zAD>KOq1Vsq8kd~|vf8WK7a{L9>4#C$35rQHr~Z(TKVm|yEtD_+Ne zSIM7a%&%SRMRsl9uE$s}@)+wyX^V82tm{S4=TccSTKQ*L0}|iGAl7@-y1v$&q8DCS zhJ1g!e7~hD4xdIx?(1liZVz=l;L+gEfi5-_1Gmr$?_@opiT=07xO4wp@4qcZUB4Wn z*LiN7Qg;;nV=nmYsrxMYxvs}(dzP^!`wr^owmIQv4mGy@)8M%ics$_Q=!es0KS=D% 
z%bXGM?jngXsrw;%exk$nM5YeYpF<7U@^#q80-Ml~*1y0zHeK%{&MN4(k0Ip*dnw=# zV$*H_Cl1bu*J6OH&bDam2Qofn%(a3WiQy)2m@vp5{gAy%9ed*~2OUxB5*!kj zy?W_j#<%FMd+lduqvOq4EC;^r!v7$fGB$JO-MKJj#yrZg@q2=Ji+hP3n96S-_0Hw~yVUI=ozB$` zKiq;HJd3}_hj`wz}m0v z9P)TjGWn{H%&vA+&v_8vkq1X=`7VlG<-r`zg@LyI%>L{M3oho7Zyw+72~jS-CTG;S!2$U< zlMb2#E{W&y8tc7+gX-$<%oZFKR7bX_K_@xb*a|4iOBsjxM%widyi|BoPZ}X+Po2!w zDeV9DfRp(TrnbvGB7UOcBbr7#W9Un>jXl5btxWMz?kj&@jy?{WSbJp;#$)HQ&3i{9 za~>%HMf?(mHLhYzIqvs-9ld>x#7B>@wf{uU40ttBF|6ZSOPYin(t=FU=_9JXhp{3}gVwH*x@e zMd8Rb#J!NbdBFPPrK5JwkGv0^2Lki<5`)-=Phc+~C}s7uyAMW2tt4~4mWP8{>I%gGr<_+!KZr{G3n zyC**0v1`3;_^GO3Y*Qb0EO$SV+Ax#0G*HIuex6IC)G@&Mt$K+S8*YI$16ZZ42iiM+ z`~h~Ca7puxgL;u+F3p11xwjmhE%n8)-W5ZgRmgo?kPY19(rLd@F8E{ZtFw;;2fp_@ ze%u1QKZ_3E-GUw#p$kV>hb`0xTVb3;jw1F zx7cdx*~j#gZ!ivIU0duDJx$x?V@afPdRd}^#YZ=7atE{&s0&havpxrK; zoNrQfP}y8PTiv{Aw#lzO@LL&gI=>#YTliJ%rmtxH+6Mhe%tdoN3cvPIW|-w!HF->a zod&-?1iwyK2kl;ioEgS7SMeOQ4fGt$3sDZ*h|`gWJgVB1gM)`Cmu&FsbZjL{HVoQ5 z*te#A?)`&y2l)!xQ*1$Po5dc?_nu8hW~XjCI(zScl!I9c_xSJR1Stozh?~2Lvi38l zx~31>oetc@plpj~-jO_>O`G{{%j|UWrGH7el(kPe4Szd*?30e?q?{$Hjt|eKYVvKG zod};LKFh^)&H=R%OW$SFdFCrW%wt|n2?FRe=X-mpL<>C|2 zk}XdEE=|(OgLW5yE8(>s_=BfcOQ%Z=ZmVtN~8( z<8e~&9(bw3_?Lc3cqC2p0gSW}*T$GbE?DM}qxb`L@zgH*=sx=eQTTfB$wnycs#rpnt}E9kN$L3Ul3S(1f|? 
zBoQT)pJGcK+$1eEr>BIe%XQIG`@bYoci~~O6+vwOmB3C&r{8-Ba zFBE=XHgr-?3`&ZsLsXCcf6C zNBK#D>Q;%?Cy#DZnd;GjrCF)|F>G&n@)Qd8toN(sJwM9zWv;!^M;d+uf~)PkH}mgNLpAykni1NW z#5*@*;!&=fxJEH1CNn0gxf1VKqm|z1m#ca34exg4$Ua;BF)kN04Px?KitKaYCeskx&P8(}@79DKn z(P^lNXUB|lJ!D@VHZtf%^j_lpY+)^h`N8zho`Bzh@RbGcakda+F?O1o6+168D|Wau zYtnRQ*1>tVW*rYmW_=m);j#USql4tu_FC9qVIn;sB z!@%7G=@*55iRUW3Qslrv^o!e|iv!a`vmEm>vYf*mSuxZlX$NVEZ#ap%8jQN$psqcX zFJqfMWm;Wg>kXx@?P@4>h3Uh+sYZ6z-bVq0TPSR!@ozw&EYHezx zo($FV!$|59e$R8Qu5<8Gm)0&9b-8xCjCM`1w2S!7TD#)7%2_$!TkB7o&FmY-KmE|c z9=<%)e8&DZ=^K03e$4;TzJdRmeWSqR2EyawO&$x68w8INzV3#3+ZbI;)a zM4RuT4WcuP9C9^nzRzg0N}J8SyxVwYrsE9QP1_`Le#3!u_$h6}?-84<+Hl}EI(}NJ zYz^XHFXtOf5*%9Q@}>dk^2n;KA6(XOd0_la==Mt8f$=E~F-vsZiQJ7(Hf7ox4s5|j zFy7wKl&+T-A?4ZS%na-Ku=vaxwq6Y$YznfD81te5Plv7Hq@i!NeBZ9s-?AmBp+nBQ ze@&^|x1(d1*b9a3SqIkq_B&&p{8BIDq+x3mdjA!${(gDS^-*4H%a!HDEdTrE?dySm#!;hhnf{%X)-rz5oz~C? zv3=X=gHY@R_B!@SG^7LtU3`tX&7u5~wJkOAAYQ;KEO2w-%t5I8XdKXGuQn8 zp?`M(S~0&pK-pE4;p0kN(KaXZe9c)MS2~^C*w9LUHjJCC(To2h6OVHm#5YIAON_DJ zv6XVgo+>b9RBx$gTwxo+XXn4nZ*J6S?vi;=nOYfh8fpx?zF+8gd_Trof5x-QSR24t zQ#IPV37leEd+waH5pxi}JAT@!rYr8sJx3TLLUUo2R$)ymnod%*2W^xH&a zx9ezY3g@v@Bb)xG_8n)lpwmhW=O0`?nIE$o(YF1l@Dgz9yY#e$MceJPn+cm&sXqDXDYUelWvxz~J6Q$1_^q+_RVt(X0GE#re4T8=AqCx2to z4RrJl>!08G*x=dF5p>r2@JGfyw3*KPznb~sbsC+eG@Q6eAG-~dze4(qb-9EG2Hc_8 zB43qsm?eE-OV{(?ThfOt=^d7Ii6y<*l1{Ls!z}3wn=R#A(uXYR9hP*7CB4{^POzlI zEa?lIEah9$hb-wGmUM|Fz1WgYu%yE*=?m4C@-68@mh=uwy2O%RY)L0r(qWeLg(^$= zmh>S@dWR)lVo5Kyq!TRZFiZNvMoamY^dU=nhb3KNNiVjf6D;X4OZvhFOZk@cAxnCP zC0$}kFSeu;Ea@;y`ojH|@-68@mh=uwy2O%RY)L0r(qWeLh4q&5E$Ks+^bSk9#FAcY zNhes+VV3lT`z+;K(uXYR9hP*7CB4{^POzlIEa?lCmhvs>LzeUoOS;68UTjGxSkhsZ z^o4bn@-68@mh=uwy2O%RY)L0r(qWeLg|(LQE$Ks+^bSk9#FAcYq!%m7$ot^23V)Jb zOM0Y{E+?IAq}PzX)kw=a*9s$DNP3--mbHXhBkd)<&q%||hdpbgvq}HaNZ&*HqLIFn z^k9T}f#J)fuQ$@mNaJ6vJ?Gu9g+>}!h6Rjt7U>!zeGBO)jPzX6FBs`m(!Vv*=t;wv z|Ftr2B>j<*P9iOv1q6oaq$e2ZDWvBb>3Gt28R?0n%Zzj^>77Pe=F0CIX_31Q8)-M` zGe&wW={6&cZ_qG>Nl@w)8E>SKrc;I`7-^B4ZZXmWN#_`8kqcN`(Y}?ra-Wfwx%F2@ 
znrepqH|dKj^||fBEr%Pld2Mk&ZC;al#rOGbBfrEV2w|d?=kJhaElNvYAU)qmzd<_J zNQ+Fg-bjmm`3WN}d$Nuh>1NW+Mq1+UD*!0v%O03;BmF1RlZ>?NKT0>!auzE-&|3cA zkuEjTza{-OBaNOA{;ZLHk@U+(`Z(!xM*0Y8&T-Mo`4`fk80nvq4(+c${|V{QMp}H9 zCK~ByNY66TPm{jINXs7l+l(|mP2mMbT4I!Qp0!rje$q8YTKuVq^R7Mr7U^FYX^Bz( zl9B!<>Hjp+vToF7q<4`1&`8U^85d3)fgbcllB|wb)+9M(iNnUo3%Wpq>mcu0O|iQ(nX{%8EHT1VGy8{DRNW1k)~>a>=93;}r12>X$G)vSpGP{? zNVB#UzSu~okp3!ZC1`NCyR0-nzQ~_9xwK4C=B+F7=9ha5+_~<&%5ra+JFvoCamv(b(`U>~N={8n=jkmOS@Rd%x^VH5rOTGz_T}5}xbv>uy!-<1ij_Y9s>0R9 zfs!?)W#tw3u3cA&GIEW7gao$-_3J;tVSMLw1&4$V92C|uc*xLU!#hTVkBk^Kdd%4C zBHen1sOWLmf9ZztF|lzIIwnq<9G}n$tM(q?`713c^A+c=n3}vc?Y6lK=jrfI)8L<} z!9PXdPhUA>I`Ah2ZeHV01pd^|0{@h#0mEZsMo%)~pW(V8WMI4jf2ayV}3r_7$P}(`L3nj@`}=8111d|9U2{o0uyk7 z*fnom?&^{vuMYoBH{Il3Q5smSHL-vu`d60(O3U4yc$w%n>(p8z`8(gmV)gd?pe1Y;g0ct@3XE7ChG&;EGNf%&pPkSd)6iA-Loz) zH-6oCm-Lo~C#~P~RB!z@PjG2=g})f|869|)U8k4)zua{?5TC8<^l!T`YwZL59bMPG zpE|p*t4F(ao%z}LE4rntnjRf!EvajuW#{|6`GwuPZKB(2OcB}ldQ07N+zSK6-tJ?t zxWKJ_sQ+75vD#fxCc~xN=XK?zF+Qg!rAxu?*}n4flCoLx@!sOedH(YJfWLUMzdU}e zztA6V$(OA)h!Sqf$!5&043zrIeXFmMK>m?qyBlU=pyqC@!=1UpT~Zpj*I(c*m?+3_ zgTOKvi^hoDYyCwZF||70aj3%vyNgeYY>Kn7R6P2uX>*t&Cw2Us_RI>@AI-oS$2|G7w+tUFk0? 
zFRhFhS#>4DzanpPeqi;aL~nlnj48><1$oKd{G=&U<4Y<7Qzs=%PMkhD(a2kbT$@`| z>dh^vbg!-`&&?~Ubms+1O9N}Y1-b6I{<4yy+)9~Ci@dA7#pT*~L(G*)TEyUDx0k%7 z?uufwr-~{8uB<#ieriJA)D=^w=TDoGFe7t}YQ_o@sT3w>;km@6PvX!i*-&7sE*0Yjexo zW!`dAbohetUtU?_EsI|rD5xm%N^=7hMFsBSK)E}&q{Pdh&(1E+UG2@z77(+uK}I&F zk$)@^NPXrXCXnubJs*g^^x5!w&0nVH77LTe|HhTOF^1rR^CnKvlOC1n|GzO*x8CsSw6<7)>63ELQ@YIH*e|E zg|Q3WOXrR^2xW3!g}Gc|m%gj#@zbgwtNqTF9p2DO$1%KW;bR_Lun(JA<1 zYtva-c?A;%mF1Q(O=)DiB9{SKFw4E#U*MiS2VPpVVlvi|m{?Gt=U(Y8ck8*7AXixY zwGA3`0t)2kSCp2*%ykCfFQ%;~2gol19QPOB-0VQ!Dh4Ob{d{HTl}%4#R-QIFL7V;b zK|-n5F#GBBq){76KhuVHtxOeNwyT@9ZjiC$G?Jf5On9EC7R>&0J zIbyL0bQ(s?LHGX|!&K*>>}-xM1DIlpsArvd4Dmn_&Z`Z?YobBzi^Y}8G@+#@Nq;`d z=qaytA7o1C*m3bj?F$e9uLjGwW z_L_>^B3;P3=HaH8X1|3MMQNCWGmCW^xTb#iA7K1};(Tv*zPCtZd$UoCG`Y5?1ntfd zZP}2Zh1jxDEXw_>y^|LUdCW8A;mo1gE6 zW#?ju6xGDNAV53Z%aHVPS7I0trO&E?l$94~9JpMo zH6C>&S8%wvQhK-Sn!5Fi9WpEZRz1zs&$M1q1_Y0Yn)t<5r;X2eeO?gJ;pU2BT?Unb z;x_syKY%)jC1&Am3p5kW;{AE{sUpC8guuvkUzBNPRh9JjS|YTwRK$ z155oZb)sYU!V8KkFw$zg@_&71{|uv7!7`-%v4pJuiGU?*VJrNsxVmr2TyRT8ZYcvM z-xQr?M#CyC=4zCTa?DapOr-&=HwBZw*v{xa1zcgOGbFdpsljM~0Kd2r`MwyVIkRL5 zB+XpuFVn0{q9uuvBs_o-EUn1Nttbyn>QtYQ4OU z<-xVNm6_>tQf_>B!y?6LA2N3A@Ui^$4;dsE`5UcY^nXXftOEsp&uBW}-WR1m($X62ZmoZ~}UQ1jTK(e-zIC+W&jA(&C@dfKb z{~+Ay^!5Fx8gZ-q?0-Ld<`Cg^uJP|5*usBw`~K1OeZjQHN@Sn{e`6(8Rtj0|QS3v; z#CjCR=m~Q_=NRf?f;HSCFq#`432uE!QYPKHNVpwM3~zr$`p`aQKU_z=EEKzcRlV2tN|QzFiV+} za?A4l{;riHT9k{0UW5lkSY-B2?+df7U7vS>Wh^_P@fjcGRjgQn2-sOQIMI=(tvj*O zq+7EK%tAFMCQUas(Fnpm-;nFkcb(>Q3Qp6YTOJ53lWFA(t;V#q!(uhb8V5LiEqoSD zM*IQm6~zd@EY%?|Yvxt+6O(GllG|(7fZSg57O=V^J}(q!E&#f7a-=W(#RcAV*cx_{6unV-R#b%!Q%*Cr9+OU2u;e7d zW-%kxuo;#irFeY-R5VMkK;gy4>MEi%nh~~7BpF#(UJ zkzQRaQZBZUK=E8}vEN&8Q-ABSyh%={w7T8ycwsAn^@5M-$G^VgydVM;sh~UNt`-EnWe-hP7}rVh)T35 zWuVa&L%0Sh#RPFFw#$E36E$s~GGOdL)SKZ-;tWsLqC1N3e`x2P@5SnW zM`B}##Ks2y`W5Z=-_F0&@dTwDQeIU4QyH)HQ%9-FcN|YT9F89-+s8gmDc@5vFE74) z`SR_`5Tg2riwZV(_}sa3BNUf?pxqA8N`$gSy^f#!H;&RGlyG%CKlyJu+YF-BuK)bp z#VhikJS*kjFlD`w;~}oybAOE|it*MgLvq~6Pxivdvwu?dY1c;XbCfV;E2?aq`GujkWi 
zGVkQsM?4Bq2dZJXOb_7)Jvc&uTmQG8Bbb2rxym$KiBh55M?ddSzNI{=yrBGAIj#Jc z@)zX|=rkBAjZ|ZWQk8k?t!lnHOBtv5)g?-qTBSZraMZ`uL+bC$It)t=f5~|?3XW|P!@)-S)v5>cep|Zh7B1u zV&teX*SVvwzab`W;^c%W(`F>inthWeZSKt(Sql~}R$}`n3WCChg^!LLH-19=)R{M? zq}`mgaOs!t`fA>aRmG+E-nVh{L)#wR`OW(8D0b%vw()#gc}tgn`Hs8p`D#vH!BAz4 zTNyuLQhegn=`)jWoRgB8KJS*y`L`}wvh23o@4Wjf*>Ja&{=(IPHDwiRmBex5?aB>t zqh`#WNEpbG*UcIhGbG`r1=Er;7G59y<$;Tr&YgVoh%i@(V|msXuo0q!erkD@>o-(w z+VbGTTWggegGVT;jqN22D%W7+->>vn3JQnY!bd4%m50XO@sJWcN*NMiybKwugwIqm zZdVrH{-x;(|0{M!DC0D8T#SaE^jqdHT6X*0+4(CA17&O1S8b_zWaqAL?fc$S|NQI= zzc~JDrQd*2%Ba3k-i4wSzE!q}MT_oP)XSeD&lIJ9$dHlOT_2Y)V|H4`!sU16tSBm7 zxAB2(JHGj?{Rf_Y_NPZ){Ov1$`tLuVzo7JU4H_14oe~r>^tvg^kp3acm}&BV@X*oY zY@?Sf`RbBh{uFtJrDkBdlA=V!C|_;n@4n{mH8(#;qNus~{3%pAQwfg$687nPDi?V#SQlEzS3_R0F1UbYvP*zVRGK ziq(0bz$X^Z(w;9(i&)@_H^ygW*kM!k8qAX9k*RVNv8cidv#wW|HR8*Go1h5zVvG;0 zSb-4p>3Q^xNjm)czhZi&n?U5OS-QP-)-27Jrno#N*0lPvhE(PkIaaqYiltc3JHE`V zEt6`FXPS{ivutRKv4Ki&K_C63JJ9J~KSE9`y|}ZAU|Uh_U&G?AUbs7R?kqR1@0#$2 zA<>tMz1>^T3B4h#^m*^*jmV7l&K#D+(%Y6T&rVBCpTB%5%+!4G`3PsFe|3>x=w`Yx zIyJhHA*&IMW&Fqh(ROxdu1%O(Qgdz*)E^KviQ7mjPUWrx%c{IMXHl>6=jy& zQYUAdXn(r6t0k{ir|BxCXHTzK;g8SHMsK#%s^>OWi3}l|HRMk9QQQ|rpE1`gXX$j+ z`QFe;#Wlbzr3(T{eZKzdW8kkwiwne;c~^>ml-`{G(R#GS_8w@1&C`RjyF-?1Fnk9} z0}R7bBgy7N1~IH8P+DnO7ZxXCo{K_MQCwD0B3q)o1%VP7Y~oa^b#rNjxIvqXw(cTa zi8L!mQGlI|sOhV7iz|6rTv(Q03LltMfE5#qraTe%i&9P3LjBCw{KsXr3`UVxQC5jk z6{sjBtK4W$r8td~$z53`qj|M^O+|oC7n8jgR#xWqQmQO&OG?w7CGT2xwRp9$BJL?P z3b#gs84AsqX@7AEn7|GTKZN6A;{cRmZ#n*sg*se5R=SG3mg;CClnQvR zxp;TocJ;?v%>z77G;l*1HFaF?I=ZDSuQb35cZnbOaHCZPfwjdlcZ0K)gH6 zzb-DXcj8)KK%);a9GQ8HYfm2HXVjG!_ydduqm&Yyel)8UlZO5g85j+gf_f>W=X68o z6|pMR1qVx#tA#ro!^u*b>?_a|iZ6mSmtnpUF5DBuKw12Xf_OTTg;k`Bp0BmVym z$u0Lue(RsP4ssO^U*=~$t1GK4&W|^i53XExVN8{Kvl&IW&Uu-tZ*VVLp1OQq_N}SQ z=DX+2aWB1X;lj*?w_N?|$6{?2L?b104T-*i${}E}PvYpMGb_4WUrs6S39;s~)(sIR z8kYY3og*vYA4P7%@y)d1X}F=OEQ`w0owkNI%f%q_&T=mkubKicT&IXp&02NKmUwcXuyvNj3=ql8|JRkhn=mLI?p8qmqOqL@^-= z(Wp@%fdrHQ@+V>wbyc*esI<~bEA2xqtyHOEidvDdLZ2m*ksb 
z)KtwO3F$c>IxKWD5#ksZT1So)6|SP($hXQoTXv4HvxeuGjF#S(nO}H_=ByfEe|qk_ z>|bTFW~a4w;oy_>NwelyxGZZDbq5-38mgL;K^YU`@)QOQd3ai>(AgR@8C}$&9+rWt zWm*W@c*cQD5b_VhxW=|{H0!#lsA5CTjfK9Ag*RRZ=8{sKbK!Z`1^9K~E))|wx3@;Y zgnhj_3n#!w^Gk{fNnL;qm*PMACPzI_MGs=DK^3f2U8}w*#4=!q#zQ8MH2$sG?=uB8 zwI?`uB(0^5o5A(#vkPEYP?~9`^Do6i2qf9f3=dHSC3dS4-}DIiT$9A8VF8(9mhhZv!PFgrGhqa=Ckpn+mrK|-=liI zZ-s@~JF4=SwO7e3uo*M1=*Zr&YSm6ssCJf@?L2g;sYvY|d8%zo(V^{hs&A=E8P>dj=Of8B0gd>Wy z*(1NDBJCm#iR=tWLE2GNwnu43${=KV0pwKO(#%wR$I2Jb=xSSN1O{BLdBl0b#zOu>GSJI)b zsIpTqRX0Lg**p^qUGW9T`a<2Et6P94EOjeDtaWv;_ zW{D~NVSzSbt|GEg9_6;_8?y((^F@%uFv`2pUfd_7RDxLnTC1?bSp}`5FsJCqs6d34 zUg#S?inax3M4efik{f7ZU>{YHe+19_Q)+0MFe%0IVDEb-u9KM=wNg~fvP^o`W&w&n z%!Q#DK_$t~p@@IE*s2wpOjYmzv3c&2e-PC3@u)fM`LH8Hr6?AA8Yp_BKDu)(Jd6cu zl(hvno{`JUKou!3q--B)sTf^2zyLhni(MEh<7CFaUS!Nu7vwUT4yiy*C|zZX)zVcd zmv7v?^=zhm1fr|Vo&UdAA$qR4G!q0yhv(=>G^={f)4Z_!h+~Dd#vNz=MSlN6zu!M^ z3HSS>jQRd4Q^OFPJ1271G+&6{ABw<;(}RLtez)}dJ$`?SkaSZvC+xxu)Z(CvX>>L8ic_%KEC;e#}2&D{SjM=@j0ddrGnUX7>f*ZWGc zbMhg1L4V?Lpi$&9C zqQ@q*vpXSkOQ}KE!fS4YCpgqQBOEWRo`LSct=Nni27dhKqAfczF2C~Xtb^?8Lj~o> zZoakh?oTpLP)O+1Y15}4W7ifG7MC15R#xHGw4fl1P-JMRfm>zJ)ux%3Yi!yyHZ9bm zinvhb@rC%N`lkD4&T-MjV;F9iaK)sl8btZW_pZT3wH^`@8p@{6S-3oI-32MBJC@nF zAaD4*#Vb}P;Dvs!pwMYEXU$!J;XP8*5l@6w4I_NY^by>M=c7jAm(rx>_5=m98MEdu zULJdX@(5-si-gkZXDdfB^KiEvi;MUuRL3)A%FLP3E7;VC`RA=lxFF?{j1TpW&1T(R zE%L93OWd$=>m})Xuf(IEi`g$&2oK}i<>_Tlbw ze^yvzlrP$!lNS?P5TCT+CUVIsCK&xHE=wH^n7*`Q4#dX|R(#wHj`ZnlGuqb7o?zc( zPvgFM3;zzjpJU(0_Yd&-F?*T)4Auz0g#S(UYxXw#9s48u3wwwCgT2oLck^J}cTeZx zurQd18}P-rxsKtnJf0`<4Lq4|M`&kyt`Ijjs%K85nd>$)c#jK1~u;VNM zYk?|Ojd}M4{w4ku{x$w}{v>~jf17`oKhIy_Kjc3F$BwaE*nC(IjQ@8#a+mQ6ew+vR zF)T~ew>JssHcCAp1w(p*Vi`*tG6ld5`L3dhR;o6CBEbEN#N`8eIeiU zA-zA^_n(1H!^G`+1-1^q;Qz_r;=h5#!yovc`QP~8VfO%eBXnU1FYF&8MU?PiZu&)x zh!yc7No)|wB1NR4UTGp-WQcuYzsM3fB2N^EVo@e4#LeP1ai_Rj+$TOQJ}W*iz9=3R zUj<*D5zmV6i5JC>#7p9*;^*RZ@rL-7_^tSz_@nrXct`w0-1`y#-v5Z{J}SM*bpI0^ z5YjEl2x__vm$T(On7}NN%j8NKCllm)d7<1Sx5|s<4!K)iCa;v&$OH1@)asDD7Pes} 
z@~HfTyjk8R@BCP~KTb)LY5zSA+%4~upO&AMpO;^h56iF0N6+Y?$K^NVx8yVOS@}Kr zqWqD3N&fVV_@B$y^0)GL@{jT_@*VjP`M!kSrv+)C~D6WhuPuxdE}E@nH}Zgv^Fl3l|N zutV%xR(M9b5_S|T?s%(s4Xiw`6d(3i$F5^Xriag-w{XcaSkm00 zdN`@wF?gA`cSh9wh09kauHXE$_%{9+`!+On4+~*avG0mtbJ%>?QJ%+^!$x%tOJwK6 z;%qb9#x7wy*&bMWUd1xmL3S8CFJh(a22+>1oqdws!|rDfum{;g>_1?s`v`kX>B`?> zq$U5r)S@g6+0u`H&;G>z%HCz~u@9K!9@ukEcL^{Ac_%{!7?{{+hqdf6xB}tI&7(d;9|~g-3*lsbYqR5Oc(Qu}GXJ zmWx$ljYt&di;Ki&u}xeec8WdXa&eW&6bHp&ah)gS|vSUjgoGqFJ6GOV=si85ixr%z#{UGap#TklXtlzyvh+?;|PCP zFp=qTYm=tHoqyrR6o5;~KgL})#!ucW9O3tVZl7xP;*0U0@K5MF-a-uW&88U z0^O@N&dYHnPe$>HkZPN-pS=|C^WKMXe}Ls;pHa$gW_Q6N`9bzDY?8kY+wpI*?_h+# z$X;TvvOeq#-hfRseg+yN`+df_hOr$CTjfYt*84H4SMt@^t0iJoZ^FKAAI9@RjO9{{ zH9SSka8BC7F}h5eE+7Nk%LP$4Y!cE__%}jV1zGvNN z=V7y!vg^x_m4BgJxrBtRTiKV+TlUx)ZuacQRy`IsiehUoeC(>n9-GRhK8EH@z2-3h z#ImU?aO^k(52c*tT!ab#cL(yw9b{24Y=lN_z0r(wn|S6(hbbS7`wkbF7Ue7Bte3(T z9)*i4ZjjS|tbG9BS z+nAFL6IQ$mn4Tb)rv3bxi~`k>tXph@@{b`O?WDL}{IE9?pYxPQub+8~ zZ}W+GL(Yb_y^Isu@peA@i8347@e}2<<8d5oIy-DbXFj_e8$VGxo1P-ek~dkB{+MmX z3k$@u?24O~VyKp{j!VkNhi=9W6crV~a%%&uPn=)m|c-ngmA>(41VYrQPoKCoqxp*Qhe6~+anH7ao2zO{~@D%)tSP%=r zEu6ubCy|LMi|V{JcO{#NGYC9|iOsfHuyiR? zPo16tR#7N!BlCJ^d&hAL2?9RK6psQ!kWieBxFM#SWk)zcchMwkD1GXc0#GwS%xaMnnuy(2UCBX7hy{z3N zV=lb3+cGcYVQq@LdGqCx1^M*G73+~S+PnC&u8NECtx32#3nqcqBi;7cA5pAEjVM+^ z)lqJf`3ihf^*9rCX?fWhSTojM6h4A}y?)eo(=sN7IYe#+JV(yatCLo*i;In09Tyk3CN4g1ZCpZJVq8+(x;3$DRP5&`1rN)3Gs>XN%8B}#;#qxHg4^jwef4$u1#2*xHf6+x`fz- z)d_J4YZBrU)+Qt*Bqk&!tV@hdT%8z~xF#_^acyEkVq#)a;<}{Rq}54rNo$hglh!6B zBqb&#C9PYBDy~EE>p*%PvaJJZY%h$#E~d`P z|1Y}{w*A{4@h=_T9dL%z$c-uM?A|(4V>^4US1; z)#kfdR0m`BpCKuzYqQL!2I$@D^stxTeqxi}NRwS+Nfi9L#`!Stq$oTYWlgRrAW}(`N*Q^9U`H z&yuq}b9j`PAMTUO<%$se44Yhy`}Id~=J}}hnEns(p6j3DefdGq*DG$k=iY~7FW+}h zbxqWNPMLa1+Iy#0uDWRd)tSGly!XCOefn!pKJ%RyUi{&Y-Z=4_4`4Vned+4>#P#Q2 zu>InzEAK;O>oecMBK-1+-!Lt7iW$59{4HCzUwlnYZsn&Q`oa%idO393QlPe9e%1bK zGIMh8{nXcx<%J)%UB` z`|eL(eCg%wJ9b`v#Wk6C-FN?!-+KDF=U@E6YtzCbuG;_3yYGJxICA|PZ%&zCSQIrk z^QN00f8v(!w1-E`o48LTS@8tW#yk;vU1fUPdxqH4_U7UukJO@qgQPA! 
zQeYq(noEYb`IMQi9iBO!%RRy!9-6LgmMbLJrn#pY>$SOyGkr(2YZnK)T(`Bz+3wr^ zDX;K^2So;j8DYk??qK(9_Z6PyuC2i_nxS#IIwVG$?GBNFW<;%8eW?sI>Ko)#xxtg9 zFL&Mc!L&$y)wC6I!PEs)1J&AX56%jiSyk&=o?TdG1b#l-a0Nba1>Q9N z@&%a~bnE`GK${-;u`4+8d>QOc(zoh{yDVg$yi&U&C~$jZRB(7unija*{aB+Bp{@Rs zcI)d)Jci2^_^S8Tzj?TCnH#b9YJq3v962?Vxj9GUgv;X*x*jBgT_IwM=H=7Gbk~e& zVH__l5VJy~Tyynkeyw($ctW;|m&Gfh*LXGP=i)W-Iv;SoCEnHs#UFh`+Az!|-s8r3 z=U=#E=lx&&;(r{w=Yh{Y{EcVsd=f@p2^U;=*7vZ%{`jXW z51cyv!RH@Zx#~Rs-Y+zK>C26eG=2S<_D*+*F>~(vi?;0g>LcAhY4FUNy>Rh`7rlM* zPanMSqUKw)c!_^a(t5m!B7M(Z>%|cVa}QltT5;1YcQ-!zL`z4{<4+V8J#fv!V=k$! zkcT8+wK8zqT)BE`lolN{&$Zl@qD@&Ac+?%OMQeUNK4iz{TN8uAgZ0Ssx2%%~^`O{r z*8(}m#gmh?OI)k8U{8=I*>|311i_ZTHQS>Zp7iaBYeLs}R_ejGF4?`quP+Oqy(B6u zB4`H+NC}md>!E@!d?TK=V9_(J{3BEO9R^VwqH7@k_hYpp8 z1Um0dI~ZEI>i$=5+xDez-?q-POxy2X61+9o@0xL2%T>9TXzM)Fl4%S)IILIxd|A-L zZ{NCRg`B48w^rY!UFQmwK_2g?v$h45Z3w&*T&fq(+#2|Nm~nZ~tiUI4-6rqYG<9ZW z`uxCK%LA{jkh3*$Yx4YQ>s`EZAn@+eG~9`c+oz?ZT@d)*1~=FCy5_`-Tc^ZmImTtd zfya~PhQ???829eL=Wp*rqeEqxafJu7XsV$lp)r1a;f`A~jG59UJwbD2h|3)u?A9Sw z13z9IT7GKb2S&Z zvz6SvIOsC5<|_y?%ZoG1igL-?HxtUmm_?DiH~_d^^RZ94ve?x#8t_^k-+aTDHUCdB z4a=9s`eKT{IuIk8vR2G{FKZ=x-7aR4~ zw!h9>kT!JfTRV3aElB@D`9@ z@bV%4!R5FD@~{<9P6YmVYDnx%FVBUz6M}1t_`Eq+g{%(>;*lB;f^={#mmBnDxcW&% z22F=p^8|~zu=_%GO-GVoScnO+4q`+T5TksakUWIKE+pV#A{;^nsX<%k9vLj=;Xage z13CR5531-=uF>r!QKc%OQ1do3N~}Zu>{90PRNO>?IM?|uEQfQz6I_r+ok=y9Gn zg`-4Q2#*fphcuWgLU)-ZH0jks;dSF~7JQd;@&257uovSVor{nl4&lzr#X^2mY9ffc zvbXOBoE~tDo~0SlNtX$c6#^n zEFW{{3%ukf*aw?5=mDtvEaBo0irLdc`4W9r$VwTDwh3__-we(RVW3@fUkozBg?29! 
zI{zcFjpM~&*ihqGkpGH*#>FIBsrjYGzXJM9d@f|Qc8n)XU5Zu)%hjOh;TOngoDN*b z4G|xNdB-y)G1AQ&xYTEwT;;IioZ^wL@9Wf%2;wOEiBfU#d(_JfI7jT&33Dy+0tFeY3(cSZvLl(L=#$!K>4!_L=VnRkr^BEudY=P+6CszBLwYvsgAaJZ2CYu8 zR4pJ_hIX2><61vwRb1=ftd?s%oYix!i?aq^z*x1=Y6Yti@l+u4F2)rr+vmftR97@Z z4Q_N9_@V4pS2Pg)F5{$&4Z4!ex*=^GYsT?HGxv9Bte5M(k`3{SPK^x;{P=U5jD)vG z;;m8vO>fXxl?JR?MjUHGz9t^lq_K9cw@cQ?sk9!-*B~R|ZKHgHQm@k3khJr8*-7L( z$)nM_8Ubr))ZwGzRJ$4^s|bL|FxRUkYZuW#oDjw-!G@^tGwVkc>EL>YWc|FN1#P4x zjWQD6cBMZ?)##*?yQaC)xEo zEmlZh_(4DD_Usnk6cALTX4iC~>oeR~4F-vidwRMXM4lU8@#s zzg8(&CD%@HR%1>SOqV{+8qKNG#ETiL6Iz2{^#Wsn#$ZTVIjO&=Grxd-2XdRO%&D+S?esH2pN8!hlt@4e2?(GqnDAlM2$~KyWw_dU~ zp~9I-VT~ayEvzAgWhKMc&yB8N%w2Cy2n&d)+7LD%jHY08uD2?L^~!qQ9Kwb*<767Mtvl!nrgI1u#Tz5 zKm_Za+J*{NPK)Y{V71eAU`|gr8X{QZ45KZAwa+MSMVc^gV+1=9rUTP7(?D5mGh;w! zaApkr4dKxNlpYSI_lFxNBG_O!4pwU-4CJhjXr<;8&$nO%p&*_LaT^dCgbgHZjijN| zFEEC+lY+Gf4I{Z-Xaj-o5nyuG;{cy zY1V!4&`cf@)9CJX^B#8?2DX`#@QZnbFRxa*+IVz8$`ibaBTy}opN9WvI^<94leM%M zo{^vMEuNDeKIl0E54uT1?nd3!;?b*gdBWp^zf#8xJCiQb#hh?edt7yHeaJ1l-9GqF zx}E9HZGMDf>|(fZy;ZO(p|{ZLV3vS>IG1tg#k$1siH1Bl&8_ujUAjIbSb$S+RdXHF ztb=PPv76I6+Q&7Br~$6k3wDAZhY%1tI(pEQ6lPzBvQ*pw7?`#hWyCBRE9X*2JxWVs zzI2n07ppL%l^AP&KgT^jXoo^iX2lvJ%HXleo9ZXok?O@@-A9WdrHhO4V=XH%uoxew zRjrBpdT4>vPhwRU1~35`4NQ|X+8}5pR0lI(P;5*UL{KsXiOx(s{=q8J`iK{L7x99G zjuodzd9zqdhN@Q%o;lRpIV%{c*AzTF!paO(E5P@uJmb}W8rVv#)mk+y-Xv=+`sc#O zB`|ztc}6$HB%~pxA7-e;JcZ^2{Y9(Qu(V;OnRP#xa_ljsV@wV3WRspA>D|x#7GK8p zXMDkNELJDMN6d4f_rot}`Knaw8Tnf!)wxIN@b}RQcaGy}w$%qs5`BQ>i&e=e_#0)I zYP%QX8p|luCt6@89%EC?aiEl|LaM3J39J3ay3MX~6TNqd;`bOfS)Z8}e(f`6* z10_Zn!_Wr=mId%H3hD@!4q*&peGuM$b7Vm4>5x|Z3g7-_34TN(e={e4l|aK!2o2to z!ZpP5M-R?Bo9oz}y3)UA*EDC%5!HQt;aZ@etS19OlOP`3*pW1#v} z^l0p~&}lM~02-1TfM_OV)2!1hJb=auNY^WaEJkNUn+v2FZH+O^v5XW9;5weA@<5h= z^Z-u@&}<)-RpX6EFGee*0Vly(1I3eQdQ-J}4#kr;R9Iv!T$Y9uSP?+FN6qnb8Ow9A zcS3Wpiwb5n8k)=obiF#54eB(O;82g{c;PTh(fc%(hWbG5)ao_XD#ALUf(ukNVD6== zw9OTU-^|km*9IB`_k*L!&Y*NuqZZr#w)XtH#)3DNF zr^<;^9Y-mOcBt9k#Q9~mGgbzVig!(Rg?FRI58j77V 
zuiId?t|(wSp*VLU;EASy$LKInR2cjLU2ij3m5%mz>N*g;dNuDd*omMaj!?DX?=jeb zp`SF^pkY=BWi7DRq%KL48cKeX$TAm=R)}UfGKQfeXwW+~?@64c(Eb^Dqrge54N}L# zH6#rrz*ZanYOTzScUSQ+b9N!Eo@A7<6!bOxL19`3i>ySu;1Hggq&0DtqhUtn(X1le zSe+vTCq$S~bsR&F;=Cyu#u;=<%+!7!YvQJ*X;8RNLI1)rfXOpOCrpcFkyd^6WK=Ar zz@iaGB%|6m)ahRCvuLz)4T=F2OpKLIfq?_(s0$XKT9{YWx*GY(OIVNFICTkY^%z5! zu;CD+dpqki3VZZev; zvy+>BHJ7sL6x5?R#aENYdQ!qpgG@>W&>dUSfT`JP^k2*xwi-R#S@YH?jIynMpigb3 zB>mf>;6J@BtZh51O$}>Bl2l_jm9?iD-KnfAwZm<<3P7k z@4~QWfjvgm^{jeNB?ef{<$C4y?8Fs%;CeQkrPm#0f$Ulj_?>6e9%b!NhEc;jU-b>F zBj49@l%3Akd&^kUbs4|}3Q~coKB8BnVMhovbR->^&O!}&y9+hs?JSCHxtx3yWvu#oz3zHeQ>GzTZ5hM}3mhd1RY#!>TmAniXTvZJ9}cZ@_b0Iauo}^qz&axI zh6L6aX>=yBj!2_1iSta`c8mcY7V z!YUKl=@{cg0_#~}w6A5gtBjg>)*kEYOJc*Zb-X8u)y8EYa57Fq0jJ_L6mU9DLjjd* zG!#(1hGI{w2}8=$YrNGU9p6g5jPbe%r(bjerk`@*1kC8e0jZ|fyIH_x47+f6(W>TH zqAAO@K^JyY(QVKOCHXsKG;&mF-Y#g08iYx&mfqxI&8{Rwo^ZujWz5a!;MH_=5DA5^ z6)GOh7^7FgilDy;Dl;8e8*Lt(!*nBlSkwAEI4IYf zJm|hg8q`P^oJITZoH`^h!{80PwfqDf;0%)*ZsEO9aY@aML>bkDHu;46x> z2jTsSkJ0@Y20XeQCzc`_dQ*#t#Gb-FcJL`0RFg(?7vCaIo7Es1UaUXZrwAQ6T12#& z)^M~tC?l*1OT|bXLkhwmmg{*8T0}IQ;jK{3A!(%sp0rABz=JU!R;OcC_YQh6#tBo8 zWrB5UTD=DyM|2w9(XAdf=#H-OU~MP4VA95#2QT(RVACL{H4N$qwkX4N4v(^}Jwr4G zM!h*?%r>Qk^^m4y4APpPY^DoJ2}9<6w4#||FSK`vKOWmjDbdd+KP^2ZtkaEiek!X^ zhCzQhDY1@rXsJ+Ns$5hvq+M*zWf(bz56uB#u$sBUDm)1=<755 zjbW@R%qq*v`hn}8;cp0Im0_6VpgEg?#rwUO=DmXUhJ!*sMVAuEicVBcG&-x!Jd5h) z(HJPhJQ7o^h7>vEUjdPT)kWw5+8jk=>NiVotp;r-`Xn8A`^*gl5H%XjE;A4I0LT-- z9HrTe6riI;;t&t;%A_h+(@bbP`iU^stn2k*(5%Q`6{I!HWYs}g=#3z-1bA;!a~K<* zrU$}sf@-vevF;hh$r)&%f!LuL`so?0A&l}i(f;2q+ZvNvkH827=TPQ%MVBx-u3$CN z+jRwW9r9PYdPU2XsK2IVoUBONpCqQLjinsa-HZ%pJku`aT_~|QH-Sj%NrzI?< zXAc_+FGJG$S>FE3S@W!BBWm=jiYIctjw3+!gSQIUhYSk>Iv>fNkio;R?U zwalZD(J|lKxrg=5_crcfO}+tHy_Z!*L*lWHC3@#BR_E9IcCiLO1SxA;9*@ErR_b-T zS<_1N9&24))_4VLiff1e#M&st1`?>%t%-WqF4mTGN>=V;%^Nzm`8^N77edg1Mnh_l+Artf2*&Z6+4 zry;;SGYr--3@>s7G^EkMf((4fpE2M1Xi(1RlYzV%J{gcA8PInjp;$(GvWrDtgk=Rw zDy_md-j5LmN8y-_jQOTR(bxnT1K6?!(f|<48|wf}y%;dfEi@hvhBpMPQh9wydDF8Hb8EXejKLomforghpCKRM= 
zQBlAcjE@??eMhi-0+zwHq!qBE2!}d=gW#AJ4khV&)B`Z86xPCk1vfC33|NJWyGFq7 zn;{1PldBPqz&-W-$Pd{50K>2DxR~!jv>kAu4&?%B521d5cr>0>0&e&+G)BPmN1%BD zHZ(Cjv5$Ps$OoA7b;j}lQ=VY#IADJZ+6CD74YVIHz7_47jWA#&VEa?Z2iWi})Dy6> z4dnuE_zw20fVJ&VeE`$Gk0A!w-wBZoIP?P&Pe}hm2n7mvqvPhd*y)!+7clu1&;iW* z8AJqNb1&)v*#2|W1F+yVEGK~dfP;X2uY+$5q9t4cmIg6Z+^ue4Z0piD~SQ=nUDAXmuv>7-a z0t^d-^Z*Hiq27Sa>rrpO>J3n30n;vmW(gR-nX?AK`mOZVMx@(@qZ>eDJG5QE=u0>o0IWzy zd!kXln>q6V_5?s5u;5mt2h6$+$83ZL+z;4xJFZUvJ1cRJPI&wvTODA|T~JB@Tkqkl z1@QR2Xg}cKeb~eU7C@yN1U&UJXHAQV?}GI&LH{g;;&~qUl7M{%VAeXQAb{1I1Pk*c zK25M_z}_naiwCT^O0YCQqg=2u!1NoSkO1aXK%oFkI*vnb!1mh&>jkX79f}R$iF*X| zFU35r7Ay%c=3bbq0Q&C}EC;X+ueB-zjBJKt3aCGUYc{~>CvhYWSp5xLJ^;pl3&ums z(Eg_div=8f26O<8=WvS;*z&w!1%NfsjgA8rzkv1uroV{mdcdw9p+11U-GW6ehn)Bc z=m7S;Bv>+F$IGyw0gV3{uKWOF|A0d@z^r#sAHWa&C|M_PaMLL5MaY$9NYsA0CoW;--z^p zt#?S4zRJZi?gU-H?ssw24OsAZ9OnZLW#DiK^P=`j9OGiVow^3c!5CMAd9c&~Oe@w{ z75J7?t}z(zvi7@RX@h(c;>KWCi$IVL9}@+fXEVX$)Ahsa{Qx}?s{wugfheiSTa4$}g2nQ?kP6Z?Hk%Ui_6&|=7n#mtp- z9t*0&`q}JfvSS(K*D@xmmt(zO&Rk(Dm`q;DT>DosS5qvLJ$RU_I*z$<2IlTs%S3t- z{`Vr_Atvh#Y>qlEF+ z5~depEyCf9tEUX>csX-bmotge9aq&2OlDOuJq1sn7u?8#s*j;A$C!2+F#0%ibsT5< z@tc?&yqS51Z)GBI8`I(|nQW~@xpy$xa3|vfcQUQEit)ZGCR^^pns^t}3+`sF^m`c3 zxd(LbVWJy;FV;I>H4{~UO@Q_HLLT4CLZWJzcA|#4{GVna>9x#N`x(&xEYd&7wBrwg zCl4~|qqVS(xjO4uQ19oNXnhDez(dTXeStCk3oNMPi&#_f!*zNCbDeBJn;vE?t&z#F zuQ2Za3e$Q3V;*5rYeK(ay{v6wuD-8f-Fy`7cpUxrB>4IbX5>AE{NG}(KpSw+Fe9y< zN#j|hdzN{sJD4`qfi?bljGO0~EBX73rF+^t=+{`V)`#}=p)Y>{9prWN z!!KD-;0+d%`X&ouzhYYJuhH)(nDG6E@uA;fYyvj?7VR5E-~W!Wyx(DL|DL(JPh!me zk-5BoVzOxnX@@Z0u^vYL1>^LuEM)L+OrCg$$*OnJzG2{pncV+(7F7Ellj;9tyzHOg z!TacIm{0cNclGkHCKhlWT!FRlI7|sU0Ha`dS?S@SwK{hl4}vU*bz)Bt=cj^r&`=1+ zBbVIWGzF)nQ@Geam21q)S+tjjXHDa-$m#G;=b~*o=XJ1lPYdU~C4!5{NbWfmg}ifl zSk62yPR`@Jb3WJdd????MK9n9AJ-!na?QVp&nQ@gG8e&2U@`X;W8JE_i1Y4?xGN_a zYk4vk)f=%^ZRDu>W%Cvu(zcDev{WwIQ?ah3!q5=w+KKI4q+QHi zr!VFrfc35M5-y603xr57|o!pg-b+0&`i&KDEySSFV8$8$zUR{a( z=vCa~%i+8#mwPhuxx2rHy9c|u+y6TrlJ^H5-1sNXb=ZEi{TWBnf99SnY$B5W1_Aai 
z=kf1CuXvZs)PHb!`aQHw3U?*!RQ;hMBzdZs-acCdRYZx9p?N~|F2b6&SZJ|vg0-zd znzh)0uN5*q5o=iQ3U%oV@=yEw0gkC&B9fd zB7*jB6TvYT3r}~Na1~!FTwyze^zIa{tevp-*$JZ(z@%M5r0o%W-yR{FGK3zpPw)*_ z2<`Y)!qaeA1VzFCH>*H!c0|b2M+A>AL>&r+W)ulAR3wbf>jgJTL{M3&2pT9Ce7Ia_ zVOWQwZV;|Uti#O}BB<{Zg7+K~uIQVDmV7g4-2&@|TX1=Ni*Tg`1W&#d>*lS3*I=El zyG^+E-!AwmtkuJ}!$PT2h>AOeYu}y1mGnsw(sGZG7556!ey@;S_n}?)3(RLP3BOVPI-rli~D)Reu_Gjs*a zysnaZ*M8I^Q+j%{0S`#gktg+nYo*q39jrXAhnOvqo~m-GrQRTedvB7W_7?Nt^yeu^hb}A{afd8-H zzUvjqyMGG(>8HrkD_KgfA6X!OveyPttECFMeB!eS?x${1@&~|15*cpc@tc9k<^9ki6?Xn8=)#qUE13 zet2K<#$cQ}z@&U21ee<(8jm(`P=?1eP6PTvH7#n2#*(IJym~4wy1km{o2j{tIT}x! zqlx2qD643M*Zr6C$c1_@=eZ28v&81zU;Z18y!-w|s+Wd5!0TeTscyJh%ys23NMQ05k#o75kr}BGL zrjMJy!I7WNI6jKA%YVd?pUy@;inH@S{%^Mb*I%~!Awv~*2RdOj{KYuKAs1-@X7jZ> z%R(e)TW}CM8M!+fEjo6yGlQ&mJoz_T@%A_MF}e2bR{DwJd(?1@QTcslF6dec(q5RH zo~sZF*z{v_EX2Np77QG);K8q09@RM_vqu61dWl{W~qT}RS$6<;c zb#{I1_C!2lmA_9V!Is%9zvXKd-p=`?3fuXsR6e_2-%#;jv+43~uyb0_&R28P3b&P8 zZ~*%Wavdta&4=B<(VTPQ?*I<+;w(JWcNR7dz5}@N=zJ4+$QZWrQLCMF7D?_$_$1*& zXKmzs=J(nB|BK3pEu86`{FwXLnRIa^XqI=Q!e6HHy#wgO$BEwyhLH?%;;#WdbR2vh z@Wgv(dhBT#$toxQ2=HFuTkgl#7zecJjQy4``Ri6Y?9t$Cr`~ME*mA03i_6S!(^2G1 zSxSDdN^j$BI8EVg`FlPh!tvRm@U~omJ(iip9(Q(r8*WkY_Bb3?^$~Ug1?~EM)cVkL zwby{Lq(T&b{!f>8T$T4>?O&wIH^x?Kj6Y1pPgS_#tycex@KfPGE#d0>!MwrmCDk6$ZxNUcD$3Xl|Qm_*ztBdZ8)sTEqKvN*Zczuc6C|M zPG>{^kI#&EmN&S_8mURq7X0=T7XPlave6Z_DK|FYPJo<~FT=!OxQXKFZh>5q%I_@S zui`UQyw6NM+RwEglD}J(uRm$kd#B1zdJf6^dlWuH;Xe!fQuw2Ar!m%9t;f2r^pc6< z+gDnW&yKhGY{Q;di_$+-x!0&do(7D;$BBOiII52m|C0&Q_o(zkgYdvP`@?_I!q+Qk zk5l`BiMJ}-L)e744>v=`rIlt`s+w#GNNlGr;@hH-C7WL6}!j5;g-=2RH z#TU0(^z_9RMY~)Z?pN~OQ1SOYZSlvJle3YH^!bieR(@x^y^nO_UqbSDAVppyal* zzxS&Od|9R6XJuzNf@ab1JMn$MQ+ZDO?|`Ry;>7<2_zT9t{}p(cVX%HxFK7SLT|c>r z;>pHfY`op?HjH}PivRL=EIyz((>1E_0u^?~JL^yHBy=I|MEt9}z@ltiXhD0vOveLN zFS}m$dNi!~;Zq6h_O!olNw7N<-OE)xB~O6giN`erTR9H?7z7>3g+Q`Z zvXk!{@S+BCHJiqdufyF1Iow~E@VxI@?L2IT@J3mnBj9gScv~L*CvY_1!k)F#+j))s9z2&(x>je`9+})yG9pjguq}pxs+lH|Ue_G|c3^d5~ z;zM)Gi5~=x#)}jG9&q!^93i~K3{O+?em5C 
z-?!>%%b_OJhsLWBv!Ekqam~cE8)O z(#EUwS6RvNTMJgb=ut3oHh$v-_)6f3*Ut2x1dis56MqkIbMX1nk1hJPp8XhbYsSG} z0X8fJ{sWc%3i!#rgAbMK#N)cu)UBO(THm6_N&gD)vE$$k%wMXflMYN%O!@QsZma(D zk%pYTZwda1h0nCm_*FVfpK{`VI{_Y7pC;cPc*&wOSJ8PG<%_kdeAGDjb|~bu9y-(i4tSCSor=EQep-gl8wY<8+Cy`& zLG>%Vrd!uz)emeioil#9ibrLTcD!?b*Bt#MRT zMw+2fJ)Ggrm7J45xAa<+LR9*@RoJfAXI0qd56w|>&nrB&mE2B{A)fRrJe@U@bMoJr zuNwJiUbM_gA#n_9e0F^mk;q7u3t9WPnPzW%R`A+;P;1>X&_bIDIPQF&ahK$?> zJQ6{!?zY~+UMr<~yWRQgO+?!_h+_LyJ` zt#wZPwZPN-apFsXr@nXM%YY}|I`KCEPwV&BRJlZx9LZhkmoA03$I}pSG@l0)-X0Hl z2E{DbiT^wBi^fU+9`GcqoasFn##9FkqzhoTz0~6qH0^T=H`kN=9 zk1O(V%Y9}7`ndKs>toMf6lFSVRN~wWwio2=czfNoVa?4}%Ff5EbcsJl$HCe25+@lL)?mHl4k%B%4)oHh*lG<=~^42w!%7`&`eiw^JV2=Me!Vr=9U(U$^+5@S4Sc zTMu=vL+L7>)>d*AO8)rF&}hBMc9&c~9;zXit@7yd zma8z$F`BP-du=}1uu1j9VU>^8c5)<3NxzyXU*xk^eUEoouwTK6(vyuXxqT{~?|GG9 z@!77A4XYLXER{azL(*%C{(c4PRK9xEt_5c4*k1sW?)zPZxBKs>DokftMR^TX|6F&_|%472jUei%pa;K+R^CxGz1-ljORj}+%#m`S# zkZhyL)!l7Tu;sPQ&(wew-=SaeO=quaue~njsIXN_ z{CV8z?*&2Y#wyFPY(Z5a5F^~0;;BmUE(Z;!A4K$#)F1($k#yFyNzs?^1lQ z&y%9S7FtuC_-dsOJMneElm0#)egW`{-mv(*2!3)~RoK=SNw*}q+NAK5hg>2)8}V`C z>D+*1trLGfaF>pQzYe(Z=oA7!9-SgZr(4yB)H{af z3^*$DewClb1-VRoFeKRz72eiwF5(uyop_j#oBHp(w=6zvAR=&^fu}Y8N`>EOVqxb7 zNIK3F3V(^hZ^C?{awn2Y!%FV_RIQ6Pzmk!M@^#|b9CEfCvtf+FcPaZx8%~tor_%ST z^fq*s`x}%+vi2QSAL1K1I`^i2z3kT(Kl~<^v0GI*U4^egI?7-Ej+O3eh3^I&4^RB0 zao|i($C5M_f2!zY!A}m4u$kvy8{f6)UuB_TQf|?A;$cQ^mg~e{itN;1PW;^y;D6+{ z`r}mj`2A5;W#O|`eJ@imRlyAku2FD_f^!t~D##Ul_itAF1{Hi=!Iu<#PQexhzpP-b zf_E!epzfl_ z3>?WnC%yzYk_F@8Q^&!V0!RJpOi%KG=C2cfGjQXTdmHdH)}84;3EYBl@Rbwb?*@L$ zIO*>JZan@Fe)%}*zW^M`SZ94&fg@S(#6JZbtu;meu=?Gek3^f~rxQ=K$HT`>fWH!W zTC<&WE&-0#Q78U>;E4ZD{2jo}9tTf+qxHy{ex8yKPW)=%so$OWc;L5gA;YCpWDbEXSLJ84`D zDY>YUjqE#y-?rrCMDg~$tsM_?rnBMT`Br`Hcqe`TAJ3$(s{}Ue|LebwZ^3Iz$epm^ zAFS|w|Fp{8spvh83N3-(iMRD-C*IbVop@Vc9uL0&^j}r{9#G@5U%@98{uc_iI?}zM z!aq>(X-E9aD*T*+eTr_cf=@Wo+vWdM;lHQgYYH|y(s!$HyMiqW>K|BqwDr5CDs0QA zEVP;A?pq3P&$A9kJ~zfYtzUlg+d4BwknIJYXgB=L>NmoXv&WA;zU+8=T-dN0&+3@& 
zeG5h@I4Gzz=)vYU_Da$UlME;K8zd*pMGB8rza8O~GX+3D?c?abm^ zizEM&Dr~pbSzfD(_bGzCDok^k+;3Hw#xgm(e48IO9KP5pKU%ZeO=~8(3sl&qw^M}; zE1F%U!gl>WZvJajewz=@_GPMgo8NZ*o#$3L9;^P%UJEu(vtYK$?^E@qb#MVbPCV`H zsf{*$n=dx(y~LvTHAT-J7rDqo`{9Z5olx~Sso>yri_RBaXO6EsQ4q;hC;ndGY5n+~ zO7DZ8+*SAxg+C~~-9I0w@Ip!iH#E&E$7=jNial0( zdq0EMIGA=KGXG0IC_1cL~odCaG;lHWsLu)CydVENxysGdagoGPV zVY{8qarcgjC*6izB;H#?&gw0ARM_UDGrzMuO}Fyf^DbS74XpRlGgU z{)SZ4#!D4`v6ao#*-0Kc@wCP#;PXqm@PzZ5-V$jD|lSNDg|p5Y*4US z!8Qdu73@*4U%^2IPbtVgW0hwp7^z^ig0Tv2P%u@&3d6s%XUNx@bH zI~43zuus7g3JxiFT0#A@R(-=1^eGslV3LCS70gqxOu>MH)e6=r*r;HOg6#@+DcGyv zfPyC#99B?!(5jbL!6*g&3dSp#tYDgg`xMMkuvo$43RWptt6+nI%?h?D*r{NTg8d2( zDtJmkyrb6~(ey4$a*+x~D;TTb1_e_U%up~(!2$&<6s%ORM!|Xon-pwSutULa1^W~{ zq2Q2$rxnycr}|q#pMo(8CMlSrpxLN@@n!4%&LsA})`@q9?ftP6?+p9YdkTK_Wvl+q z_>UT%sJ;{7ZMv=BY@%bw|L5HQtL?JK6<3~-uQ4oq@j zk^_?*nB>4D2PQc%$$?1@Ombk71Ctz>Q4*VbFz_bPF#d@(9AM4u>IQHX1oHt`rv3K_-`XgVl zzS|53G-S!x6mWZYZ!FdWx(SdylCqf4^83c%2r~v7kS1M={vJ@>W0qi*w0jd_ta9m_ z$XcO*W>;xrV4kjs?4e3XJHo8(p3z(z%_^yO?%ot|S)`B$e`A_x65C_NTLl6W2+{){ zG%p~L7MZ7k4%!=VnLI;nR3%5K5moSo+LeJ81;UVm8Uw7A*lr0js!V$<>J(vRGBZ+v zX0!Hf+P4u5x7)f&B}WUkm_j)`_@eronN6<>5=un`jg)1k3-M-!ZwmOl zR7|i+r5XmZXocp@nq%_OtVIUA<`4j1h!Frm0Okl$gsGf>hbj{=Ob&w1u7xZsFSd%) z3#Q;MOwLTdaZ`3_?#=_(<{m7|%($^Qm+iWq)ewG5#lg&tWo0G#2g=KGx0I9=m5kyk z58`u+%kql~N6|Zv=9U~PD7wLsdUttYS^g0xjXgJ(mgOFCB&8ZSP?u#Fl#j(xlFV(n zg}Ej92R9ZJ6dk1UQQyoRMTNP0%1X))ma&@YK$R67%{)+4R90G6l3mP7>Own$y*0@> zHx}e%mX?(t$UKl=n3G$=uHRQvqBwD5S#D|O#!?W;FFed@Scz_N$i#0gDLP`sY%I*# zlmCfaO5h!nz{W+IxXoM33l9=|GI!+Ou%&_$`^<=xd~g~#!uZExDo}4$5<808lUueO zEFx@@i3Mjex8{}|%p+9t2x?pY(cHq3?zl7?335v*N$N-vvkoKu#Y)mgV~R?SWEY@0 zBLpbtzLEHJ@WQNE)|n`)NqHm4-MOV@MI|OviY?^P?1IdUypr7PoYL)u`DOWLDLF*F z!c4Gjd)l6|?6Ta#J1eB9oa|B0$Z$z9py(3#*C!lGw^B{2ZYIYb84C6yf_!r3QQk^M|oQdvJaaBtL9Ano`NECh}4ZiZ!g_b zexQs53US|x*jt!?uqY=N-Lj>SJUOH7ZE~jAo3S-%G`i)?=yB&_o0TSWyK}g8Py|Vc zl-z7fV6c)dK_X&r;SCT1nVYf?UI!s@(5!3E*q9UyKuEt)QO8R9#>RswDcNP&7^fr> zsi^~w=;RW&wxcXs|6;+vJ!oDrQ{T5e5&y0K`Cp>4aVCJhd()xstdo0)4Z 
z6s)Ah44cySaBf*<{t>h(GY`T5A{Rv3#)~dgQ?FyZ$fHNHb8<>DOAGRIOh$K&CbEbh zEW$cNw0q1L691Uw75OHvk8s%siY#soQ0TxXa!brE7^Kj_k)bhU1`CR;{u~|&<>Z?+ z4`@i8Q&4)NDVVD$bf_fPtWu2`D1}g?YSd8(Z9jN}DXJP&Ft_k1#WtxRt&By7aNz6dD$_^+l)lj&&s5HMK z^GGf&Fo(@#btbyZTop187Gdt?VBIJwGV9WCR%9u%Wfu_rrnBOR4LLVtTix4oRSWfMcM2%JAhuxG)GH~!q|hUj>4r-C~`7y%+D>b zMzV=3D>3J~m90sk3aru4LctqK^2@BDI})OiPmX8e&8#B*M$!v1X)NcgO=2-6KbOG=B-@A+jnvN(;^eVgNrumdlLvbvkQcq9I_P7&GU?$bK7W%cdiHv%{r8-6-+jA#itWVC!kW>2P@mqK28vW8tngRXq? zQdHR3levXFZgJ)1^}4gOYl_ym23=b-yLan!k+R*@ow?cN*r2G^bFu82{Nmi&+}tpQ zw!uB@+K{=H4E1Ifd+Lf-x%2ZIyWIn>VhYZ^KC56Y8Q-3{l^o`+b#12Fb-J>t9o#e9 zv)Z%B{tl9EnbDE8I%_DaiyDL4A(1yhHr=e-;_k}al104L)lb!)nZ1eHaoG9ma}^D{ zHdA!7#+;TNafQ2G9hn_7hF$Atkl{6%m4)kE?sepJ)U`T`3~kEXO8$GY<_^1ZD9nNl znO!q>ki8BU#k$@-m{r)t8=%*<+U4$YyYq|l=8o^yojLAuR~GKbnz?31UN4m{9L!wr z%66}Ijb#mFZON?NyhomUQ)VZ%AZ_8=+=0yQ%<@EE)(*ehUDe}qFCK9fuW@a6HIBQw zGP~UK*Sm^0xe9Ch$!p=_a!>Jg*MO^FYi6UT!{sT?-R8%2Vo=JF~La zxhGts)bwR#Wi4&ll{;M7GiR35lD)xITu0^CxyI;kB6~Qiyeo69t2}4iwSBkV%$Yge zuHMY5n#0)>`*w+Vnly;lWJuvDRH%o1NW}IpQkJS+XIsociWx zw`8_28Fu#{IFwaen_HV*+m@T1o0HS!at~)!4Z4bQayGiNi&}EFWOihBQEPR(`?xtX z$FtmlK%lr_*fmO=w>`hhmA55xv1e|rd;EZPdkkdtWp1PXlo*&Xp9Ylcad*4dWesHZ z?cPoeH8z8*(Y5zb*6Ka`_8Q%--950+NY?flTMuZ-8OzM;+au7PS)P}-Eo;s0wYiNo z^9yqvr?YZ$a@>u*SwrrT%)Gk1mYL<*?!4UotW|mW`D0mIT<%0pb~ZKR%vl4OYi0~& z?QpHyNZoNLv#@x=RhGTORbH4A9&-2W-tHRQEtjo@E6a1YX6J2oucvO>OWUD)&ke4@ zz1;5YSv{FUnVp%mZ%nw>7=Np?`f2anM43aGbnM8^E$(q;hja6GxMq%K^4`FE7ysw( zH?U`!XDqvO?~xgv#`YQG^!_GlEzZm7%q}mh8*uHI(LZB?TBO(A?_QI&CTmrntFq6< zfz8NB&oxqiDtl;4$#2G*BGpca%x~Ma26M7o8wH3@!$Lh9(i> z{KgX{znYl!o#3m%{c|LL2~AeR`Pp-YPo#mqndhD&d>{=_&)mWjme8^l+&f?RQ5xW! 
z^G8k--b&2=yD$O7muMhx=8a#I{IiI;d|s7s7xG6>7ycDEf3fgx`%3vGX9*uo%=Ypy z@zM(9w}5|x{E-?dUrYmybNRBU@YTfZzda^A1YR5$eh=(P2%kZNpmX``q%gNPa}M~g z$gfOE{t+~QJm>c>6>cJC|3l!%kiP*u0p7Gs%Fo$f^yA>`h*{tF4awh#{4sDD4VusX zit8l*GVtQ%>E&DMg@2Fyp$6eX8Z4gm_i_TzZQT7I{T{wYya*1w&XvA{E-WV_oc~TxctnkgcpL#x`b~A=YCh1 zCq?G+wbuxr1MUJp2<`{(MiVq}`2p~JaL2WxzYsilz3}bef*XWK!DBtbAA;RK5I%|q zkZ1cDKNLP4JnJXI?clMW3U3DI-XnxC>kl?gsxFJOD2Gg8176z7)Iz{3^KM9*O6x1ya9q@G4@CcXPkwZ?^c@FQ(_O zv3Q5Y=Ny_|{$Yy?zLcJSvBj@hd~#uW`8zD$`>^!E4Vgh#<`KNqh4ij-fnUihcrM(~^94sfsp{vMR_jo@|QJHZ3s zF>u~PQvQIWML!F?keKVc3A_&Zs|Th0>&VY|Sh&n1`rJo_`TWN9?*a4qjXCEr$-f!p z=RYpI*em%X;6H&kZ;<@Y`y_w;6T+tvb9@uvK5*TWlK%mC-BZF#h21dkwK}5=L1FrxFo{@arKX!m0Mt<4v((`Md z6W-$(vA1qicrke7_re3<32^SQsPFTVUk6?d9tIDB3uj6B@E@f7{oq;PdB;h975Eim zu5TUqpT|r7X7E?PD!latvG+}I?~B6sf=9sG9-F|&l!<=kW+{I)xB&bNc+(#x-*bYL zp8#J3F8Gt=4};xf!ucmkdF5JSu20)5lK&#|6Mq)Yoh{{yw+NpEE(1RW9tUTXOZoD@ zNcnTXGhY{e7~BWe`mY1;F-PLrHZJAQB(-F*)@RcaP`Ax~^c(^?lZxenW`D@-5KKo?x*S=l&L9qKD!ufM0zvrLAtH6uj6@CGn zcwe|+o|G^AK=>EL?7s;79`gG>l>Bd;BKg^v$?OSYZl9T8ZO=Rm)a*J{@)u`PAzDrc zF9F{IF3Xbqx4;8#;iCDX&);1*3NFkR{s}mHPvMuqGr10;V%mGCTZ=jq~a=7Ey0{jUJb`vY&^ zM84z?qx>rH0cS}5+=C?lQgAys<4noV{hZ`q1jmpPqw-Ba5Yc4LhYp&&T;+ z6xQ=`7q|`OM-G+z?5~S{{7b@@gTsZwuCpY63HTgv_F( z1nc$TX0UG0F>n&)C%``k=ki3RwCq_e+s6aGh?qA`i%;^`fir@_e+2IU?^Pq^dqa|c z9JsJl_P{A1*IpD6nM6Oz9g93y6X>%arZ-voXi`CGx?PfGc~Y_azu zG5cG?6HU`{L<;s!7EXd!&lSFvn9KKo3zrIygKr_`cyf3`aay(@Kl@Z+Z>^Ls1g``S zohJEP!K2_~mWjQV3dyf0X8-vMgkJ*B0vCNl%C}WYeu|jOdwAkoTJAx97`*#=l0OT) z6uc39Be;K&=zj-JoGyG+o%kC*L-=xH$KRR4kAQP{f?Qe-ST5xYz-JJ1`9koO;8o{H z`M-eO=L(-tFXcCb`@!wilK(ew2RPgy<+p;r1Kt7tIWcc9PgL}8BEKvqyr5C^9pI;k zx%@czJ>+*Kr2JRTm;BBPg|7g2fnNi6gHLFZ@>{?+gLVG+!v zfBl=nqu|58C7jVI`B|%kH-Y#3w(t({>EOf_QvN64yeoy@0r!CCTqycM@D1R+tEBuk z@M3V~MN+;KJOCa5AJi`SYr8~$33$oX!f%3Cffs*A%8!3n@*f8;zDD@yi{TIa12BCE zLVE+vRey=(&jNo7TnpX?ZUcX#L(11)EBe>KZQ$solHUWq2V8fZl>Y!6xL)|w%cT4c z@QvUNt0n(+@S5)l2QQcME#DWu1U&x+;opJtdW65wDdpFLmxJ4Hl>8roJvRw&1BZVi 
zeE1cj@9h&l7hJbixEH+r8R5;~jGRoTeGj=(^kqj1*MQrLh1U=tLejw!;SZ6&9X$Uk z(H9&o`8R?$dW8P~&hZK#)FtKH{KD1X%7E}siP_)GknpqM`QVJJCBG7UB)BFb@shs=+*T$$4(>Wpxabm|>2DgCcCnW!F@Z9r-KLl@V z682mt`r>BcD7X`R9e4})abn&+Wi3*E*7fPU4xG~}`F{h~u7JPQQoas+4R{c|5j^(- zDWCN{DPIW=5p(m)w~ZUo-~9=cxgM~T^g`3=JR|4_K9M|cS_x9{qk zgh#+TZV|5RmHetc;mg45e=7VlVz%#DD_rs;;kG-4&jYUkuLW-aXZ=|0=dY9U%ZQ!! zx=VNz`SZbmweA{Vt)g81$f&pB>xWZ_=CbPgJ%s2@3&U;i+?Fx4(>fzP;I?71J2{JX$8VD}x8zXW^^c=3>we*jzq-uF%^zY1In?g2jn z-V8oqos=(lRO~GWC%_xPYr&trOUn0y7ZP)SZFx-eOTfM0W#F90CBF@v3%(q@1^feW z_Xa6{H@NW$;bCy)lfoYmbN?KETDbCViKlx+I1U~KzXBfnjpUzokCgBHt?*0W5%8J) zVt*6(USh8A&@)p0E#wED6E3+|@~cLL?*cc1kGN0rSAqS+9DmO5rF;vx_<7-HP=4@5 z;o_f(KKzpK6~wGB{*&;-;8|nB`wU2a+bhCx@c663tHGW(g`WV|{8RWJ;BHqI9lbL$ zzIMO(AI%hgikSV^xP^CsJ9iU4>H#UA*j@N+VlJPzhw#nFFWy`DcgRnG_xri%d-sw2 z6Np*A9?YMU@%9+qSMt|`cYH?pp!M*-pD=$;#`+QP-;qBGUi1qozZv`kVlF?nzv$mY z{<=Kj!ylCVjo>BV&ER{8Ilk~`rThoTUv+@+sSip15cn6w>~G_NlAkdsyzWrpGH`C8 z@FmI%g?|TLTqXR4Uy44vMz{ey92Ncz*b@^z;9)7>1FiMffcJPz@`u3-h`If{Q{pcP-U42U z^1VwX|1s5r3mzAHqu>jP*`DVcQvMZi7`$ME%`u*;JM3% zH-JaM?}K|AB;Ws}#Mghm@OQvfO~UsRbG+_mVb4>-IW5BH5OaUq-YWce@C5jjVaYFE zA^Cma8gS09B!4yda$@#31Rg~G7;;opI`_6qN@QS{*-30H!9z}JKO zerxIO91f z-+sSvIk@Hl;STWDLE&G6*F7w}*Qn@+9v5B)PW(#vm*Djy!kNF9^6P&qJR7|A_rh0b z{vU*&C+7aQ?nUA3=Y{kCBK%cw7q|`F1AZPn@w$}f&z-qH<%|nI0q)-_Jad!eZvnR| zzbW}6;G(yL^Iwqiv%pt^E5VsBN`BckDc=O%{EqNL;PUOl?w6!|75H@U25>Jiw@2as zO8M8pjo`yJi+(jY3ho8>g8RU4f#?21>>c_?u|EM`0uKCB@^1l`f!7jqd(H>HhWy3g z*vn#X3HVm<2>2Or-MeCc?>~vXb?*y*m6+>0u|v2Wob!S3AHi+l<}uNCfu9Cv{7cGz z=@rSJ3H~Oy2K)fH7JS01;=c~O7Tga0Co$K*8+_!Sh0Eyo3A|hb9t1y6%=Os__HGd# z*Yd<{FT*AJEyy3s6h7lMDZe30_BIU#2Rp2u4BjCB<8LvzEO7L=G zj%U*hvG+spcJT8k@7YcA_a7I1Vt3(H;EX+m9|RBVCA`O9rF?&m@LX{5-olrHSM4MG zI5EdFb6??%H$=ZASNK?qTfyzfzn7Tx>%gyp`#vN31OG?zw}9)xTft9($M=)+hin!9 z#rq3afGfeB;3eQ!!K=Z0zA5_A&x*dBnB%KENVp66Tfu|iqJt&>b8m@Wxe^>k{$=16 z@F2Jy{4Tf)9QvEs>j5`{H-c{m7aSt~UjZk;Cu|da3%C}t5C*`lV_#JS~@shvj zpHg1=XW$*k&zq3^p07&zR&bz9_<4&X?@D>iUt{qH;Em9q`<|3ne$?W^_tW$B^UJyu 
z#Q&v~&;31Pw(x5x@0lY!cZcY=fY*U%ohD zk^CWW#;L-4d?@8}!3)8&z&`+wg13M-gOAQ|IrYn%FZR9(E(AY8%>5?}E_4Y8PLuL6 za3%O|a9@Sw@0}^-dlv}Tg3Bv~9|z}rP56*3DL)Ec32s{``J>>nMZ!nBrF?#s@Ri_g z;7#Ddvm}4t8B#t0o(*0Nz68AeTq*w~cvVt3XE)K)FNOH@nNbFA178Uq2R{q0Jx|IX zyt}k_Wu0&vcsuwx@W^t>FUgkjz4gLZfNL9sw-EF8DQgtocMswD;FF2D|5mj~{)ONk z@ck&i3A_Vb-YVsf-c$4w;70KFHp#yi-1jZvzk}CYC|tOg=-V$6J_FqQ9pO&!#!G}B z1=n3F{2n;_a^X4jy)mv&2e=np)G7IY0JnjCdrN!Fyh8HN0~djR2o8XM1Fi$_LC&~+ z2f-(TXI?4xTELaywcvH&x4=8VzJ0NMu7V!C2K)ec6#ND=6|K2Z%I}{md@Xnu_(pIN z+zY-2d>i-|;Jd+p27l#Nv7hr9v40jg2#$fz1;@dcgNMNV;Ke@`doO@51!vOt>bO4l zfxirX1Y8B)3jQ|uz}v*$Pr*;zA^Z$@<(5@fH#8u_e=SA!54wQMBj_#`dkV=4SXwj1=#g~ z=zGEEfS(3m3EmEV6nqGMuafQO{9N?>y-ViP!RLd&3%&{bDtH6Jr+lJegNzXTov?>8vA6U27n8keuiv8cf z{x-1c^YSHM^~GTIUj^3s)q&Ms2l!90zs}N+TIF*ON^j3%u=bx?@Y}Gr3hbtnJNJh{ zu*SC$tnHigIkB((D<6Cy^u=K94`tw^kY5Ep4%`K<1g`mUaP>P(07A% z|LOz3j{NoD_rRlI^*0XI_AWd)z5a{A+Fq-{+CK)tdVJXg*5k`|ux|gnL&UzePXN64 z7onldt`r6{A>T23D)*s z0@mY8Be)LntOINNje{>je#W6;dT>8@1Uv*D18)Lv2X6&uA1d*?kCOOif<0gl zcrLgcyac=$TnDZLuL8G&yTGf#Yrt#43Pl0>DZ-du^KmQf6zX@Cp z_M9R1mlN-^Cq4hVM0geF6VC-tfP2Bg63Nf*kn&5wbHP^=v%P{#CI1od1bFYGB|qab z$v>F*kiF=506r7k5B@%Q82mo#sr(rD9FOQHzyrjbpL@C3=ig;A7lA9h!e!t!;Kkto z0XKq=_euFq@b%0&85zA`mtS~pr}(P_kAXKZ?@hmtxI*%G4+s~47l13l_kbJ0JHVab zvxB1V2Hywn0q+x%{9bS!^FH+bN$|bkA@ETWp@*BaG;7#DW!P!?zeI6%1WMBII z9QdOuMwv*=z%^Kennj@Lw}!_af~a2 zP6?B|gt$Ws)0)c{Qj(`#!WHzU$!xFdLN1w+aTn|c7f~ZfE=^bNu@I!wX2y?Vn;zaGn<$?Q*m-*`Rg*`G4KoK}15 z?;&R%FM8#*#AbUvY4K|o@A1|2@?W<2TNZ14i>>;Z%=OdXXZDbu>!*BVS-L;|owFIQ z+8?p(o6Pq6+PNfcC(^ThWz`p3dXrhNzYo3W1ko$2evPF!ne~&uCp}U0%0t9c)rY=@ z;4Eyvg8rGz_Vo9td(IYn%3QvXQlq2Z*?&!T>T@xdq|ce4*Wa_&lusQGeeK6tSjF*} z%=Yy6uXmB2+gF*(b38fxf)-w}-elJ6?`30iM6b-{9epON4ZXpv*WcG3bduAjTWoFT4>at$?Q*mue=@plutZ0 z-Jja)x9pkB_VoA8%Sq4uPg(U^2WGCn$*kAkNBcfR;nR{dft zev?_RzrT(`ulyjf#>yttUb$t@WVWZj-+lr1lvVGs^d_@jfA1YRP3ohpdb9sl(3>W+ zUVk5cGxW-tRCucTn8!D^XENK<-egD_Qzq0Dh{=<5c*}wk2|5oUg zRiEqBZfE)O@AD}$&P>M_gw`4VAU5m z)_2BlGV67Gf~%lcet}rG6`S}vjBu zYUq`xtJm=s?t)%1f92`wb^M1LpjV!*UdM}g 
z4SHqOo9)f8s70*DgtonY-zsc-h z$GgZ{Eb%KZBsTm1y%z8N_4NFd#RC@SpOs#Ig~cNlA9;3q`EObLlEslF>E$~ue$`@a zpX@!Pfta~{P3HE|@ju=rJ)a+yk3J{e-?uGhdxey0`2{V!V*4hueH~BaH>79#$}`SQ zw|}n14_cgGonD^(v48x$XwtvQ>|e((Sw?#HuY9Lv?>&pptV#EGm&Mu9^!!B@|JdTc zTdeVS9zm6$g_+yWWR73QTRANz@he|JZ1!IsC&}b+Je{w%_#KNEB+|>@X|cweGs_8g zGG3E8UL6l+_oT$DJdfCn?*@z2e%Zn4_DyE{I=;*}>?J=<629mi}RwHKD=n2gtCwx{Fg z6p)_nDXZSR9^!aRX1$KLvmAQm2d#LrmQ5XBKRcp@S8U&8wy)#&cuCLkDRcQkO6X3L zdXrhN#Xf%GW*x@g$_MW;#XF^dA{fRo6LG0kEj-U zWz~=H3tD)^dXrhN;}_inz4A-MQ}q{*V{NBBli8k*fAlWwDYJjx9&Sr-vQwXsG}sYz zqr%%$$4{yxHsk5AxZmQ97OVeUV}IoKFq!@9_)IgGi+|-)iB11%Z-8IW!Yj6CGTYPf zoxTry%3QvXQVu0vRBtltbv&pI&?~Dx+v;B?vtGxKa@R|JlvQ8jc;D&YWY+6=Q?sF0 zR=wFDE9gy=S+C<$bwaPK`g|*XlUc9hS-l9ovg-3Jy~(WC@vjbVkoqgD-rW8b^rp$I z*YUDa&?~FHZ#QW`X4ad`dL3V@7kXvYoAqVA$*kA$xZZ_cS@o;vGcI13S#L7yb^NXq z8m0cq4a8=DzRY5^Kgcg=;T79AneFR%V7J1)@^3ABZ(Gdvxc|3V+s|aSuj7m5oiFh# ztA2}{jL^c&@tMqe9gplx=#^F9=BRh-O=i80Uv@L}%G1^BcxPLnS5|$a<=_4pM?YV>!FKW+Z zwx{E_eX&{UqdZ-`j`y|@dgbZrb$qx>pjV!*UdNLgfL>YkQv03tH<|0N<-gJ*^;cH? z2n5W$Jxpf3j#qax^vbHI+bn0{73)oAy^e1;2E8(u=k15xX>$9S%z7OU?~APxzq0Bp z9q&8qZ!+t3{JeJPl~vzm=}l(6j<@$H^vbHww)7^mUdQL#4!!abE2i$xLzX?0*`AK? 
zcO2>YdPTWo7xwZjdnU6z9UpK6_LM2h**<3Z3VPFI*6VnJ`(GgOC{I_f;}4z%z4CPR zI$q(ApjUp6xC3?PmD)4=7uUyRu8)p)ctD%fM_Kiw*7cmpPJ3W{#P30`<0Yy-7Y>=( zzsaoE@f5c~uUxxw>iU@H54LAA+tcwFJ4ny{MR~VxrrWEwn2O5zl3&omE62XUY+uKB zyr1-JUzv*)QZn}k)|pQZ=Jv*A8L9eX(9gg>%{!M28I=*PeMH0WV>do^P z$8R$0bv)8m=#^D(?jNi-ne{q;=^M~1t3J<)-(=S7c&BsPC4S}U>UDh7TcB6|khlZB zd8PJd@e455$7GI2$5*ZVj>My^dbP>SdXrhNcjzh1^<=KCkbv!be^*Wxew?pcuta=RZFzMf9*6aAYKZ0Ia^^4`}i<5ejo%k_c z?_rmUe;vP9^$9C}lUc9h`<@NGawoC5ebgRZmN*NqD(IidY){7rz8&_IxjeV$Tx)xp z%z7P9xbQNmkFx671~1I)-(=S7_`^}?l~rHlsCVj3X1$JAd<*o-s;{#2CbM40H+~m- zW!0PK1CHNh*5_forKkTmDUEy^f#!H1x_7#8Yigw`I>{wx{DS=hBTjAD@&j zCpPV^wOHdFwd|YB_H{hxr(s|Duq)E-tGz9jJ(Jm=guWY+6=-ZQV3`Y5a3^vCg=%z7RF z`%37QHxf_P9>v!2*<`k-Z|w#Exf9re%BnZ}AJ^Yx z*6Voh-foFsS@q`q1?x>_y^asx3B9uF&GQ%QO=i80C;tlc%BnZxWxdI)*YW3%xmMy= zR{cz?{Y+-Pj#qy@^vcuK>-hFtp;uOYx#izv_P=>ZcJSHPN&L#w)$92AH$ksFUA>OC z|2Fi>)79(v{By6D_?1;}Zm+4fpN{8$BlOD86L+BPc%}9>SnX*tx2Mhr@Hf~~R(-Lh zH<|T1f55xYEAO^?>UiobdnU6zoqwQ+^nCtR)_B~O-elJ6dJW&?~Ec%+j08dYwOEIrPfY)$4ood3-mS{p z>wFQt&?~Ect;i?+o6LHhU*e$eOZ>{JH{-3KH%(@}&PQ<*^vbHwvHY9NdY!-GfE&cW zvg*ezy~(WC`7RovSH6q516p3Gy%y{K(PXx#^JQfANIc4_FW?ujS3$2$X1&g@aU%4} zsyELstT&nUIv>ZQ&?~FnY(LhU%zB-_wF(4L$9oQv%ai1 zne{q9NGJ5ls_%A70L+emgITZhiToLQW!0~;;y0P~I{!%7O;Uel)o-`9zsXMgn6Kn! z=ym>*XNjlkPv-L=u8+xVPv<-N;twPqWsPSRzo3OztT&nUIzP(!&?|HKLQ0MNf)-w} z-elJ6d@4^uugv9HKbL+3&5P@D+tXyWr}NKT2z$yJ z&t?f{Qg1Trb-tQ)&?~FHpI^|zs|xyOGV67In?FLY{DoW6>vNvP4_UnD57W!DKkiTY zZu&=y`Zt;V>wG%rlAf=3lvN)V`J~=tr~a6K=Q`+hJ|5MZ`w!ROWY+6^J@^tk&Us4pwz?7uB20dYzAHzn@6_%Bp7@ zyr|w}*6aLDr$Vo+dh`6x@te$go$u*8&?~EciMXHiZ!+t3eyCCCl~rHPFKFQv`!|{O zI-k^`eNumAF3)u>r^L&oUeICI>-khH6ynxtj zzX6N)zcW2w{qCP0mL~dajT1BfGFS#D;0%6}OMcY)|JiJ8YfUQ@(`QjGy&| zls@Trs`H^e1AEHT)$9Ce`FBY?%Bs($7zn=HM_tk?Mm&VycA_2%;`j^AX~>wE=ILa(g)VavbCtk?MszId<1udI4AUiNP? 
z>vcYaRnRNnMm$x2GSAO!&t$fz^C>(Ad&-pM93QgXQXyv6o6LHhf8jmomA&^(9Z$K{ zJ|?q0oxkBC()0FJrk74UWtQG#*6Vx^4?wT1`Z`N*GV67Ih!3DwR{embH<|T1pM>XU z62G$QcUb2ilUc9xPn-|Evg$Ei@Z|B*WY+6^7571}tojMdzsaoE`7PdoURm{9toobG zdYupBumP#Rvg!*g|0c6u=g&A3dS%snEWOFB*ZDTCfL>YkYxo5%ysDsoCbM4W=XeZy zWiDSx$t*vWUgz_8A9`iguOb_~Ftc9yr2AbNhr8+d1;(F04XoqOS25FfYjphDrC=Su z_M2cGzxHadj$hjg*70k92G;Rwp9JgpwJ(5m{MxNx9l!QNu#R85-vbh#j$eBiSjVpo zgLVAcQ^7iZ?YUZ>D@n`wU>(1<1FYlM_JDQ#+I3(Zzjg?$(0UW4*+$+5}j~uWbYC__bYN9ly30tmD@X zfOY)ZVX%&0yBVzG*KPyr__gj|NPTqt+I+B%Ut0{;@oUS#I(}^xSjVre1?%{=-v;aW zwby}l{My^VI)3dSSjVsZ9azV&{R>#fuYDh^-e<`hzp!O zL;kgMzA>KUePA6=GUFl1*YPBCiB0`d&Ns$`yc4YBLAnMdU&n)70M_v!ZvpFgkZ*%^ zJjj!NNqV~2(D5E`VW#mObv(y^fpt8`Qy-Rm9nbL&u#V^GdPMSdJjZjuI-cW0%ru^( zj^DTqtm8N44@tg`-*^^S$8TH@*6|yELu|I!yB5=-J0rpWu$=H{`gn}rv-n=(sm5p2 z`JKP@n4B+_|3KWK#%ay2`21sDzw!CQWIliC{Lue|J>}`@bw24uk4rqts_#Nf%gg2cEUGt|^*Uemi5tYfawoCb9(P%+_J{ceEo$Fnwy*PFKL`8Df8B+> zDls={&t$fz^Jnkzgv6`N{&{;1T6&XNuk&pm485}Ii!8m#PJ1nt`?nPI!(i2${iA~3 zG@11}pZ5mnm8Yv8gTF(cl=>^HzS6p0Q!b_Qe5%*+xaWa&Jnpl>Iv)3Ou#U&g_7nUM zOFQK!6lnc|#eXB7YX8&u#BX~_;#Yo|*z9jwpAq=v6}P9!+@3lgc^P%QLW8+H_lI1o zJxn(1(?)u;J}+84K|ED`bbjvPUx|NZ_3x$_cwy%FP3HJ@KJT-i7QOPL#2t=O{(b4! 
z>D*}X!xpRmTI+e3$?RX}58sy>mfKIcp4jxS_ST^S%v>Ln*`CfnJ_38ns;A3zXWEC45>wN3`{Z{I)d@6AVwCbPhGhp>^li8lm*S;3^lr^6DRy-!N zUgviogI-znHJ09F*6V!mM?53-QC5Av6~D=>*ZJeW0lo5c^*Z1DUC=A5-aH;u(3>W+ zf1RIx8}!PmU&k+K;T7vmX1&g5U$#-|ugv9n`vv#~ExcmA$*f<0hn#P&fnJ%*v%XKV zC-o+?e#zZZ;5X1KtG?Z8f0J3ixnJ~q{Z8UnR{a)=ffr_u-(=Pg-7ES9&?~Ec%<^wC z>w5=8e?9cdlX}PdPXA*Dv%dX)(f~5c9uWQKpOyM6tG>$`Z^LBvzjaXbXG5>7 z`XPQn3$H5ZpUJFW_ps=H481a!=k1T_;3oAZvwrAt(Z344vg-Tfc6w58GV2q+5`Eco zQh#ODS4sAy-elIV#{gxkp;uOYhPC}ocJ#j$2QR1|top$2)CjaNSI|F`S-OV>0Wzp#LrO%BnZ7Un}TMlUd&b{qE08eUw#S!!Kyz z73)oA{lx1Me-e6SF3<7jQQ}4QCbK?gT=c(&URm{h?sUD$tnc3{`l>%j{K~2ih{HtG;#XFEqvL(2f0Nn&2Iwz= zURm`umfqw97sB#e(kB#X{pHQ+TuwaI{Y~NjO8d?Dqr|T~pSZ)Yr}3EAkKCRnbA1|N z?+(~gRy}W1UYJ>LvRR*T(wp@OzMRgd5KmQ~)$soz<#Yd3R{!Sljs2U<@%KW1?4Lxh zta`T-{m%GJW_=&@--KRyn79L4Ua7r4%bv+>Z|*;2`@9Bw$_I^2ZLi<5XENKHfV}|e zxqXy1o>`XOWY!1%De=@pudI5rJuB!kEK{Xd~sZh2+u z_B6LI*T-b8&l1@CA?dk2m0#V3J#+tOdnU8J5!l=3Rk5cWChowt;nmkHCaJS8_3{f^ zc*XWjX8U#T%Jy3h`^tCj!d{tmy<;-lTlc=$8-_h)_FqV8KEI%aR~(PYte@B+`uCt$ z=JKrnr1MeE2ckdr&(dDXs;?s(yfCwWli7b8^gYlktNxSxcR^pWMf@wPzQysr)4$2= zKjU8#|E16?tA3`XH<|S_p??{AW!0BkdXt^{cedpPXg5FgA6_w}bl{%l_X0XoZ$NB{S!}2%EPbkp(*uSLn0^+I82Q_FP*XvS0<>QDu z40{@HHr`|AcunT|*23P`VNZF>F6>F`?A-n)v%Nal%NUpVl$R1u72hZI$9CAe6!w%y zcVRErs;|i$UpMS+gFWR#|C%0OnZ-30Uu^MDEdG_nV2z&Rzp7P%RGqt^ZC%T>SnauHx!`|Vf z=lUtrOQ(Kjd9I(ytj};skL!S5`GsA?GsYU0QDS>0v%RrQvG*bDDWAVJy?)=dnC`CdJ_Dp7b!?3sSn_^Fy{TEV7a4@tmbA3%_eX(0C zwm`4^0C9(-lz(bZk8f(vWVRQEy_aB5S@q`ehT}1r^<~f>{Fc;5dAjs?41jH%9Q17-&q_C zEzGPpnf0FCB%bdw3*(jyGdZiT5*aOZzJ?AU5MwdjtG}7G81vO=f!odx^boz@9RfFQl}C5-+MZnf3iS zqW=Z-%Bo)_@=3kPtS{bM^b^o4tG<$7(84Q@-(=RW+DG)kze{^4b9s)xml7|k?{!F+ z^)vSseKYjRsxRXgw5Z->)-S;Tkp0jrtDa-vMfE0|`Zso=7xhlP$*gaO|HIyq`YWsd zVhGf~$*f-oeH40S)lXP@lUd*Q8EL=oL9eX(GD~kV>$gDv6!glfH;syF*9>rH0;_rKkTmDUE{c7m1gI-zn=KUi3H<|ULpOyMQ2)*)l;tu%c)qej-=VL8S zSbU|$cU%0l#jjed@%PB>+2sCbGRI$cki_r$r?lTBQ_Q@r+9~nE_PGA-474z_ek=6z zpjTc=+~Jgze_WpJ&9dS#ne7!F>~;j-hdpK0i?yBcm~85|??P|hA8>t4W_=j`kD8G7 
zP*(r5;EH@QEU%=)oIr9cdNWz~Pu{qJ_@?}T3Y9pVnuomXmaiFLhXGRISSn8fq>_oY6{ zst@xE*yHvvnf0CgC3ISn&?^rSoAJDC@p~43VMls-^*`=d-?=}V%>EmX5dUSQ=ldyT z)fey!T6o3ro6P#|BGI=)uYCV5;xWgUV|ymEy&Xr1z2{+1d7lr`>sM?sNuA?guJw7g z$!tHzBlb@vJ=a%R^&7adv@mmgCbNDc^j**^tA5b(Z!+to?bEWG0OFq!>#h9#oS&?|HKLQ47kf)-w}-elJI zMnr$yhZ4VXGjWHblz*!&W_xVEj9<{gE4FVk+aEhd><_@cGM8t4Hou^SSFAUg_2b8i z{_oH$b9vT}QQ}4QV-5+ket4GXPtVA7wvV#vC3mOZWY&*Be+%@=s-H0Yar`Em`rTdP z-_}Du8NbP_-*B8n)C9e<`d?%DH`&xby9>SPkK@;cU$VaDc=7Mcl=zjW>tD-Xw+p@L zZz}&g;Q#N?D^J&d&sQb>MOhNR@^tlqGSLq}udMn{ieJlr)-C>(Kdrv)Kh(dp3%%K2 zxcyD$?Y|NApEX0`SDtSCTK?w$P@l2ef95~F3%yz2sp?;Mf^7c{&?|p&_sq28qqfIt zenAVb*q+JUo*A>nUK#26e4)%Q3n_W5>m8F>@0la|Ug(uoKR`BkVP^j(vwjQoS=r)W zS@nzg1ueW{y~(Vfb+Y(h3cWIy=lIL`1ueW{y~(U!HCOb%hF+P=e;WPKdr17Yo_+J8 z{)L3u|KL3F|2^oH)qg$&syCVSqtO2qdS%r++tl!HGV3Q!5&ynDC4Obqo6kQg=uMMZ zpK+?_zXiRr>dotY)|<@wTtizOH$(pj^vbFaTmDUEecpU&&pmS_er460?Z^I2W_=;_r$eu-`cJwa z3Paxuz4C{|9f+M*YHx>CACozrz-dySgZGyDD4$I{mA#pkJ(Jm9CG2&;p7O?B*sJ0| zY2g*Om&t6euR`M63VX_@?lX0KX8qWn$!xC(pJ&&Rp8KCNy>$AY`TT+PCbK>OeJ}LN zsyF)!>rG~T9rVA2URm{(+!(Ylv)*LZ4?@4^z7oH(>T6g`i|T6}5@!9(E2TYWL$9p* zfKi_Po6P!3=v$#zR{au7Z!+uGL4P~+%BnZ(%l=Jf{SN5=2EFoh_4BWi`upjJ1HAo} zr>kEBeJk|JsyDYc$8R#nKMMWNp;uPD`FjA?EB_Cb=SL5n`^XLJ?9T{i?zo5zfe5m7>nmyywu`Wi#sh|ZSjvRzSrW1Eq=-3 zzgfHo1#iZGfW?Pd9I$wS#Yv05Y4J4{|HR@4EPl%37c8ca!_(rY+s8Df?w-byi=Val z4U04Auwur4h{axukGHtO;?pf&VsXOa^DJ()c$LK$TYQDZ*IIm|#kX1fkj2kf{0EEw zWbx}3|IOkFi!_w#ADrZnXFki*K^{Zi^qYc$3BNSiBeg zF37C!;T9ifah1hqS)8=E-r{dre7VKfSp0p9Z?Sk4jot8VN*7YPh*CSH?@+p!(j}BS zC|yeFGD??I>ZEi9r7J02N9lS>t0{et()TIdK&gk)jg)Sp^aDz3DBVoy7D_*))Jy3{ zlzvR1UJ%DBVx#0ZKoow4Ty0 zC_PB&AxeXkeo5(JN{>)_l+t6A9;dW{(i4=Pr1TV}VM@QE^faYkQyQW48%n>W^bDnq zlzvC)SxV1Q8m07mO3zdJ1Eo!rUZC_MrI#peru0WjFH`yxr7=pcP@|*E~RTIbyK>QQXi$eDACgX7OnsP?fL)g z?OBjfUAb~z{j!$oShP8r2!=DNn_8P&s*|Uv>q-0CHR^jAjE0Xnd+TzJZws=~UvXaf3>DWis z)U9ckAl&WjEAg$!W*7}xZ%hT11RyI@p(~9wSNPFXq zXkF{hcTS!+e^E=cC0RYEzPVN6o_uRzvMx#CaA@Ds)u-2Au&h2&ePT3zUQ1InP8&quDyQvD 
z-BI_Eof%Y{@@Pwx`U7{*wD;##S~#yh)sXhq{Am5s*67mYB5K8Dsbz7tlvZG2aw+YR z)C-%8@W^FVvaRJKE*D0ZrR^XZJMDL^+_{u#tDd|t4V_4wMK$&Rwq**jhK81A zYVXF3>S(NiYa07zvWZ$Mj_nk$Yv87JR*7X4N}{fLB^}CAO-Xt?)l55S1_vC!fc8LI zC+k-@Yd!`vq!Lj&{w|NkYsq?ZLmeGesyPGi@k>5*S1)rm4jtB<&bvIhJYGwlc)gqg z<8>q3NiJXBu!6svy}Y&FSvSy8AfuZ9J8?EPG%su8qI7s$N*RqU&LOWl z-ax%9L5IVp2C|v&_IkckVT++t~k)8AT7RcrgGS$TUI9c6c_p) z^;5hXr9;my3_Dp<7QY812_iCqAnp=-4W4ie6B? zA{qBa7Dg{PBN;E8UsYIDIJ@wK!jr2Cy`iRLbFyhgvO3k!RNdIxT*QNl9vXOG$}yqH`+8Rf(r!KEKx+DGh`o z@uXDc;|w*fZ19(qP;-}*I2RBw6c5Hcv1B;z3j`9Oh$mzj3iv(}k<<7kCA7WhlBc9( zk+X)ih%Xh5c|vi&#~UvVq*7w7ly^_MvI?J2*H{~kC0qRUNp6iqIFR%PqOoAeQ%VwF zdTDPkSj{c4YYb)jnNtI{^i~=DjrSxLO!3hlSN{|a3aZ z?e#~zr7=$=9PlS1dQkXJ%zHgzKVBPU2QgpJ@ApI!fk4C`ip0>Dk2%{rr+V62bX`OjcS*i6kRv-?;ihmU5=}-*y`>>fDoR&ltd6uOw8#-amDBoT?0CS$2+*c-t4AP{hRR^h^v=t9#7)#yp( z%bMxZZ6yccr;9c+75AkA0Z%j*E=|P!bUcwxI@vaK*){bgUp!e?S5k6f1KrZeqY+dr z?vHr`aes<7q9+pZ>vnW1=7b;QtCDK3r=^ja!`TZcicmaW8cIe2(Rd=@4@83!g}=Ea z5t2KaAYIQ#>jJ50bBot^YH}rSpDM>?prvUgU6Se}9_~s3Z!$@ThEOsb@+JH@_WYa1 ze4$Xx7xTtLzCehs$*~jc!k8boT-EgSsg;lMrO9xDF1CE7u|SG0J9RVb1$Aj?7p+To z2d#B0BBvjiw^8B-$|$YYx&b;GB4T zzH(=MV(YRxx?y3{^H?0F$7&7E{Yq(Vpfnx~rlQGU!kY^ELmu=A-iBTe-CE+{=M7ag zNC!wJlg?vG8Bp#1=`0DIGG)KKFeiNJbdfu##HZ7%xN%bv!oJ-QdDV7R*Qn4s)>V%$woQ{}gGa^qu%abXu zcX{-@WVIYUIjmG5?xPDHy7P;ByxxEjR)o(U)qEiHdFZ(-pH^lQ>^!u1`56@*!ujaJ z(ImZ5Z`c>7%PxOB;7RMpJ~WO~)>n%AzIl^}C2W1#>b?M)j~z>2X_m*6>>@$u+-Qof znrIhHCeZvrxiw2WsB;^Xlo*?UZ#JuFi^!hn91pxkHW?3ke5H{H#qJN%R*vB`_+K)f zq%B3KBD%SZ(b?Ot?W{+-z(-H9d{Cl0e&+%;{pmqCMklvGC`kt&Ki?Z*n>j}@Dev>m zadaPd6pJPN@q{<&Nl_PwMSa>K4J%$REd#4$WWnX)L|x&(uPfH#huD7jcNhKvauR1?+M}c!O9d%-n zc#OJX${S5YaTzjsWfl48!?fvy#8-C|5Br!-5FTHWPnVu}%$M>H61SB5VCowMng z<~fxO;<4h?H$OpdFqMdfVpOR>+@t&7zhl7nBzU+Q^E!-*JO{7HlTJBHJmAy&$l z>;6zE=uP;NQN7IOEiTvl!503wj7}WR=U{jO>5WA^^d#69i^UUBug|#&pukG0d@q`psQxY}raqD~)Q|98$d-gqn$4)X1dHynt^^zQe+ zV?4mmjuOc*JAzzE%9kpolW)SG zz)fXHo^s>Rq#sYx6K;A+zu=@f+{NY3*t`2tH zK6IPjmMz4TmE+ke*;;H$k)=17afNilcM9{nMbq5go@kPuCk6sOPco8>=}F~ds$qOrX__vQ 
zbZ6%&4F-dBgz|g++NE|~4SbvC(`*g8`-#Q_p^(Sx57W&yt`$F~sjB$<7pgFx@RPE#q zc)g(nKb1`Fy!rg^JSz``V8W_s8=SmXnQYG~455SD-?E5$OC$7jAnv74 zZ$c@(Nt-lO8u`fGGg{Dj=y^uc`ErZcPNkNWl+a`Fc#}LCWeq^a$3tROPRl(DkeRxx>f1luuZ8`aI3~EIr*Pec}=hQ3v-$=s_5L zImPlB`RGG}kL@VZC8G5!>B}7TD?gz-aFr8YPtfZN_)?K5-8^fRx!wMqdvRZc&hDPl zP|6eZ1>=(~_z`n{-Zv^&RyNR6u4I!pz}>RC;ez_f=kuOOC>2ab=y_J4G)A8(O0(dD z#3>#+Cb|32KYeE6d|&4TeQe9yH)1}-rO)}}bQ@Kg@W$v<>qy$O8S}Z1^TAMashm-) z{hZ3t;{bf#NP9bfddT~%^JS{EUE6%5a1zI2e#l4%hF!aJ3SEM)dkg zL4@y6oUaH4@D;aEJWQYMCt~!Bo4&@9_BcvKUh*y!5ftis%++;cOGn%)95+uhHwfzK`;$GWZ5_p!j<71zLX6^A7r(Y9){%=OCI0ZqO- z+b7&a3Cc~LqZzbiV~_H6{31z~_)){g=jv_~tB3oJ7-dhDc4RY>ze-1V^_ z8pEdYf-r4cYpsmCo@va-6Ft|$)C5_M>&6}t;``{(Ob_1B*iU;ABt3(K#Tm1{#S$IQ zi3A4C5YQz3fCzn6mu=E?=S^u1)2`PnYg)oFRn!z&14u+~{R>=~Ofema5-;-b5>-ix zj-DFj_MB!1uGb_iy@If%tixKuD9T4 z`9WPPV=&Iptrelj5Y$KlpWt?Pv?j6ueRiY$Vq3v)nyg8O_HQUUK7G-js*8(rPF|CUlO9J{@ezcIc&>{%_^~5vS|KP ze<6ZHV;qH|^oy(lRGb2yavO7@4YNuU?jbHuoCZ-HW&pjMhV)U6YSN`R!;&>+-1$L6 zSkWt@paIGm&b$)|fd+rbUAN<45*Ca1ySraV{d!AcheWQ9T`C~s zQ4{7|Xi*C|QGZF|gub`^xh1K9)P#E!1(UoY5I~)aMzX>77Vi!@YUbbQIG8a)7G>2& z#9_1A%uqk<=o1zCDtK}^8nd?>jsYS`XdOi^Du-2|^+uI#GoivE8W)S1%X7NsM2xSroIzbLE5v%zUAQobk8@u%g=_XlPIkSHT1d7-rcLsP2FUtv18<{6_ ze@0^ojcxNfNp1;v^^fXLBEJeuD)yAPF+c1c9}EuT z#V!F7qV;f&e`O9%GvZ;z&d_}_+2kq zjitg)&<>H`G7R#lc!iNdHl3!3-9DomOBVN50Kyj5g9+N8Cde$Bk31+sra0h%YY?v`STniM`H7 z_r9OL6Ku8%%8mpEObdQ2fU}INtjO_Dn#fb5o6=UE-qi}GX6kbGE|FTrrWsm5|VF;nO3-YJoiS7WQKfS0ax`jW%8RP zECFhrB@Y76Y7D#~>!Pd^rnuu28!dvic}9l?R1i8PX!iysy74UlM|;2RMItM{SbR{R zY&nlA$zr_DlV(*&vQ~-k1jL+&d1c+2=cJ{8K*c|X1FNVo_ACsei%6ZA_v<$(Vz@2| zx}`Xi?dG%EKs=6b*}D%}y!S4Q4QrO2RRi)IkT`}HC9DbT*yh!KQ$Fd`WOj;C(gmi1 zu(A=2rAnLlv-$aBy{VN&5=UHOEm^u50J_kXAMvO~!fSMsgPB%q{LKo=jEk-$nXL^% zuLkJ4MEd1 z>zjk~oex82PS*E#KS!TXd#=vPupt{4U~L)J*1MwanE7u6$+y*Zi?azXg@5l>8;zpw zGC86!4+NIy6;)P&0ysE5L2M^=X-krFsDb}VitN8NIBO+?+Y zPJowPgmNS>M=%1B?^*;HZJV^ss-gXo*&*OD4wyf<2N7)+jo z!f<$PaO-pGmf};T0lWsaz)d5NPEnee1FNYdV&#}NF6HjJ8t6}7)*mFMt3k*;Zrtk4UR=!u*Tsf?!Wz&(>CK>U-EG$4R_wMH#l9*by; 
zLEr~3H--;`Db&K3G=AY+NW-dO4sm&>c~x{5zLnGn#G!elFD_+_p(_bak{n2qr8AqR1=a#NK<29UJ{zTsA=VAKN2HRM0siIL_xzacs@qzUw7r!$9>7L6 zE~$Fp(0n2~hZ-@^x9+Uyv+PD7561E2==0UV?G%-M6O-WvbkhgPZ;t1GdwL!TvQ=H- zZ3zo_1~PMr^?-hd1lF;q7v_u>tvN|JF@9vjdj^)#lZkq5L?-HmcD2O)gC;{z8NS4% zgrZ93*7sIhlR?zNg@be^KNIwvS=e(_M{&smFGzt3$6jnL`EyBm2zL4$G-y+j zw)QM){p{A_SPa0-gHt9hZK&5^8_sYv zk!U^UXmYo+wT`fbil;X zi-%2vX4#;FYmebfmZDiYGDD%$h|tVE#8WohU|?p>n*>=no8I?-X+k!Qe7K`5n|0Bf z8S&E=Oe{uMger^g$y`TvGY4ZeRK~grgKkD4>4AprlwJEfp z#ZtrJ)`lAHae2SCv}`Gcr4ON++(IRnzi!Q3$z7X^MQ^9|8a+vt5r5_mz-Q862I6-! zo!8F6iO<=ydcakr54H7Y%iqNB4F=5oBvTe){fZSNJQ*Ulo4oX5E;V=FT`~IKfE7() zKM0_G!&!hQrFHdOzn54ay(|6;4XAirRbNnWB&l2&hAe9U32$U`S8$==$V?-9xJ29@ z-_9&FA`~oP6G3@~^CEhX}$5HX35u@G;~@8-3Q#hs2^pk7zOsE^Z}U z93;UUJOJke>f016CLw_bFD342i#N7H4MM|_nCCK)ImZ41MI&?>xQ5k#JN6~9I3o0W0~&C1=`2bh~EPXfY7=z@*cRMuSfeW#O=^f8hy5_sos#jOs- zhw9?&5hAc6M437EctO<44IYM!Dzk&~Q!}U~cy!@Pg98G8N)_>Cze#Z1rX(?G$(86G zJ25pUNo4pcTM|Vwn2{nf!mV~G%}Uq&@Dm(T8^|o0kLxvm-96L^;5%)Xy8GucA@3PlLmvj3d9UfIa2Pz zv>FX2-f*6w(iYwo;O1f-M({(Spm~7@!3gGrxfX2y{rxKnp)Rio=rD^LkA2X_p3P=f z0#Qv_)P2Qz!~nEs4Fs7@!{b3}k4N{B@Wa2}zj?j<@XP;v^XkjzbSXr@iWert!w!BP zYEcM$$X3+7ODBRh!VJgms{T~gu>H$;_9mk%;QNK^3zu39AhL5Yo(@{W8p9&Wm`)+5 zvn;5c&P(3wQ00A6aLZ)lv`12Bgslk2=5D-8l6fYA&oZm(+K`!e%@R~Fd$~@#Msm1t(Zd+Ows4)H|$uSn;rmONJK$HVUi)|=5p^)F_$j#mrZ zT2k%`9HvM%%bU$a*m>l@?{#Cezm&=;^T24}*(ro%ir91|e%Qf0`>9eDmPIQPi9RT< z1VU70-NKnE{s<^2otL_l8!h@fZAov>10B_AIx@Tgqn?^R>(1{DO6EZ{XEO|v_kjB2 zq>Rfff~3Idk)y#)CRdI6^yG0rx<|r&qXaiJqIlw5RVLA=)Q<;o<_sE=<>Rdav^2o! 
zQBr&|B*_a6tVhslE792Lu@!E0P=8QRIA@`<$$lo)k0>AUg938DeRF%_s?$)}_zv(F=Q(Iq*m$h%aB?;rtm6Sp z0$@XcETf_hCKkW&_*&`~yyr3!T!y0)q6W#uV3_gc9|T{;xg=6_FMs__k>7xO6N*?u z)O$#WS%Nqua{)*YnnVU-#+-IP8GfXviu&G8wZ3FSv2PF(WS>fUK4Df|zx#PV1Ci}# zaG^t4ULAOB|Fu|c4BtWewYgvJ2+yFXb68*$1aVGG2(99{#ynjaN2-CC zPNksTV3U*W5Eu9obUT?&Hq>mmr3s$;Ad$cq$TUs+e1@fsZD|%ci%AORVbyl2Vn0 zluy9Ehc^c(98nR-fBBEmVoYwmX28_1Sq97l0~mq%@Qfjbh|}VDfLX?Bpla$dA4AYt z(WJKL*p{wuPUEo*RTom7Y#8h>__(p_Y~wCx| zUz-{2V7Z8TBQLqDpr!z1V<+Ekv)KzvlG~%Gi6Z!|$pE%Sj&vw1OwEsB@ehx{D({6@ zv!v@sjR6p{gy7dBclgi36Vt3VSxd{o-%`}ly?a* zs!tMJRDX`I*IJ{oJ6N{JPvnq-X!8YxGRzKM0%Qa?o&}~F&(z!(>eW{iM`Fb*C{aOj z${#vU6eMccF*wY*%ALI)y1}AzeC7C00`5nu z7~p6|LAh$%CHaPs)cT|kbKToruMW+n{T7@GeEy_;@{uI6-i)iX4QE{{*_#2ONWzgL z3(cF(<+c%EWe2a;i-lrzT`W{)z*0$`mv!-gd&P;1yT}DCE@W#P^?;Bf;~icU9TvciLXWdsl_XSBGo4)hH zl`e7um0(eE|9ZaNxBI?~%~qrN^fD|;N{wNpsuYV~zVP$!#e+=8E{VWqd|iFJgH{Pz zS4L5k<)-=0rKA2_5qDhQJCj2i626hvtHr}h`i;ADahy)s(WXQ0oXcpCK0f!Yl5jAfSY&-l)H60te>LJgJ|dP%4V%>hbZe(T~|_Giym6W91hCHPb?v z?mEo-zJFsdNWXute-xWmmhG83$kFpS+hDZdM$bBzbJsy)HlqpDYWCULgY#*E!-Ih^ zhu}6uv4Jaw6!bkc&c2^xy`VQ^lp@@;;+}bi<$m>IXUQeZvXc}5Z7Df(0q&=)OBd>T zJ5d8=;>5_~Q{jh0zgP06<4ag>fR<5nvRun;x>%U$Kngq%e|7h%=`_4SX;}CK#C+=W zbQuBrPLHEF6P@mF-)EWczPM~*m52WftEKU{07oKV!mVq%-QU6ejz8=Uq~|Byakm>EH)@}AI$Gy z8p4V2kEnRs%P$*JaF)y}w~;{RVca1p5sz6{_;Mfb9($=eRkhI>CEXg)S`PU_lT=tX znHQ3?KIquQWnoDvwfd&1!$iH)y~AXsG6I!9a>2ZXLy0&Mjh{G}dlgy6n})Gk@H_qmZuYV-Tmrp+o*3YB@?#4#qqo`c#XM1}zr5`xUx$ z%TH=eKB2>6KbnT6)LU644K4uqZz>Cpv$t3>wkS-)qR6{yx=5S}MTDM_CYAzxf{UX| zX6Tk4zJMd7o+BgVhWsQ7uJqqd4ww!~e6$f+HB_atk=JevV^ozI!wFYEYYLPd6ae5Y z7}xOn4R>97V4~(e0t`ShyTL(hZ_RLom=4^l)B#TO%!Ww2DLeF~=>VrKEP=J<;_~#) zC)0!gZ_?CJ!Y=5N+U>+)$P5?fsELzG-0WdP(WgIE!$Oc0xZQ|(;Wi!avqPV92BnCu z)ZbOIElxU*Hhdp8_mhC6G;`p=0Kx!|jlcrOm{7J|HsQ+!KtJ@0f29 zQlz{FfRd6Ek7+eHmlNO8_uv>LbKrx{w+cA@@rY!QdQs(T#9Wddb(HiG9puiVtzhN$ zO+^y+Abk@ar8H#AS(k0MH;z(QdRMpBzkXeb?Z+(sDu|XxXMEUG;Hsd8!mgw1Kby|t(08g4|9UfZY?12U4vbVp)RDNf!;3X zwg%l6z-m0-Ko|g0UD&snqzI@{N?{}hAZy%#=k_g!R0~rt#F+<$hgC_=Z4E1zqA^?p 
z0n!udfy?F%hYCq0Ez#p3_9DxZbavlByXHAD+>n4dp0CNi4MOIWXC-Ysg=&Ww698NN zjufZWZn`~FUTUoE@K5-DXs>O1Qo6Cg-cWGH+ zaMC1{Q>FABxuTWXj8mQP)N62EtD=25m8dZ$gkM$&`pmZ5zvm~f)^Mc%rF@LK%QAe@ zpCwsefYd@3M;$NVk2u4bisA76^i#9#yb-DisJt08?*h!jFIy9dBsm@e^3l5q?^+j( z`MRDkxw_=*aJR5CW2m#igV&a*-R=Ll1?Cw-gk{UUM+-jeByH*?Fgjtyq~rc)zsgW; zMvv-6mzxtn^A>smnmUmS>%BF($Leyh#utJ8f-HcSIC-{PeeoO@7=>CIAc#bdfTMKj z-3MR|vt@uGzAVD5XtF95l||3@nT?g6*Lo4I6X4|Z&3JzfG&TlY;CLaJn0n}j)xP47 zR=Pl0cXb=3ZZJLnTg?6Z?BvyKo>PDi}2Go9_iHtn4picxxLP0 zq}6W2##0F`-1HQ42euDA4j}o_D&#*`%^f{d^bcWMQDlqdBApqwV{=BFmPLd}aeCAS z{K}O61AB-hFe7muOf)LVz?*W+CtcX>8C`tLkqAw{JyW=U#qZ+98z$eFo7gwW0x@% zu}7;k_$|gn9h*0&HYy)d)Wrwr~9u$p*<;PsjoyV zqD`V)cz7%(LUwl8+<&Kw`e;e4UCsdsQJ7_fr~&@tB-kG>c{&C22@AH|9nh~ySy^(E zU{dvL6sPr7DQeRmx3!Uuv8Jh1}~b;kF1WJeWUOEqPGp)Jixi$Ofv^!m}vOzfqY z(~su|gN54lp+$0z-UKnioHkq1eY;8Z^`X_VTphz@k#fJl5eC|rDlycqH@A$_VES2e z@hWM#1qdsd;N7bq2CNKBv-JmGA{M+H09`{u`6DF7v&mT=22ID9{r zf@M-CJEV4E8UiFD3)VvPOl2?muppSP_`R51<(h&%aB$QxgO~%a4l1LF6+vt&CNO| z96_a_^n*pUMOzF2Rqc?Gd&)T@+S}xy137sH`sbnPi2sO)VoDB`yH2Zh+(_WWz{iin z8V>sclf`CPt{xw7^$7m#6MvV3Z$2MG7l;wY=W*?W{nOjTU?_)eown;C0OmlZYHr2#L-N?ER>T1<8gEnbv!n=!0%#c0EfI5oQx+Xa8Q!gAXm_Y4dMz8)| zKhg*+gc5uVDv6mQa;S?Hdp}LFb{s1wc#e_#DI8FvPPA=ZL8FPk1P4rEiNZwaJWD4` ztr(UGNkSC|B5_3lLmAl+mbc^Cz8$xBkOA`Dln*bI;1g>Q%v^d?$)u8^KfB)NJkkrdF+{OO6=KA;#}wEI59CD(@jsTawGuvZM+O zu_Nci;g*5$-FpSLiF|ZOC(>dI$EhNLr?QGq;ssGAa!&!R?a%4xEve^J-R)|8#Ji2L zaIvtU)QzZhi4U$RjR#!>#OU?F*l1#rOSK~17c~$i(*yy4Lx(|Qo0#6ayIeXfSw(gb z;|(Y5MSE_3`P*SGBa#Hs=_&X|v2x6FOCqBy11R}%>i6pP4hpfQa_ND`ReyUU{ylLd zsA2*l_0{NQ@fb50a49v5E<-54wCVlb*fUNnp0+kuQTiFQFkM83~#o ztQ2QY3xxa<8k&?$CyF^hUP%TIo(0{sF=N%X(I9nUwgF2|m565Zzjn=TqT1UN$(J5z z2t3k&J@5&G6*6DZCX@4(17P{5F1BH@Egv@k2n7gbTA2T>H&yMXN9uQ{Z&mF_GS;*Q z7}Fi*ck;Bzx&U*jJC^BscdBFZl7NZ8uG~M4cZ(Wyv7rD3d#zSveEPD`@4en}QXIrQ z2}6p$2i9e;rpPGnP3FTKR@a<3;j^E2rR>viyF-hoxCzDE;JszPa3i%gTIFzAJsH{N zpliGQ7aQ{)k69aNBl8`^8$kyOs+2zD9u{mXGe1oo;CuUE`Yc@%H=I=TMSUY8*ip4z zsc{rZ=pU#l1lce7JfXh(HcdcsgPQ%#OUERI9dr 
zDEfgBNst_{Qk3ec8f>ieHa$ejlAP;Wlka7Yw z6lRu?=$vMUTNaccfeOEEP+KT`dHysE8b-Bbh8vIcY^*2ivb{o&bDvr`@UCarL;wP9 z_PjR1U8LE+#!^GwiJg%>=y63M9PE)KCX`JMuC>+9Xt;MQVybuIyCdz6a_pFM=g)Aj zSzuQ!5h`LTxHWpWh+F4hcq)VZ5MYacAz)+Aw+K^G6 zSq7<~yzBM1|FNWgZB3OJdCUH#ZhC*^+Hn6>f8f8G&AO@88@uh}Qo$9Ksonhw=Wnz6 zO5Kvbszwpg@h|`JUb^hCEKwIkC2HHQ!a0elNQs01KLE+i`*9w6X;zt$djQLiTaq{o z`SEu2nle>81XS0Iy{4222lebws~&)IkCsrqeUMo-&WNqB*@U1I>%&~N&-r!3-IIHm zc^>7ACC$`v1d0}4kn-mR3B14k@%bA656*Mcmwdf-S7at(6``@X-lTWwNIRv z!@}lJ=#-2o{1}#3x=r!>nMN0lG#6pG$Xo z?wl_)lu!ks36uc|nOT3MiGCT`-1E$i7qJpLzmLV{aaG*C`L}pqg67{WNzhP+3bPI` z2wXL>*(6!d&(%v(`veS5^3z7L;1t&{QZ0bAhywHQ9CX#Y<@yh?eg$du7hr}+&}}#> zA#MvA3G%!x1t#I~_m z>mDxzO$GzHQhp_~m_ZK8?!FfYnNTkwt0Ciz;G69s)9k2{Z_T&rc4PkFohHkvg$Zqz z?^E%{^2+tPX`Bhsr$%tv`1F)Jfhq`!Tsg(#fpmq82Z|ReZ|$Yiek#{n!5iGK`5x{l zz)k3RFs*?nKkztIY=TeNBU;%dIU$OKoC#jCc*0&cBf7@@DfJJ)1TN)oAYQ1|CsRY* z6H3Jb-fygX2z_)fT^tTGWOE_e2FHeQ_t*fAMwnCnh5GF*pBdNXAD93EKy(?!gue*n z78n~wKwRNdlhJ;#BjC`1XjA+dCa9|otP8Ef37A5lisuY$IUd-F4dkcBKZG4X)yNt+ zgapkD3%U9$%0^OvG0O15aQlXhqeFEYalV~=v%6RA%cgPj^4q63 zFJCX;zW?=uWjrTLk8oHE&l8~&(&jvf5fBSSW^PG2HoYc8GrfY{ZZ^VR*=$x-g1F|d zL}cXmv_>d})T}6l(n+NR?}yb_L|}8P{!~=z`5jPI71fN@-!t{!Wh=Tz4oDRr9lEI$ ztg{GlgF{r8O4rPgd_(I;?@5FhUjy1g9l6>8S5ciM&P4=B4bX#q@3b(DxlB#QiSeGo zfC~Vg^9{j77ANi+9Z=>Bbn_cBi9z!sEb)08Qqhqlx1-ZB+a1h`yQDC@-z*TGJk6W5VQ3HDS_CykO{D6%s|`x9E^wBjAB33Kr{S3IG|1 z8oQ6-7ofPsJ80Puqti+0K#8KTMdYv$t|yeBC-IDis2~wHP}>Pgf%9R7xP9;#%Mv;X&bIPQt9z_jv53D!Va z$OZ|c2-WTcJ`Qb|b<~N{EB}vlfKr&O(g3AI3V)7hM54EmjVspDJl;An0)(zaF zjh_`l@eA*Rok~A&zo3(~g60+n3-T6Ri15$W6u?%uJ|l=i?o?Pn;pmMc1HIAiPu)&+ z*A(}=ht6$DNFAL){hRV4^uL#hGgPJvc`Udh3f>#<=F&98C#m;~x?ZMb;8R-95)d9} z$dCFnGiwo}D+zfzH#$WsabN2*b*n6kTsnmr&dxBaFur%UY zD(X*7EBh(DQ&4q?C{iMG8Mi!aO!hv!&L>1?WgBP^ej$)Y0tTQC3TkcxM(;{`U+HUk z?`xc?N96=TF9pttMPEtv77O>7%2XWWI#QVgdsqysucC*btYZp?w1kh1Yyh(u6B&<+ zIvxK9ED~VGzOGcsBdl(G%-k-(YRQ>3H`G)&0&rxKmW+k=eUveo;I@*SYa+yutyy1& z+f1%?6bOBcbB-(&_70joqaZ$Ncr3^d(GEfpqumsY?>LLeTOSPSi 
z2j`Yr4rI%MRe(fYXzcVn>BvYbE;+!q{?UvE5G+#%4xYgR1&chI$<_ur;iTB2R3zjo zw$!z?R1(Gy+NE;Cyohxr(3?-P!hc}i3v6wyVV~k?fo&-vjKUU4Kq}juvFYncF=LLYS>({0d&Ef%!21XAW71ZK` zPEAm#_b_!G8K_0oZO!FHsxYae1e7QW1Us|q=97|4@tCeyS5f`qbQWXQB| zCiWdkv^>EiCxj@+5}6tr7`}rcBCr_`@?y%$7Cna*-B(hCKK}!fs^U8cowlHJpB*G&gGP$n6!8Z7i{A5sFD}6G0{# z2;2_XjBgUbGI9}xM+gwB6^rBEdmSKkYhEo;U{NrrKMAyy1iga;cV3N*j48Qo15iXT zGgZ`RH=i|7t*1BRb}R}tP>afb=n}8x580!50{MygA^V{PO$0C~phUS%{&VI>B|)b` z2lNSSWd@h%-Or^YX)g3 z1EEoSrB7{IyGhQlmP#)RxjCh3ILX;v0eCUDMP30S9Vz%O07SxT)v|8Baktz75B*Fw z{JYgR?vk1amBTOXzU0{QI=@`*BWBo$P->y#hoaO@jLF=2w^HpWAAazz`C3%a3%@p9 z7%%1l&rw5iZOnaySS>tGn%-^3-8%frcMs%iji3DJ=8>X6&cX`WlmgT?a>S4NEeGET zIsDfA^?p@?=iB;`6ur`F(N1RsB@h~*mr(e|qY|SnOghnyxb--|zJJ^NwY)d&;f_Oz z!l6`<7>TB0Y2}`H-xDL|E7TtVgx7T?O%*PQ_4X@U2s zq7~t~9UYh=K(*;d7aBu?M5J7r?3B&JLvQV(EAWKZ2w?dkA0;25K~&W8a2uP^=^h zbyGVHEVdRPPrU6%dTO@K1H;+McJqJ;g&5WdV4V3DiE&yRV9YW2$aZ1xKoV%~`_tNB z2%zN234{?6?2@tCPHThk7{q~XgZn%I(_noBV~J;vobT!R5LQsea$YHI~Ijj$RT2YJo0QKq3_>%Ja)&x?!q?CKN(vGDulWSrq^HVaWUQjub8& z2)7`H@gO_2nZ8d9GIS+)6Q#%fS{~Ou9_SJ@6Zv^M?KGp*I+0oDs=UJ3PHM9fO^4U9UJX;oY=Et5n>Hb-u;cqNzoSVPwTs00mb zuTCebAquseQ+`s$m8zM}iGZ{WjzLmFD6ue$U=;uag+@_pbs(0P9 z-E$ZlzNZp4A)~G2_Ng+c2hjSs%5g^*=Grh>s?JFFZ9Sg#SlL4MAP*M?RI>h#}+j8`#fWB+sb5OM zH*i5kk!01JCz{EXYfF&rye)wPV091%}ulS{rz! 
zBB+Byk%7R5kgn!>AG|X;>3j`ZDsr_Vz@nAKzvwshfBo!Ml>&D22r>d<3Vg%p@j(c8 z2K0&ev?7g+T5i5BbuPhwyjQMdUkq1Nf$ge7Dbngd1BDvClRJP1G=mjQF+zy+0!-ny zw3|p@wrZgziO-g{L{wRCbMs9?mJfH>q=rykSEm}u&?gf7@JR^^qEtgrxd!HRsn1UE z-$nO?6an)*u$fg{QKkV_L*kQ!Da5gCanE`R1^SQxhd`Y1Bov6y7!rCUa$EQoAEcj5 z5Q!M6$!ATF6|1{=_t<93yY+S}=prx*6mxCBI>R}@Zgsobf7^QBAk(7CV@*+zHWo+0 z#2bDncl{rH=Z!!B)oQ`2_483NtWxZakSXX$QoF!P8X;!VfX&kAehUSQ^nZNK986aS z;_g8$Oi2dnMu3Ky>YdS625=L05yFGydFi`oDbrmM*OlxA3r8_ex&H8dp@>K(oLNRu z#6Y2Nd>}=oOY0iGR9gW^PNphY(~wMBJU(WyXEZxV-D=TKYwC05AXu<9tk5{4;hwev ziyG)kJeUCIGE!J|e-C08dP0?0r!XS=@XLUjw3^vIumttllvH(9QhZZV0guC8tshnb zSa_xW+^eppzL{2HL}ihG+ApW^Sw`?OgCEO7#ad(YF-*lr{XsxY9UuJ{i1674Z|@(~ zX&7(j15V3|;^8Ev;akGp4J^r>E!q$CYIM6;cQf#zdr~J{yf84??e6PB5iPDkE?p!)4rV7#FT2WNMH&K#p$9ad2v@?*5NukH7grJHh6Ns0PqfFS_ zTGeSdG|++CN!=N`11Arpaz$WKwRUIQh>W+N~vzTPkw~kRrH%o=Db^F!bZ?y50sF zuhp%${F~a4iT1l;qf$Fig6jyJni7Pl4y46{=ccHy?sBG^FkR+-FgeO+r?UY^SDZcv zPb>3Q$K^!eQV5_7g;N#v#o4VI%97og&~*0B{E&#!q?~hbloEo3mInwH#<+Re@Nwww zDmV@ZjQ7_g0}5j&E>o_`ehjZ|VxABvxb2~;%7 zl5*CB_X5)QEh0}k_+$(4nr{mQxFCNE&r6AxNJ(M&^^1C~k2 zNwD-aDX=#Br3B$qoe-Wk3ffzGBCd=k2C{_H%Wv?9>yD&aKkCi+zCT7El1Vvw(h1X* zDDv>#5s)Z*lfKVC(?1zENhr=KG$(3D;IOr1Womr;H+%@VWts>;psI-?L$US*bg~$2FSZi+T(%?=;i+Cp1uI@I$)W`W4r@Eh%7N6gVK6*;Hk6Pqcd+WK8{}hA z+kn^T*r-RltY~usB3StlZiO05t;b%>!9Tj|FIrt@Kj8c7NTUAifabQs-YrW{W+y;) zXr)LlCoJjgp2KfbeVsIq?Jexzk}IqHJwk$xg@n(IQtQ~kWcGDxzP?fqb|%rS@j9^7 z1atya=|Kihr7O7lz46pozU(N;*A1n{c>^^ut}x)-5RxApiu3aR)8ze-59mXaj7bfJ z&CJ`XC{`?)FS1jRQ*c>e+|?10j^0k`^*dMH77pqCtJF2}$rJ<5jB#gE+Q&17_YYb8 zx!J#2Bz7LfFkP!C!@X%pslY(CBT!%xX0X5plbv@#Yy)KAD57>S-N-o#M)WX193mg)_pcDc-9P@5 zYeT76sx+$XLx|{r9&u5}Q4XTV3?O%0sY;UA1}401>fa4@k>~=le`z_b3m?1_(vwZz z0nGM`{{!YG5dc2h7#OiI`_|;S4Ew$xtAIa6S`sYQ-gd@{7SF4~;|WOp;4BT^4AiIh zENFxPB5I(;7y~I-#Gx42_W*e>5)elD1h0ftil8*F%`lJC;hc4=bl{?zmd@lnz@A4i zg7_(fQaz^+GrR9ie}|L*0;`Rnq(3vp5_Sqj=qpC9F>;Ke2*0}`AH zC4I6IAMxQ&;xkf%)qz%&-vB+05SeTd-@=m4-y}oH1DJ$@^@KlDBJDO>A8AvAVISCY zuceR9DaA=a9%bt_6%cX~Pt5SNj{Zb5BgGc_R9RDa1d04${g{S~<6g{!X%%JIAy5!f 
z^E$>D!SQCnWRcMcpJ->)Bd-Lennv)-;NZaxqk9yaTngw(l*>+a>yU$)%JBl#u=53J zrB>B|K}r5sMklFm*1P(3(zr$Y<{G}MgQvI#t3qX3Inm1Lbci&sIW{Q*n{Ux%QQ_^|xVQdo>8S(wk`v`@-n8?V(dx%3Gr!2J# z2Cv4-1DK1G?w#@l>dVPG=_m+hQVBiX=1urI{Dd!IH`)?9ETU28Ie*hY8d1>!MH;40{-_byP6s7LZu3;f8T(nSh;_4e_2e__rjluif<{2} zKp}{NdcD&jpBi|`oZa{2rzn((F4g-GZWJUe00W?TT!HT7aGk3h#-V#gjf;O2PX%`{ zz`y>0GAr5`NF=VGs1ytv0lCO^o`_0!PWvmj{X*!UF+T9V26Y?_?WLLa{}46c$^LWy zP~WnF>Ix`t$3fCv@#nPP>puZC;_@7x8gDmTQ&aP=uc^VlIz0dVG*Wnv1LGUww$sGg{X&;zeXvw-jUYJRu{v431A|V|arPb0v9lH+G9~ev$`h=7#0rraNgaI)%hQsSH z`HA5=2>dq$Y|e?!sCngHa@|rNK5)~Y=uK0Dmk+zK{a}`X z@*X%$0KB99;LVs?=a7RC4;WeW6``btHLH7HafK>5zMQxS8f6bI{Jx50gtq#`6T>jd zh5L!V*bsezqU3OR5jljlVPdJz_=dT>%~Y|zI6WXUs1g93BePJ1xB71gBih6= z+Mj{l@nFjl&Ps|OMC6RwJ6P`y0e~E`gob7sDLWu%VF?NvYH?-u_@CYo*pA#sFW?+f zc*-IN&KMdIJZh>@2_i29gkTTb={>@Ck6#z7k(^qYB(q6QZwTxTFg7YwG(OdeBYSSn zN#+kB#E?3)utq^6W3jGhA)DYpPC@K}nudp`vSIbJno0pSr7(t(`9Qk9#ci037Dv%- z1c0HpyqGkP_jV&q*V|QI1E# z(lDw!aQbaO#;^Er+W`63Rf;D@Zw^jznxG|~g$fx@y~_Zc}eO;DHg-faakbdYF5lS^Uy zu7dB=g9mCy{!LD$osjzMmi0gpB-p{@%-dj&In%+l29?8OgLv59KFV@3jqnCjT^$9R z>|5&En)M&Il>b<7Y9_V%Gy4}mz2k@8Tp(efz`>8q4GgW+DeVj=Bm=ifv0JudQ&acy zn!6oXFa!^*H{9JqnsUX1MD`rJpMWg)Gd_8vfLukg;8`c(gS4|iNRPUT%-4|OR1Ctc z0f7dwIti>?aYEP%1&Bb=GoWm+$a>k=m?cnsk^0?Go)B70-3xhC z6~%JSFOG^QIM8nTv7!l1yiLzH~SiM1Yn)& zN1$&cM7{5&^Dz#bfI>`q0X|eA5my4k*2%(q(c+(qVbTi`m&TFbKkp$hfpkx{Lychk z;65_agLS1BqI6hl10tk~7?t@Egb5TJOT}J`mlAI=neJ%plKb6@^c~j#kXH149A9{+ zum)Yf%W(}5DkmivW>z9il49!2xzj+$nJbbBW=}l0gx(4ta)cw^s38iD6P6SXI^ZBj zywOl8HH8I0h^&ZjxV|2zSAGQlLGh<7poBps#K3+Agw>!xbK4~t}s|wsSqj*pp*-DR(F5Y4bqJmF^)WP2t%Tl zl!2?<$lU&EKhuIipvy4N1?PL*&8r(3e~am zjy@m6M9GW}5`|g@Jtw?k>Q_s6uv9y=B4`1UDZovm9Ojn5LJhLlN~Bw%?%lWz9HtW0 zhYK0fZCLIP^(4@3=suvR03Hya9ld~m5Y$ypWC+4)Gz_N&j^8HH@t8nIo1u`I2B0$^wQC zOiaj38Z!C4MR4qc*|87i$3EzLM?dKw{iGcac6pj-5y5W{=5S&jLZ%E~1SS4p{Y=t@ z{rK#I_S>@$9{uUajApjQ*gQzcPl}%;UY!9i1|lH@Fe>CyP{pf-nhR!esj&itEvtdYHQn@35`bh$pUDH< zhKD?c!;;iDcv_%_CBO6d2kob4TkPhM4w0(fkoZatAc7$#g+9Vv1BX7i_qh8spiI1j 
z42dDYCY7g9O-D5}i!mA@am;Sivk!Kg9QLt*%nhtWsod&OI0-HV6eH@85$>qFV*_6@ zhy@|sA{QGq6hD$s&j-|-${?VwyFSi7Xs2iPK|4Jy%BWDarCd2=LQ_1dVbD-|qiW)z zOuIhLK4?ch`=D<(>q}cqve7QY04K#6scG9dCT92}Un9>QS+Y z6xSgMj{rj}Ff$k^``tGCpxte=588E{eXyIzxjy>#+YlSFS7S<(1wfoCL0IxGtT@!^ z;tDOhvorgk?d0r(wv)3D9^c8>4hP>7IYbl$reHLICNE1%5)KK@6wu68>0VsTK4?$) z?1Od#%|2*P&g_HTojm`f9XBAg49r+aE)p^iJg!J$t`P8-q{i0!%W3vO-;QVgNjsqV zC+*&tf6{J_*(WM$d0Jyb`}EE zkROn*T$2qX3>23}Vp3mlufEJa*nJz4#OKu9vX>?L6n|qKJ zsp1xGZ;JpHNa$v^(Ql+oi9JLe7F^V*H6V~l!9<(KT#%OK5A)vP2-PUY1v)7RD1C zjXNKVhZv5ZO2@5-<6i~io}ye1XD}G=7?iuwQxm@$_bVKKF&g(P9={ljw-KUs*ok<& z8u_?i`NUJ>FXrQ}lC(1VBqWRD-$moD`4f8+Rbu?^#P;Fh9X*HPJN{id@w;r?KZw3Z z?{a01e>X8xFmO&hH}&rL8^F0o?`9Lrm`&^ne9_}~v+?5M4H!R{kLLhi#OtKuj-L!BM43;T#Ke>6M#D8g-y2U5zZ}mvznu7bXqBj0C!#gCXCLe|{8`tR zRWxTGOpbM?*-p+n)2wH3)=_2qIO|rj6FK{!H9-JLJnet7eVl#Js;#qbB|Dhe2aosH z*l*8!VeH4}pR~(8|D-i5W}gggKW9BQb{)Z+_6p0KG>P3vri)0j{Ug(`mEQ+Zq3;Tt#La0pxth> z4_fIm`(SKGGwWfo-=2NYx8t7mT%eulNuPhx4rumCw7VnQ4D7L(Z}aF8p`h!p&-z1b zH)kI_xxk*L(d?69H=?<(qtKd#oPE-6 zquH-VM}Hk#Q-6Kd4`h!o%!QIX2$PD`KdyZ6sTeRMfRlF9l8i0h8mNfxD4?}jeFqPx zygi9d%`VB%t>bL5czO45Ta?YCi{X9vxeiGYgm-yqD?xqy`GvnKIvVnC{Mh+NIIQ4^ z{PMp)zFB_w<$u0;^#x~4ONs+bT7-S!|E_GUII?xrSN-VY&$l2vmiF)7?;l?O@$Sv? z^_ySCEmY&UhPSMMVIM|Co31at3cs%IqK_XwyyLM9rxySWs)5Bma8VN}@8gT|-#&c! 
z$MW5WSO1W90W`sa0!I^7SjchHVeK9yC0K8>H}C&x-yoVsAvXBlz&nsDcfUfb#WtH&Q^7m%*wWK_!yoUz*h;1Ga`}%p|67Jmas=h$Dk7zV$FTVg_Dw(h@UQo8 zUYq6kJbInLJJ|>&DIBodM4p+sF&1)ws{FB{_K+!kDmZi&T~||m{qoDp(MO=Q2LMF+ z1PO8^cAM0R@kjQ6OFud73t0*w4LNKzA!(02vtQ31@na+~&`|1$0`HX~$DbcL43fDW zF`MJr!H*6-7%&d9jU)wl-)yj2#{igCYd`w%$CrbW)&z&(fj~`-wW>|M9-Jf#+L|Uw`@V zNgSq9rlZ0C+J@vwL^iiYTI&_N_05V4OIEkyZvEWwpD*P&GHnEdE3$5+RE&GNPHQgy z{N?4BH#A5L9Rb2R;EQg761wBvLw{tCIzU8@`pb9Ak1yZ9eRbgMP`MS9VaDcxcM*1C zy)@%}dEL$b;_>eL3qiH1Gd}*>6;DC`Q`cM1e!GzsHm8WG15KPPulnt%A^Hx(?8PU3 z-2Meef85wrWxz$v9~K7VX0LaQ$8BHpJoj54AGQ9$_TT^U`%>RyA{5c2T2jXV%~SKS zBP^30W?+yJfvi8?y|$8{R{~ODz#FF&d4SR-;$j!YOUq@A*o;IA;Yq3cM*1q3VI30r zG`L-9yxS3x)MC*DERo_R6iK`Mg9B2!I+Nx$~Mj77H{nDVM_@FFz{Hg$n>HF$} z{i?JLKl=UU$B%E{|NS##dG(t}Tw!VpfwaPVC-7x;YxNYA1<-6v2?-j?@wZd*Q6z9z zaduWYtl-uopOQEz#C&9?XbId26uAICDJ2pF72t7hVVGGuqJWtN&3}9O`L{1Ge|aYZ zszFdvJUqb-S__@~{uIr?w{>nHre0befXt++we`GFd49U z$w-5^t^$X9qY6&kkoFt4qHqNujU%wzM8jiXVl4z)9_56<*R|*k>CFvf>l=t=f ze)aYC@$To}{>qYN68%l8rZs&y?G>ZJVose-M&p1-n?@zI$x}*ZU1cR95u|KbK}lJpK5WWwp6Y>=9RqI-zx@R>sq&xH*d$L+ z*w3%hm}<5^H&U1oYUn9)Z)Q@t2;^ulR_nz=(fTbGnq*L=`dgA#(l~V6i)p5IVek+^ zNopFo067&O0IH-e<~~#%2}d1KAK(7GryeTg{abd`0`4;Y)cMwY*m**){X(TP>be5_ zdsnB0K1e@IUm6I%iWaLN#A*fGIJKzv)lIjZ#+#0RHC=joPd?Ga_J&>k*Zw0HpJJJ* z8!EwkTd6HG?6fkRB#m-n6J(>gH%scM+518c1r_Ii`9cX{szxgscc>y>y_hZ!>eNr^USA@?zYC`1qi(^@;WL2M!Jb&>WEQ8eR$TL z`qrPC?H>qPS_X9^VD#xdrKK{Nf+3`h4YhQrh;6INs2nD>Wx{bqo!;iCWkn#tc#e(p>P1JBWi}9L zWl|%av@LW}Rf+Rlp}7|uoJy zWJYJaEf!{Ok*g0BPr-4#NMWusEFDFN1hp~N@ zadg)?IoHT{t-|hA(ux$E8n%iW*8eoO@sniN<1@3!TF8odPAie^HZzw z&NIDhgkpEYO{sb;HkDaBcl=OIdgLbMWtD+!7k;PV7gy^W^&aN#167EqYlhK@#xPVd zCay0V-PJqrL;0Ug^(ZR5Rs}qaA?Py5XXKJ4t~`)*$K5RZicVtkLGVL4L~=;I3PpdY z?@hGWO$|TD41MO0dm&{peGF8(3srx){J2frWN%SdCWNtSkyTnO#M)xx@srvmpE!oE zS9Y@yMsH#1gNukl-Sl=m@7+q%!f>uhl$~5;ZM(XbshraZHH?l^jf#}&(#we$ySer< zE|Ot~?N5klN2tk_u8T(PB@8JSjnKtQOc7Gq9`Ce#BOZN2&D{yQ3QG=i1DqbPKv3GT zg-UBYkBh|(|JSu@oH*agwa`D@sQd8d8@fgrOuP+TeF2S9frT4}HoRlz5b(1d$-Cgb(0-Ti(JxC%5MhnB>GR 
z(eeV*ph~DVj;VybX^FwQ8;b&w!% zon)Mmj$VG?gU+|eypC}+XQVrmEgFozb<|nZXZ%;}uDFykce@xH4E+pGN#AC#DT3Nk z@V$T*o-H>#`RP&Kgq{|uHO^nD!lY(V*XwY`hhZBN?J6L)@&VoqO$c}ehwFAoi|Pc~ zEA5Dzy3@=8X5;}9NFwT6GObv_(+>+AS_MZCCheg{ zk&O*pB~WI4o}v^Z*|lNquJV4@0*}DWD6lA^Oz_MPb}iIm!$I5I%DA-g26P_fg{G|+ z))UmdE?h#n&h2i~>_a7H!V~fXaR-Gtg|SJzJD#@B7uFP^UtB@}ynNUx0z3~6^#t3K z6i%`ULT>VN4R6^?O6J3Tdn48Amx!^=THUD^w^@C=$l=%ZO?w9n>m^sv#T_#ih`G&< zU=EsG1+d>-+?4i;^9mwwBizqz4b&4wC@LjS`NK$_vs>Gq^P}YKgt3v1qE#1$#eaoq*iBkZ~6N$K>zVxX+w>A7BSzK^^2HZN7 zYZx)wJA6Vj!y6<(1nuae-^dCy9g?lrFn{t|{obww`I4NiW|t-_;0CgkV@3ySo5X zu;WIG;}AsZuBSfq;>4Xdy*0G|{2EJ$33sEi`j`ClT+6t*|9Zzk)3^0AOotp7d`Q=r z4^#_N#{Tljn3Ni98fn??9aJ}U;?1wc2{a+~^7{fNZU`5pWT}KO0MLd$6k1R+TtlnU z8~0jR4s#5I3r~i6U`)QJ1|?v4DFRAbtN8AqrGt|W$oi!sdvtK_n$i!lOE!tt@8qtS zi0o2j>bct9HUhf3r1efh$#Ivmv-dkHZ;#f-PhPF5@%ERB=IXmu?&*5j5_f2}tWq0N zpf!cd4FWaFIlt781pO{k&v0 z_B&Ob#Y>v%k6pnCuF{&qWMC7#oT0M#Z3p^E;y5pOVyReR6SUE%VPoAnu*2D$KDdV$huiu>o&Ipo|v z+q_8hcS1U|bHzo!w|s=q9?M63cOspsfQpF;mv-h+`B;dGFP6^5W>fsMNQaAC6Yq_< zg9WB=+yJrX6l*OKoB>kcQEI{`usjp+N-wy6_7bvOQ<3F|TFNu8vVB{emFbaXzse2z zR_do(vo*TsqGe1{-#QE_*&Ye*SKwC{PqRY6UIxtFk4ca&XvOQLTz+=ZOr=ZZc(hSe zPIGZbt|(}Y?N&!81wK^hGTCyE5JwX?7s{fQg>1#>VY zBO=lNKDbpgn!erjzsn_0#GaHk#*B;pp z1%wLp9hk?(YgaW|GaF7p7f94Ymf{msrL7++CiL`#z$wSJT>o5tFYb17h;o1(&~{K4 z4@^7+ka`9Z9pa#h2{8rHH&@>qKYXcw_`Ug7Hk#XEFb| zYDi7LS3W{22o~ha1RxIaQ3i&)-g#R5fPczIlM+l~1bl!x+!VGLf0V;y;s541$*Z_+ zRJG@6Ahh!i%7q|1ZWM35EcI|u3R7*Ky2|cCZn`V)3_AI8EBn~gn9d2LxA44hlLT;P zUXbpxzW_9*XDnpQmxEQ!oaY`WJ&I~jJpyi7vd)y*@9Zt&L*k@KKB+(aKpnoVHp+Rw zsbKshMLVRJH$e)sxQE~O)%$COdFFM$2;;D%*m?z}X<-o_hhL{y7-wH9BG${Uk*LZG zn6oJX6baba08gAokc-9r*Jc}f8c+S38jHTsU%fEkY65wkpoEsWqJ~*elho6MU9Fn^ ziHk&a3(_v^`b1lhy`M=2sBYpsUaIIX6mId%JBH|()6h|s4X3L6cKE{k#f6kmTOVM zsYJ#3Lqc*(qK7l6Ve{Qpu0{HjpW6K3qW@DKkG@@HX62{-RMvg*)Hp}h15gb2R<0e%1<&|&*PYm zD2OFI5?xDm9Tt>ng58$%$fg)?8a*t%@0HKg?&BB%1eA#YdO~@dXe?v&o4EiWR?U_w zXWgXwT43l{!Kx70HcJhGEWj_yK9Yk#RU&f}I&+hL1}n$2(cPMoG$am_&`B*Aid$0& 
zgGvYEy>SH2X<{=Bzmql@L<<=ONl8zRY&h+Sj=?sP6)-|l9g-x|cDz@HUnnY04iQpH z`N9St8hk{WFJ;OEkZZDGFU-?2b#n7sE+Sfoeahy_yOOX1l3# z&nRLwh}4V$i27#zO_J|`p(*8n3SN>fP7!uUx7&a-1gv7OFookL5@l=F%nLjVdy9ZCS*_{R?hpgYYf5E!QqDBat&V+aGh|N!zo7_cEcMB4LA#SD zr;>`PQfe2}0RYtlR-Yfv_SO63M>-`KQHcjbNy!)pd4vT(h$+RV!h~P!OA(LNjjm*+qVi<9YMcy92#t)(?f{7&=e&I&)6YyhOiNGIr^nsP?W z$S|`RSy6_l?l7blkX~_0C#8~Ze8ToeVhx-bG9T*kbcu}Ne&-u`6BSKbW3Nhuby2v6 zlJCyM0EfR$wn@9yST|RMR?oAQxE*^i(m^^KlDq>ey zea_FiEg@=;J=tedgG%%y5B zGUh~LC??D6z(l?tDY2BWO^Mnza6D3YxN}ly9p$8^2LQ~kgsB|RCgeIh3bS0B%Du=%lMp8rA zf-&<`1twu82~b?hG86Aai(I-`dC@2;gH5-(1upK#Ti<&{TJZWF7yC9XwziZq3}Zfp zo=8`Kwv&ACvCx&5WlR_Mjr4(igjz0yQNMiX$*Q|u%`V?Ec03WjSbdKUi48mMyR4R59E9_?dmVyB8{ZMwr}7LLsb->vEEKnZ^g_1 zT(88yuE_F>QVI!jEyR?E9B-iR*0wuITFdqAsQikHg*et#)+B-6&N=x{2|a3On(HSY zuZrGSi&wJ0l&Tbk9x6RfRz-d42v>zOg`$c%<=EqnmrRmiswUu$IGk}qzf!7{7FdI63_&km zN!_&ZWpS#6l26ePd|Xn@wJl}Bow*Xq!t^cW!vN)kCZLD^F7Gn1Jf5T>smIRR>JgquXmhvys|P$^8X=N9_ESI3bmtK zKO|hkJaNhoYQwf)6A#$9qGJQNPJbD>bg1SOc5C}}(UIX=ON1+a} zwx7&)u+hxA+wr7ozPjOhCh=hy{sl z95d*S31(<`UfQGC$)JKdX+ukR+!TyuU*o-u?)!ld39kOTFXBdH}$Qap)j$;2k>F0u7iu1mW=Qs+A7kuI0g zhVjuRRMdaG-Jg)XO@^HVEh%KCK-Mhr&TuQnD%al+eqVo=5`@1KcKyo)^eF<4RZ~!V z7bj$yo2x5zWTD$v)y`t!wsTT<@$d@49hanv8(FvW;Nrl7JpB&hocwG&E|ruN zja&sl+`W_EsVQ3x_ctB~3XcPcn*2`BdBD9 z>ol8^q?i`qQSravO*fv6i+fAnBOb?w;|=SJZ#wg)X6Vp7W*@1;0 zz&}99*>4+^NH4iy8U_tx2{6NrM~zslb{j>zLXW9!U4>0du+fj88ZZ{m6&kJ@EGQqx z6DTr@$XXx4`TLhmjmU{T6eIjJC5cg#dbnWedgqhGQ;9i4KV6lMuA?0@ z$j7EFc6X0UNoanOwc<}Gxo3QFU$XGHtVKVzY_&SaVRJ!PxmWqwr2J9-c$p|KHy(8 z!pGtE4(y19-GosF;Ek|%st~ek3V|5GDHr^qmu8hJxk9kCeA*spy$DiG`_j#{CbF& zjSuG@_PQ6x92HPDLf$PalIy7NQN#ZHxcMk9K+CLi;>aVoRbY-IPJ<7C!rcEY9Wa9z zvppiuA8F~4m+!>aoSl_VkO|FhupD4m4bx8;XFg~O$4cL~9C&Kasg0qb9%jP>(?_YJ z>_J;HC1S)~>`hc3xzV3;^}zlq3c^8;EXNQV1sww@7U6B`n{pJ57Mh{5n+jA^H4M7_=f| zIt5glZ{ny0=EJ4rIpV(GKx~4Tod-ZAxBU9;G4R#DNUbluXULP?eG}Ir)e=CAL#UB9 z)O!kzK9K}_52_0Zn?o=SA~baL^EG#J&13%#x z7vjK?Xc4FGv4IFAZxBJq=jAYUm##aZs|^EuE)obhUh(FBNtrEjFu1;mcj4oNdq(DY 
z=u}!$(4xLs|A3U$ZZ46%y*4XtDAp{1oMiwfd7E0w!Qs;iEvwPiDL^XF8Uc_e?Dg6N zebg%R)+W$0TJSPzYWS5*a)#H1vve)@j2voWxHR7!H(wsW8)KheGg!Y?iy%iMS{hG(|rUF*RNRXm8tp(flzDLnT zKhOpSL(-W@oWY!cX96V^FWHoa-fIzELxYm)e!>nZe*>ZWXY*5b7PuG+Jr)&tOhuEI z6Ykc|TbtyxxtN>ct*ru;Zu{~HV~nt_>!jSRf*TwJ#_ikIwV;Z;%!Eb1_Ak} zL{%ns`Fy2B+3ZOHE;++Zh&mQFj;7vi_W5>lYBQ;HPT80Nq+>x^Ce+x1JkRnT_BOT; zu&RP|#2DSEwi3gFgt8)d5b_=|tk}Uj?Yjcyv80l0snfmXtRcRaK!{9{50xFX>mHa?CF(!9` zK|4{eA=AEAnBS96IljoX3uP4b0y5p=>&ivU5EFHS8sy@*hgj zggFEmR5zMXB&I@g-LMQxFAK93_QuP|8<;6;VI)zV`M1Yns+o%}0T%6C5F|4?A zZzS3zcfq-8$e{0%;KDi+5>t&nU%w>@Nrhd^UG^+fw0m4BW}#SN?R3PTgNAKA4c%R}%a zg>ZtCx5RyowtVFs^>7)p#DS^jP;nn27!(()2;^gDAo-vu>0|B+Wmg8|oCg&=#o%g) z`=PvEdjl-cIF4>9`(m-WdiCswI+xAf#WzRDPy_9FncDmJ>jqJDzqckT2 zEU01c@B%L9=jp4SZ~a-BaOt(SBe{7)POb@0b+)7fyw+YFvAu2D%qk2Nx`uaU3{)}- ztAgxdi6=_chjWE{x4miD(lV)@YG0RRC>|SrA#D(UN(4y>AXQKiT2F~iZzx6=Y#M<~ zQb`X^+w&U&gHTh9mGUrI;9HdL89hpKY%ZKoan(c~T-#^&NU{rB!d_6vU?xP?Cv^H8 zHIz3X!;n%I9!_;hE4j)JD0h(3QhK%9uxIp)C?RKK^dVo(MR7^j660Xv8LOdI)}fryCTdRcCX6?uJR2WjI6HRyOM%CGzmR(o%u}z(!eLoY~nUbsP98jepqbT zC7`@;uB%i*l1GSf!|Vc7GcB&dbGDBUxi`yGonSHih$;;Dw_wPr9Gld63xYN&iBYY0 zlJ!LjpM;?)in#t^QTqf%S^^@FwiRJeT9yWxx_vVtELLfC*A(|VG3B5dAZy2u*k@_j zh2@8i^B_PW61O#$zDyt9zoiOQkV)N8@VX@U=Y#e+=>LDB0SM**LAL*=x9@;!>TKTz ztqQI>aPMtJV@`6i+=!wm0x}e5%mfqx8H%Gg5XG%H>mDd|R>jtddr$>;apT0jP}DlA z-}Rgk5>A4m|L=X@UrU;t^Xzet>%MLYvS1@Y8_y(Eh!((rjOb?A&5=PIcu*V80+UAr zoz}uKh#Lo41dziHfCTOoB-gDq9|+#i!z+Z8H2^&ufd?c!A0~;x#v4d$2tYC@Yz#7- zKm?F%?G1_sVYFrlnLuJeA@&+<9F+u+88OJ|$m#X%FM_SlAUDXdvvh#9o0&;W_8OX4 z7Aa+?5Qv(+@sT(f+=%Exi7klDkTD*V41l=laY%q|0&RdXhNPJ%e<-^yuC5%k0bVMg z3@PDP5{cj`iB$NNO!$>t_?1HVl~VYXO7N9b_>@d6cuGbKzY?}fC2YM)_@c_ni^2}U z6fUTek_cM{K%ekAnQ*WvnU(V7!tW%suz6C-%D5@4e5Vw)4{;=d_R$Jqy@+-YK4)d9 zXqB~hg>?YeD<}`FG{Wy_VVkjih0iI4lM4Y?_?${u9>OgI&jDp2{7xbqdzF=WQ^|!p z2h0}2@~q6S%F2AG6jsVp2>Yf|SZSX^IHn-i7PiI8+JJjdIGOlTI5L#j%1|Tx!^-R6 zb`Vq!kBx9f;RF^o9n?D3o}`5XgeQd~jxQA^ktXj(Rdx09>aOtAAaQ^%gfa`pGQGh#o}|!4B^1C+#77nu 
zK9T^{L(~j^ima1|Z=W8h9hPmFc9Hi2vZ+A~-&2KO*T1RC^RLc+9~5LVS?%psH+ z^EgeMT7+ziqI?*)7SfbO8Hl-Ad5Z!ZOa@r_6`Zg| ziZunOQ?!@ApSQoC+SSF|&)=uBunbU@!s`v$U5AUyz##`LhU)9C4od9Pxr@86UuU1r zoyhQm(p5{+%NbE31?(}ovpD;YX*j=Ls;rZrmya*XQ;{Um2=Fu_dLOaeX6~56c{8NF zTs+uegL0_@u#{3DrHR?u=r)r7T59EL9f| zwYQ6>yQ|vO%iq&aFf}mvK`un#0~BjLvXM_=`hEV&}-VatCzn=C;mXe@<)82 zk?{S%xruz~;LJ5!{;Wz%lfxNL0)2o38FDR1G7#!vrkk23lO{222sMT}3Fv__Ohw$0 z>W=PyfPnM0VOd$@ReyL)!=MPFRI zl8p?`G^v#2e%FYJ9>W}l7U#AII-1h7UGm18@1Phg7D|Hw^j=WzVR*{hvU$&1o&(A* z9$p9<04QQd%6P86}i3L}`RHdXv5qP3$!H<~#ny&|<2-1x1gBJ%=-7lR8nBqOr85pKLy%g- za3_9C+CmkL49H3i!c`Dq3UP|9n}P*|SYJ3C07GspF~Gb?{5J?eB=8Bn8ez`_34sYC zwnkbnl}n+H(-s5@=H)QEf+?$|fnigyIjihLra)j8#m^SS8FNfv9zg{tlwA?*TW}4+ z0LDDUtWe9YDfyY<4qb(TpC5Ie)d-v@KxAnZTG*?kSjpDL$W#zTTTa4PA*IYI3y`eb zsFj1@g+q+lXl9{HM}QOZ%^*juQjdgk4C;_UabuB$!?+#>7z;?Z1zLtsM-ZwfR&*1= zaaPEjT>|QEb~4Es$}zr5NZ5$fsQq7_o?NRmFxN{EVJp?pP&yPyh=kTPwe_Rb_F9~k zme@h15$6a&wd7$55y}f@eo$xv$E1LS`lIeauBifIbiQW(Ahppj1^YfQ%0W8d*$`N( zkzhcyV&a6#ca0Tt-pP{Gwa^=ef+QNZ8&U;|BrJD(-U48rQS;A;V4&0i&7=8WE(FUa zv`~QclOzIwq=oQUJ=pNcg81q~q_4L6VX;6=Hvp^yf?J2UulR^n%PK(t1$J(a;Gyt? 
z>)nP%x#%_FIHt+1Wd4$wkJl|MVkl#hB^MHRDQU$t0AotIN(T3*35KR%LC5(k3^4?1 zbYKVV#LeJ0L%a-N&Xs{fLPNsuRJtF=Trdr@zL3}^!bee92@jLX$Q~yDG_1l5DI(b~ zKU^5e1PO?2;1A)VOig3xtAGF}sf|qAnRQa;pR?g&k;q*S^U60*7Eo*m1()i8O;kYr zz&-f?YuBS~oHL1{E!!;_4kUh8LyTvz7MA!(iY94F82pNwBsa#{BnKHjj!U>$#4?0B zbKWH@>1p6)!STkYsW0GtI1{1_fq)#JNRmzbp$TO8_qtx);mmCws`=-*#!JtPgWO4+I(&YFh=EH)@ zV$9r3DC&Q6i)uE&;#23TbhS zh&$rUa0N8R;%&W7E=4eu62Yb_6$oa&S~o?rLlbETM|Y6_i10muvtOnKO*(;J^PbWI zT41FN5cC+Z$RMR^ba2goeGIIWfi8fn2vneQL}xhbP{tt;k7N)qGNEMFPBA5~I-n_N z1_GZ!APgD?1o)B5?uVeh1&E!zH9~l-QW7G&n6(N=mqe@6;{H|1z@}^C=uq-Pwj2bO z7-4irBSWyJREuj#r34u&XF!s((f*e5(|5?|U{cVE;r%jbv`BKHHm=#`GWra*PnElxKMoK(x!bNWQ;1+6vcL)Vq_C;oM zeU-8jI4uHxM&vTs^^|;w@AnY6SnvcvWd`pOg}6R2Q~PDcR25Gf=D zT@WZAzQ14#0vC7yiI}ehg95?}_1qmqvs7OVJ-ney;vYdwF0i{~iD{z)i5|&i1OVf- z7U_>^0B8yI5+}wfSzVHyS}bIiOB6B1L^u(a{4H}RIzkyv9XEnA%r}e(?0K&_(31Vu9OBM(dF4kzJeL zfqKS#B6w;g4X=Xa)Rk`YB(-EsSSO;4;&DDKOO(!?-80r&^; zEe_Da9t0*gB~xs^mB4%_($Eyp-BQ*6auqN&w@^ZS!5+dPAqEM(6viK7NVV~swZ>Y^ zyiGqKsLp{QA}&ZWq)L@k2jrR#yi`)euy7{|IRuHHPol7DagA1T=24s>GFUB@luQZ} zIB!z;24FdbbdD@?AMrHma5ggsn7zReg#Z&8*&JySPt6-0ncgzf#YG12w@<#RNGw}$ zBwX&;@8}({=|(A$H4q36DF))dut0VoIa^G=;;ilvv=WeZFh|m$iqY}G&g`4)zbR!9 z@Km7ZX3Ay*td(%TG3*0q7VOw0F9;w!px&d|FnD!DpqeExm{1I?do--l2pNEh&ZyDa%3b4Ye)PPqC5;>T>{xZcAT{;NkFp6 z=?iX7W-2XI`CSS}QmOEp+XLK4fbOAT$ zd~Ofory=qfACt!|dJLdQ#Dxv9M-@4)XfhkG< zNNS{DGlt3h`-{z|*%+bA>ZPD~N9Y~wDIALFdsHS1%?9}Z!3v^9dP1;Mf+N^`>HJ{0 z5llXE>@4Ii1<8++k8Jtc35lrzv#wGrk;B_2h9OgiHsudGu!8!P$iH6XUythDqyBmgi2lJu?=@*DwuO*C@; ztRv+!gM%~B{6NZc$Rb%|ei#t(tuhT`Qeucid}ut=i00lUa#y4hotHdBwb$3N})btpw4%A`x@{ibQ@9Bgv8I9va1* zh(gVj)!qfckH`oHJ%b+v~nt*CrvbAvc0y|dtTEa5an$9`4gAxiMUCaeR zd=Y9b6l-AG$VGuDVjb-N5W^{z4yl`MRAzQhn5u&Mgzgce80d=hOd1d5Alx&=%*ol|*vV$Z7S@|A0qVOo~S;Vh&X1M_$G;KPg!Jof723 z4BML0gZm&SIvX4Ds?_0bM2;&2V#8{0t!f74*OD)T9}g}o4BJ`e5s}FBjEJwu*7gt_ zhP2+OfDolc-gGI9uAKP5oIxCjP|Q>Uj3`jmjFRDwPl|BONG_K(F*Ilq^$3Wo1PKnz zL}Al7u|3HvhwsbRs~L*GV@)8@aKTUuACeXyWr%#{BeuU?h7~A|-A%C~_JEuhIzev9g?*CNcE4=F2s&JW9$o8Ym9vmE;1G 
zf_@mhRV3$x5;>%JqbC_Ov)5D>q1jk0Y+Sb%1mvJ6Lo_T_296eXtmJIg3U&!Im$Zo; z0pq0S5ncidE%T`LyajMW=t1!!#&TAHyusRONu(cBJ?OpQ2*O3n%NjqGZNkLkQu3L#`1Ll7Ren z$Tna!T03=Nya8*w#dajBwe^A}yj3O?5#$EeAr{OKf+77ZxWM3ChY<=Gd~TY*(oQ*R z;~`57hGrweYT=hdfF~c2X61D;(@no<#0;2vsfTxqk|S(S31&4TucQ?Ga$eOSQ3Y2s zWk7xdxeUpt*ad3yZFjP+Wh97?Yz6Qe=@=AvFo%LI8T6Cv*4#MHd?rqYd~N&NfUp zT&p@5BG|@%PqFzFb8rwKE(k{^Nn%9tBnID8u*EPa0IdcSHF#FQT?wOCzQIkWeCD1H!u-$ zWh#b)*zA<$3$WsjWR6y0GF63M16B?NE)y(rto5k4ALM0v2tgTbJ^so6daF4-s53;B&10PA%U zf&c_t6U75oY1SBnZrfOMok$yrTFOB;V8aH%G?f3<%MMtxK=@8w4OQf?s7aXxwlhQP$Kz^W%u~jqsb0=9G>C>JzkFtVK2- zgojEE@DsqA&s`QA^%s%@G3Fnp`L;j<-fW~CKdx-pEmj>|--O&&iOwLfod{Ec(~VfC z*o2(cL|trCo+?l9CUcfD5Ji{@5Pfd-=w?iZhA5o!Q5w@AG5Q^2_~L_+R45FH5fdlAuvQ992e@g{X*DL;q(;k0{DF4^zVnG9XiigAWiAxLCDf zizA(MnDL>hh$qW~@usoD3`2%NPD}+bp0J6Cp~r&p(3-&%g}^KVN1Sjl^OY^v1x1Gr zAvjh!fZ-YyE)9u90|X$a#;RCDAS)xGSYk>c<%&$Lb7cx}$8{jw(ag|_z72;5w>6y2 z0R2!v=IVJLn*6Kv3v0C@sv%H>(0L)Ao&w66r_N_)T)`RFR3_t^3{mY&Hfae0zEm0{ zKnIN@7);Dur({eRBNy3IMB@vMh9T41u4OhIl5cVrW0S}ttd^3%Om1ofW<`nxLWwai zB&{=G_UvI~VIMHJ_%k6cU$gh;JMxV(tbimsJ+YNZ6*rF<|GlonxzoD1u7qbXGh@gY$C1#EC z9SUYJCER_h0Ye}31~EL861|MhuU>yo`K-f*E1xDtYFbMIeNY2!0eks+`~8WmRbcyp zssYXwyF(9;Fw)qW&-r(jZ_$1T5qc%Fy3z-B1UiKr59{C9n1?;*mooLH>Pyk~Ji;f(;$j}V^k1`nc@|zV%-w4r}EZ>!CW`W`@Og4K6E@A9;8q8$i&t#-OlQ_X2 zh)fj)bc54)%&V~E_$I=RQG$9DKn@|7h?Qmb9d&Sm1GgQqwqXB5iVd9{L2ClN-!xQ( z>xboz1SAq-3gW$xs!yR*S=%m5KUmey>Te8Qa=d7?$D>(dUn%`HPWdV31TlNJ0RDc}JZ67IeH&7%w zA;N;?rg*>a3PS)6OM&PgM2>>m8pOQZ%zjU+6oAt!kf{)%o+*iLa*XeDB3Ue-E9 z>_J-Am_xc_F_s((6mDo_com5}n7_94;d}=JFn5#+ZXy+w5769OGet4Qx8zDUA5LUU zBqZ5zXT~5Uj~UU7P{3IlmQvU_Q0^ZPH)70w|EzzPRZ4AG`i827*qs)J$3G+}v9 zQ%cs~&LGAjVf$g#avG*zBd80IavVXOyk5+RDQ54oRjEV6^1umjxrxEHAccuTO2cw( zwNkbSHADgkzfhoXt-}lq@P-W5OtUu#wjb_cN($mE%yAy?x(>m(G*U9d=nLeB#Xzu( z+As+MVJ49?1}dNv2x%4b6_cHb@VBF8VpJ#emwHf%DpWdiWPwE~G+PStA_>cY$V-gh zNJWVZtH_gV)_RLFBy1UM8^R(GqR;dOtpH0V+_);}_DClx=DGJ+i4faO&MNN`Vyfe+ z=c8=+65wE0=n<_C_EI>>ER{sOMRB9R>|6dFWOY*junYDDDKegF%=NNw+-8&ljEX*f 
zYA-i8-_Cw&AAe6@^affCuwpt;`D=9mT$$?`-&n4KEszwZLtZo_+ynL-IaA@!6~cHa zsl?9we;=|+AGEWpPz&GLbKg7RyCV3`(XPl(c6LSZe>=Ni{1d?aKEux8c!#=0>`HV0 zUGSe`9ZbLD0Mo*5tCuN7`jPk#yApxvj`JD>Ong|pWAYQv zvV_k?YVJ$E;3y8)?N2-89{p2;4i zK9~J`X0rg=`^GPxE$tS6(F`d+Y3ACAr*Ablx9i!keT(c}j#jd3GC2A31>X`eyT>bn zyPb%A{-A{V^w5OcB@$nb+SC8=gPe^=+mvl^Kf2qmXUjEjF)*ZApS!Ibqz;-#tLBd% zk>P)5U@OJ+_m$o0~)3F#Nq?Tkk+Pw6lIw`jcD zp~*k;d%0Ui+f|iH!t}$>HSRy#_d@YvWfOuOFEu+hdSAlmtR_AG=(okb`R;4=R>V$e zxFP$}z=qeyxNTT=rC!^2gEucaUZVf@y=9;7D8A(P`Q5Yht$KG_@gS#}SL&ZX-=5v0 z^n?ahjgBM>+ky=EBfj$&Ete-H+!)lpA%rUiZY`e>?mn zG-&Xi&lxNFCMI^6-tmg6i z+Bv4uk&`3!{;&AU=$IFcOAZRXbzR* zKJ`$mjA@;QIK7XSXO6q?Qf^h&{+M1#P9xt(zr5OgVdL{orz>1ru`}$zkrmqWqh;6Td&>9zVSk~yyL0e z!GZ4;PghP{n^kXNlcee~;~kQhHf_<&I3l<5%--n(+D|+rIipzlc#+qpyNZK>L$&_X z7UooUtljFEU8UHX)ylM*J^%8vP}kG1i~L=-k<*8E+uRfTNz?0XPJ5MH(eKV7?|-LP zkUTze`AI)r_JNS-(vi*Tb&Ofpct+WQCsRV+9JtC%K(^hqQe31&3os%0Z_xETP^`Uyw*ll{}14{=u#0~yq)|728BQmZAjeFN^rSBc( z$)tww)eeU%)ODNDw3buOnH>w?%}v;p7%@IH)qlx=^N*Ds8nsnEy|caK{XWUz?R=*E zl2pB7+k4#}`|UdYH2h|ld!-iD-Ra)9jIZ>}i{LQ^L(Zw#zH_uXx7zRRn*cZ&rW4Gd|2$$vEtK1PAxt-ug!Vs3Ti$MKv#*?Ey7)uiX?`pI z96|5iaPsbN+EJ0Ui+*l!`%9OHWn|CG+KtiII+wX;o&D<@TPwGXYU!QzXLk)9bER>o z(`ELqNj>$hV$Wi`j<#>#@RDEYy?w?`I2HAH+Tpr)lM=2_nFnfSwAZ-jl_b(5N%yI}@zTUaR{^!4Vm(TWSK+nE8A~~hkku}j{D&Oh#TV>~N z?slbb7T>3++NRHeh;GNOl&-Kmv%bRDt~BM`IBoPi+2XV-xj(A1b@!L? 
z&~({fJY#_WfcYMa8=Oovj&zLe-#&TA>+61L6SmF&&?@1wOGc{}d+j|JI<4XA_3$cn`>OXpd{?vL%@e)CSEd|4-(^9I@%5GCwR>jmJUBWe zTpM3h_Vbn`$-ohBhMe**d#T#RlV$e(-TKng?YDNe9T5;;?nJ3R{T_dM`01S6q#d1I zuKKynF8ZRTQPd}8M}?8k?c>CMjmsJ~G?w11n(e;xQ1(F0@LB06+jgJz+rQbv)90)j z)xSc_{>DXbGp1$h+6-)LGMk2-V8kb{>yru>(y+R)gNTOo-((M>ld_5 z8u`-OPbJx1?C8tV=VtV;H);K_&S5L|N6G#iHF`<)C5h5XopN^c7`AlB%(?x}b*}aN z{;4^+VIov|004nb|Q*4HqV+JUH*x zJVATWE6mwr#L+tw_Y67R_IculEQf<$D>vwx^xAT$WI*R?b&pZ2ljIe9eWO`1M9bMJxZyaUGa;gwIt;Hsq9amkB-X-+t6y- z_!sNzD*ujrckNZqjijLR$BG_4S>ycX)c33U?Fm{H^{nEMd-qqpINJ1)F}_2Up?9b3 zJ=7%N>F&mH>fUq8wtO{hWQ82%Z^L@bJJ+|Jr+rVm+dpm5?5<#}uO6Fr$Vb|?YxM@D zyS(1ocJIfa9&VE)Ket=+q}00}&7<2dxv=#{kav-u5kGh6HM;NcnZeB*zwFz3#3R+K z&F8F5#kRbOE8hHQjbbwf+z;x0JYY<0<-h$q4jMCY)B17KsxF~_%Sqe3^ig)e8VBRV ztOaKebZrn<$MZ{<;~zULI^td(-a8x99fWHuqLqg#~er zS#t+}O}_on-*cMNaRa=zq!p6~rOYkm8sqh-{L|xy5+@Z~Jo;rX(ZpvRQ@4+-Qg?N6 z_39%X`nJkw;$U2He(1FM{pvT2c(rFkk*wODX(xX7Ycr^7K;WW?Hx248x!CDKQ_n3) zJ$hDHAf2s{C8n1xXH*~P6CB^~M2%q`r=&eTn>fa8*Q4V@n>7Eej(f1tu199%Naw_w zom!u4`6MLbPGr4K%Ln;{9-a8hlSd;$+ODg2zQ*la#aqoN8RIbj)iD3Ef4!`jRz{WH zBi+~DVf*^{oNj5}FKdj7A9JNkzqBNmAy-7N4%&aN`eyvJRog$_ZgtXed4}`J9q;!v zo1Rvx)~W0vHR>;JyX;~q>9VUEOVb0F)n4c8mfY-P$I)MwFTAt-#m%2ffBICr+1x=X z?Yo53%-whYMt^ClXBXMuB_dqbKDu?~*2v86HScI9_f6cC9DnC@#Ixs%&h&fQV&ApD zX1av-53DeFap?mJY2(3zZ@wEN$&jrNjYzHUU%lJ5)g#)bh%*<5>6)IaSuP>^Q{N)t z|N3nUZt8G1DyZY$^G6frBrhuQvvZ~S^Riy`m?U=|*Yls-3rEDaTT>={=A;FxH+y_T zDmT7Zta7mDT3M+E{Tz3DS6b?n`s6>#=lIe~*=3sfyu7?}%**0SYF~EWdqcjrMTu?B z<^Ai-jZH`zwhpH?J6%Fr(|zCkYoO-RxKM zRU9>;QKI~QiQpRDGBQFYo;a-;>*J)mwx;BpajLSt8ts>^Zu0S$jrZpVq>O#`>WQeM z`_!_7)IQe-l%Z$)y?GHGzp`tty}rWs1^dS(H(tEkv&)~$hE+Xvw|VbpZb`S7b&%X# zdPiGjYq^M7i@KkT)HmFuTeNaS(~{A*9jA@m)NV#?I-zIndP~zCI;?pew&dtA@#iGj z$x9b+by3$Cwf~^sjU`J4pI`NK+`+wH_LV6ameO8x>wG|JF@yT_Y7vgT76`KECYL+vnEEO)ZT-U7(r?$7`4}8K@l=1wr}h4wW;QwN zzv^&bkGLn>|0+IX-tuJ`Ee=(=UZ-~G(=N|Hg$K5>+da3)+tEYH?yVYTyzOsXP%36& zvofvXD_s7`W8fae@0F5wma9K>!@BYNifrzf9qHh7_MZ>-JDx4N)O=R|Z2{%B*Q~eV 
zxLfndt6U}t4%FV+baYGkfkWesuP-<6bM&+8^tZQ)zI7V#Tk_HDuKTB+II=G8 zUPXs4-6q{$)78V#ZH@QvluiHbj*eV#RD7(|xr)lzc+UsoStA~|?LDL9ng@<$TieMm zzI=Lm{Nu;s*nnr|s_+5{uT~_XMdbjvRuIJHI*8x8*-v+~HZ literal 0 HcmV?d00001 diff --git a/.venv/lib/python3.9/site-packages/based58/py.typed b/.venv/lib/python3.9/site-packages/based58/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.9/site-packages/cachetools-4.2.4.dist-info/INSTALLER b/.venv/lib/python3.9/site-packages/cachetools-4.2.4.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cachetools-4.2.4.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.9/site-packages/cachetools-4.2.4.dist-info/LICENSE b/.venv/lib/python3.9/site-packages/cachetools-4.2.4.dist-info/LICENSE new file mode 100644 index 0000000..fc2146e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cachetools-4.2.4.dist-info/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2014-2021 Thomas Kemmer + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/.venv/lib/python3.9/site-packages/cachetools-4.2.4.dist-info/METADATA b/.venv/lib/python3.9/site-packages/cachetools-4.2.4.dist-info/METADATA new file mode 100644 index 0000000..0b1d25e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cachetools-4.2.4.dist-info/METADATA @@ -0,0 +1,135 @@ +Metadata-Version: 2.1 +Name: cachetools +Version: 4.2.4 +Summary: Extensible memoizing collections and decorators +Home-page: https://github.com/tkem/cachetools/ +Author: Thomas Kemmer +Author-email: tkemmer@computer.org +License: MIT +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Other Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: ~=3.5 + +cachetools +======================================================================== + +.. image:: https://img.shields.io/pypi/v/cachetools + :target: https://pypi.org/project/cachetools/ + :alt: Latest PyPI version + +.. image:: https://img.shields.io/readthedocs/cachetools + :target: https://cachetools.readthedocs.io/ + :alt: Documentation build status + +.. 
image:: https://img.shields.io/github/workflow/status/tkem/cachetools/CI + :target: https://github.com/tkem/cachetools/actions/workflows/ci.yml + :alt: CI build status + +.. image:: https://img.shields.io/codecov/c/github/tkem/cachetools/master.svg + :target: https://codecov.io/gh/tkem/cachetools + :alt: Test coverage + +.. image:: https://img.shields.io/github/license/tkem/cachetools + :target: https://raw.github.com/tkem/cachetools/master/LICENSE + :alt: License + +.. image:: https://img.shields.io/badge/code%20style-black-000000.svg + :target: https://github.com/psf/black + :alt: Code style: black + +This module provides various memoizing collections and decorators, +including variants of the Python Standard Library's `@lru_cache`_ +function decorator. + +.. code-block:: python + + from cachetools import cached, LRUCache, TTLCache + + # speed up calculating Fibonacci numbers with dynamic programming + @cached(cache={}) + def fib(n): + return n if n < 2 else fib(n - 1) + fib(n - 2) + + # cache least recently used Python Enhancement Proposals + @cached(cache=LRUCache(maxsize=32)) + def get_pep(num): + url = 'http://www.python.org/dev/peps/pep-%04d/' % num + with urllib.request.urlopen(url) as s: + return s.read() + + # cache weather data for no longer than ten minutes + @cached(cache=TTLCache(maxsize=1024, ttl=600)) + def get_weather(place): + return owm.weather_at_place(place).get_weather() + +For the purpose of this module, a *cache* is a mutable_ mapping_ of a +fixed maximum size. When the cache is full, i.e. by adding another +item the cache would exceed its maximum size, the cache must choose +which item(s) to discard based on a suitable `cache algorithm`_. In +general, a cache's size is the total size of its items, and an item's +size is a property or function of its value, e.g. the result of +``sys.getsizeof(value)``. For the trivial but common case that each +item counts as ``1``, a cache's size is equal to the number of its +items, or ``len(cache)``. 
+ +Multiple cache classes based on different caching algorithms are +implemented, and decorators for easily memoizing function and method +calls are provided, too. + + +Installation +------------------------------------------------------------------------ + +cachetools is available from PyPI_ and can be installed by running:: + + pip install cachetools + +Typing stubs for this package are provided by typeshed_ and can be +installed by running:: + + pip install types-cachetools + + +Project Resources +------------------------------------------------------------------------ + +- `Documentation`_ +- `Issue tracker`_ +- `Source code`_ +- `Change log`_ + + +License +------------------------------------------------------------------------ + +Copyright (c) 2014-2021 Thomas Kemmer. + +Licensed under the `MIT License`_. + + +.. _@lru_cache: https://docs.python.org/3/library/functools.html#functools.lru_cache +.. _mutable: https://docs.python.org/dev/glossary.html#term-mutable +.. _mapping: https://docs.python.org/dev/glossary.html#term-mapping +.. _cache algorithm: https://en.wikipedia.org/wiki/Cache_algorithms + +.. _PyPI: https://pypi.org/project/cachetools/ +.. _typeshed: https://github.com/python/typeshed/ +.. _Documentation: https://cachetools.readthedocs.io/ +.. _Issue tracker: https://github.com/tkem/cachetools/issues/ +.. _Source code: https://github.com/tkem/cachetools/ +.. _Change log: https://github.com/tkem/cachetools/blob/master/CHANGELOG.rst +.. 
_MIT License: https://raw.github.com/tkem/cachetools/master/LICENSE + + diff --git a/.venv/lib/python3.9/site-packages/cachetools-4.2.4.dist-info/RECORD b/.venv/lib/python3.9/site-packages/cachetools-4.2.4.dist-info/RECORD new file mode 100644 index 0000000..5d28d38 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cachetools-4.2.4.dist-info/RECORD @@ -0,0 +1,26 @@ +cachetools-4.2.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +cachetools-4.2.4.dist-info/LICENSE,sha256=cJEEfa2G-tF5p3smluByDLAX4PRo1HT_PvXKLpe1qQY,1085 +cachetools-4.2.4.dist-info/METADATA,sha256=_aiz0OsIkcIshjN3J-2W9xBtkx25ZSUgcw5hLI9ns5s,4805 +cachetools-4.2.4.dist-info/RECORD,, +cachetools-4.2.4.dist-info/WHEEL,sha256=g4nMs7d-Xl9-xC9XovUrsDHGXt-FT0E17Yqo92DEfvY,92 +cachetools-4.2.4.dist-info/top_level.txt,sha256=ai2FH78TGwoBcCgVfoqbzk5IQCtnDukdSs4zKuVPvDs,11 +cachetools/__init__.py,sha256=sgSnzGmjze6qg38nISmsVN5DRv7wY07X4E7JtZqA-nI,16837 +cachetools/__pycache__/__init__.cpython-39.pyc,, +cachetools/__pycache__/cache.cpython-39.pyc,, +cachetools/__pycache__/fifo.cpython-39.pyc,, +cachetools/__pycache__/func.cpython-39.pyc,, +cachetools/__pycache__/keys.cpython-39.pyc,, +cachetools/__pycache__/lfu.cpython-39.pyc,, +cachetools/__pycache__/lru.cpython-39.pyc,, +cachetools/__pycache__/mru.cpython-39.pyc,, +cachetools/__pycache__/rr.cpython-39.pyc,, +cachetools/__pycache__/ttl.cpython-39.pyc,, +cachetools/cache.py,sha256=_DGN7oODkIIC0PrOZecutp9AzE1934_6X9OsyzgwW6M,141 +cachetools/fifo.py,sha256=6VPRGQjrfr6OlBnL6GnB6ANWFioXxG3lqR_sRaZoZWU,148 +cachetools/func.py,sha256=VuGyCt7cOllN1FUuEjBiU1qQgsWzrfCHuajeTufQwkA,4906 +cachetools/keys.py,sha256=SfGPnF5Goo1b8V-lrhB9Jxgqd3vm80R9h50k0hsAAjM,1466 +cachetools/lfu.py,sha256=pbu4V4dnyQL3eH4Kz4a5_QY-RHdVV1Rklqr9Onk95yo,145 +cachetools/lru.py,sha256=HfWFXBSQuAw_h2gT5Sk4TJjENUVZ5AYE9tU2lxaR9ho,145 +cachetools/mru.py,sha256=gVC_kOyqGlrSayVxrs1rb23WYWI02TejIQIPcuYqahQ,145 
+cachetools/rr.py,sha256=4zUXdSViOQ0OZFHsaAhTO_RZfiL9B_wRMzzYuh3qmpQ,142 +cachetools/ttl.py,sha256=aTbhX3-igXvCxKNZPyvVN60Lx9cWctu-Yn_dlk481UI,145 diff --git a/.venv/lib/python3.9/site-packages/cachetools-4.2.4.dist-info/WHEEL b/.venv/lib/python3.9/site-packages/cachetools-4.2.4.dist-info/WHEEL new file mode 100644 index 0000000..b552003 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cachetools-4.2.4.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.34.2) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/.venv/lib/python3.9/site-packages/cachetools-4.2.4.dist-info/top_level.txt b/.venv/lib/python3.9/site-packages/cachetools-4.2.4.dist-info/top_level.txt new file mode 100644 index 0000000..50d1408 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cachetools-4.2.4.dist-info/top_level.txt @@ -0,0 +1 @@ +cachetools diff --git a/.venv/lib/python3.9/site-packages/cachetools-stubs/METADATA.toml b/.venv/lib/python3.9/site-packages/cachetools-stubs/METADATA.toml new file mode 100644 index 0000000..cb7498d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cachetools-stubs/METADATA.toml @@ -0,0 +1 @@ +version = "4.2.*" diff --git a/.venv/lib/python3.9/site-packages/cachetools-stubs/__init__.pyi b/.venv/lib/python3.9/site-packages/cachetools-stubs/__init__.pyi new file mode 100644 index 0000000..06088ca --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cachetools-stubs/__init__.pyi @@ -0,0 +1,67 @@ +from _typeshed import IdentityFunction +from collections.abc import Iterator, Sequence +from contextlib import AbstractContextManager +from typing import Any, Callable, Generic, MutableMapping, TypeVar, overload + +_KT = TypeVar("_KT") +_VT = TypeVar("_VT") +_T = TypeVar("_T") + +class Cache(MutableMapping[_KT, _VT], Generic[_KT, _VT]): + def __init__(self, maxsize: float, getsizeof: Callable[[_VT], float] | None = ...) -> None: ... + def __getitem__(self, key: _KT) -> _VT: ... 
+ def __setitem__(self, key: _KT, value: _VT) -> None: ... + def __delitem__(self, key: _KT) -> None: ... + def __iter__(self) -> Iterator[_KT]: ... + def __len__(self) -> int: ... + @overload # type: ignore[override] + def pop(self, key: _KT) -> _VT: ... + @overload + def pop(self, key: _KT, default: _VT | _T) -> _VT | _T: ... + def setdefault(self, key: _KT, default: _VT | None = ...) -> _VT: ... + @property + def maxsize(self) -> float: ... + @property + def currsize(self) -> float: ... + @staticmethod + def getsizeof(value: _VT) -> float: ... + +class FIFOCache(Cache[_KT, _VT]): + def __init__(self, maxsize: float, getsizeof: Callable[[_VT], float] | None = ...) -> None: ... + +class LFUCache(Cache[_KT, _VT]): + def __init__(self, maxsize: float, getsizeof: Callable[[_VT], float] | None = ...) -> None: ... + +class LRUCache(Cache[_KT, _VT]): + def __init__(self, maxsize: float, getsizeof: Callable[[_VT], float] | None = ...) -> None: ... + +class MRUCache(Cache[_KT, _VT]): + def __init__(self, maxsize: float, getsizeof: Callable[[_VT], float] | None = ...) -> None: ... + +class RRCache(Cache[_KT, _VT]): + def __init__( + self, maxsize: float, choice: Callable[[Sequence[_KT]], _KT] | None = ..., getsizeof: Callable[[_VT], float] | None = ... + ) -> None: ... + @property + def choice(self) -> Callable[[Sequence[_KT]], _KT]: ... + +class TTLCache(Cache[_KT, _VT]): + def __init__( + self, maxsize: float, ttl: float, timer: Callable[[], float] = ..., getsizeof: Callable[[_VT], float] | None = ... + ) -> None: ... + @property + def currsize(self) -> float: ... + @property + def timer(self) -> Callable[[], float]: ... + @property + def ttl(self) -> float: ... + def expire(self, time: float | None = ...) -> None: ... + +def cached( + cache: MutableMapping[_KT, Any] | None, key: Callable[..., _KT] = ..., lock: AbstractContextManager[Any] | None = ... +) -> IdentityFunction: ... 
+def cachedmethod( + cache: Callable[[Any], MutableMapping[_KT, Any] | None], + key: Callable[..., _KT] = ..., + lock: Callable[[Any], AbstractContextManager[Any]] | None = ..., +) -> IdentityFunction: ... diff --git a/.venv/lib/python3.9/site-packages/cachetools-stubs/cache.pyi b/.venv/lib/python3.9/site-packages/cachetools-stubs/cache.pyi new file mode 100644 index 0000000..a9bd3f7 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cachetools-stubs/cache.pyi @@ -0,0 +1,2 @@ +# this module is deprecated +from . import Cache as Cache diff --git a/.venv/lib/python3.9/site-packages/cachetools-stubs/fifo.pyi b/.venv/lib/python3.9/site-packages/cachetools-stubs/fifo.pyi new file mode 100644 index 0000000..c6b386a --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cachetools-stubs/fifo.pyi @@ -0,0 +1,2 @@ +# this module is deprecated +from . import FIFOCache as FIFOCache diff --git a/.venv/lib/python3.9/site-packages/cachetools-stubs/func.pyi b/.venv/lib/python3.9/site-packages/cachetools-stubs/func.pyi new file mode 100644 index 0000000..8135f8d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cachetools-stubs/func.pyi @@ -0,0 +1,15 @@ +from _typeshed import IdentityFunction +from typing import Callable, Sequence, TypeVar + +_T = TypeVar("_T") + +def fifo_cache(maxsize: float | None = ..., typed: bool = ...) -> IdentityFunction: ... +def lfu_cache(maxsize: float | None = ..., typed: bool = ...) -> IdentityFunction: ... +def lru_cache(maxsize: float | None = ..., typed: bool = ...) -> IdentityFunction: ... +def mru_cache(maxsize: float | None = ..., typed: bool = ...) -> IdentityFunction: ... +def rr_cache( + maxsize: float | None = ..., choice: Callable[[Sequence[_T]], _T] | None = ..., typed: bool = ... +) -> IdentityFunction: ... +def ttl_cache( + maxsize: float | None = ..., ttl: float = ..., timer: Callable[[], float] = ..., typed: bool = ... +) -> IdentityFunction: ... 
diff --git a/.venv/lib/python3.9/site-packages/cachetools-stubs/keys.pyi b/.venv/lib/python3.9/site-packages/cachetools-stubs/keys.pyi new file mode 100644 index 0000000..ef3e112 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cachetools-stubs/keys.pyi @@ -0,0 +1,4 @@ +from typing import Hashable + +def hashkey(*args: Hashable, **kwargs: Hashable) -> tuple[Hashable, ...]: ... +def typedkey(*args: Hashable, **kwargs: Hashable) -> tuple[Hashable, ...]: ... diff --git a/.venv/lib/python3.9/site-packages/cachetools-stubs/lfu.pyi b/.venv/lib/python3.9/site-packages/cachetools-stubs/lfu.pyi new file mode 100644 index 0000000..9951e65 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cachetools-stubs/lfu.pyi @@ -0,0 +1,2 @@ +# this module is deprecated +from . import LFUCache as LFUCache diff --git a/.venv/lib/python3.9/site-packages/cachetools-stubs/lru.pyi b/.venv/lib/python3.9/site-packages/cachetools-stubs/lru.pyi new file mode 100644 index 0000000..4ea2d64 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cachetools-stubs/lru.pyi @@ -0,0 +1,2 @@ +# this module is deprecated +from . import LRUCache as LRUCache diff --git a/.venv/lib/python3.9/site-packages/cachetools-stubs/mru.pyi b/.venv/lib/python3.9/site-packages/cachetools-stubs/mru.pyi new file mode 100644 index 0000000..b345f52 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cachetools-stubs/mru.pyi @@ -0,0 +1,2 @@ +# this module is deprecated +from . import MRUCache as MRUCache diff --git a/.venv/lib/python3.9/site-packages/cachetools-stubs/rr.pyi b/.venv/lib/python3.9/site-packages/cachetools-stubs/rr.pyi new file mode 100644 index 0000000..18e2098 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cachetools-stubs/rr.pyi @@ -0,0 +1,2 @@ +# this module is deprecated +from . 
import RRCache as RRCache diff --git a/.venv/lib/python3.9/site-packages/cachetools-stubs/ttl.pyi b/.venv/lib/python3.9/site-packages/cachetools-stubs/ttl.pyi new file mode 100644 index 0000000..aee0585 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cachetools-stubs/ttl.pyi @@ -0,0 +1,2 @@ +# this module is deprecated +from . import TTLCache as TTLCache diff --git a/.venv/lib/python3.9/site-packages/cachetools/__init__.py b/.venv/lib/python3.9/site-packages/cachetools/__init__.py new file mode 100644 index 0000000..42822f0 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cachetools/__init__.py @@ -0,0 +1,596 @@ +"""Extensible memoizing collections and decorators.""" + +__all__ = ( + "Cache", + "FIFOCache", + "LFUCache", + "LRUCache", + "MRUCache", + "RRCache", + "TTLCache", + "cached", + "cachedmethod", +) + +__version__ = "4.2.4" + +import collections +import collections.abc +import functools +import random +import time + +from .keys import hashkey + + +class _DefaultSize: + + __slots__ = () + + def __getitem__(self, _): + return 1 + + def __setitem__(self, _, value): + assert value == 1 + + def pop(self, _): + return 1 + + +class Cache(collections.abc.MutableMapping): + """Mutable mapping to serve as a simple cache or cache base class.""" + + __marker = object() + + __size = _DefaultSize() + + def __init__(self, maxsize, getsizeof=None): + if getsizeof: + self.getsizeof = getsizeof + if self.getsizeof is not Cache.getsizeof: + self.__size = dict() + self.__data = dict() + self.__currsize = 0 + self.__maxsize = maxsize + + def __repr__(self): + return "%s(%r, maxsize=%r, currsize=%r)" % ( + self.__class__.__name__, + list(self.__data.items()), + self.__maxsize, + self.__currsize, + ) + + def __getitem__(self, key): + try: + return self.__data[key] + except KeyError: + return self.__missing__(key) + + def __setitem__(self, key, value): + maxsize = self.__maxsize + size = self.getsizeof(value) + if size > maxsize: + raise ValueError("value too large") + 
if key not in self.__data or self.__size[key] < size: + while self.__currsize + size > maxsize: + self.popitem() + if key in self.__data: + diffsize = size - self.__size[key] + else: + diffsize = size + self.__data[key] = value + self.__size[key] = size + self.__currsize += diffsize + + def __delitem__(self, key): + size = self.__size.pop(key) + del self.__data[key] + self.__currsize -= size + + def __contains__(self, key): + return key in self.__data + + def __missing__(self, key): + raise KeyError(key) + + def __iter__(self): + return iter(self.__data) + + def __len__(self): + return len(self.__data) + + def get(self, key, default=None): + if key in self: + return self[key] + else: + return default + + def pop(self, key, default=__marker): + if key in self: + value = self[key] + del self[key] + elif default is self.__marker: + raise KeyError(key) + else: + value = default + return value + + def setdefault(self, key, default=None): + if key in self: + value = self[key] + else: + self[key] = value = default + return value + + @property + def maxsize(self): + """The maximum size of the cache.""" + return self.__maxsize + + @property + def currsize(self): + """The current size of the cache.""" + return self.__currsize + + @staticmethod + def getsizeof(value): + """Return the size of a cache element's value.""" + return 1 + + +class FIFOCache(Cache): + """First In First Out (FIFO) cache implementation.""" + + def __init__(self, maxsize, getsizeof=None): + Cache.__init__(self, maxsize, getsizeof) + self.__order = collections.OrderedDict() + + def __setitem__(self, key, value, cache_setitem=Cache.__setitem__): + cache_setitem(self, key, value) + try: + self.__order.move_to_end(key) + except KeyError: + self.__order[key] = None + + def __delitem__(self, key, cache_delitem=Cache.__delitem__): + cache_delitem(self, key) + del self.__order[key] + + def popitem(self): + """Remove and return the `(key, value)` pair first inserted.""" + try: + key = next(iter(self.__order)) + 
except StopIteration: + raise KeyError("%s is empty" % type(self).__name__) from None + else: + return (key, self.pop(key)) + + +class LFUCache(Cache): + """Least Frequently Used (LFU) cache implementation.""" + + def __init__(self, maxsize, getsizeof=None): + Cache.__init__(self, maxsize, getsizeof) + self.__counter = collections.Counter() + + def __getitem__(self, key, cache_getitem=Cache.__getitem__): + value = cache_getitem(self, key) + if key in self: # __missing__ may not store item + self.__counter[key] -= 1 + return value + + def __setitem__(self, key, value, cache_setitem=Cache.__setitem__): + cache_setitem(self, key, value) + self.__counter[key] -= 1 + + def __delitem__(self, key, cache_delitem=Cache.__delitem__): + cache_delitem(self, key) + del self.__counter[key] + + def popitem(self): + """Remove and return the `(key, value)` pair least frequently used.""" + try: + ((key, _),) = self.__counter.most_common(1) + except ValueError: + raise KeyError("%s is empty" % type(self).__name__) from None + else: + return (key, self.pop(key)) + + +class LRUCache(Cache): + """Least Recently Used (LRU) cache implementation.""" + + def __init__(self, maxsize, getsizeof=None): + Cache.__init__(self, maxsize, getsizeof) + self.__order = collections.OrderedDict() + + def __getitem__(self, key, cache_getitem=Cache.__getitem__): + value = cache_getitem(self, key) + if key in self: # __missing__ may not store item + self.__update(key) + return value + + def __setitem__(self, key, value, cache_setitem=Cache.__setitem__): + cache_setitem(self, key, value) + self.__update(key) + + def __delitem__(self, key, cache_delitem=Cache.__delitem__): + cache_delitem(self, key) + del self.__order[key] + + def popitem(self): + """Remove and return the `(key, value)` pair least recently used.""" + try: + key = next(iter(self.__order)) + except StopIteration: + raise KeyError("%s is empty" % type(self).__name__) from None + else: + return (key, self.pop(key)) + + def __update(self, key): + 
try: + self.__order.move_to_end(key) + except KeyError: + self.__order[key] = None + + +class MRUCache(Cache): + """Most Recently Used (MRU) cache implementation.""" + + def __init__(self, maxsize, getsizeof=None): + Cache.__init__(self, maxsize, getsizeof) + self.__order = collections.OrderedDict() + + def __getitem__(self, key, cache_getitem=Cache.__getitem__): + value = cache_getitem(self, key) + if key in self: # __missing__ may not store item + self.__update(key) + return value + + def __setitem__(self, key, value, cache_setitem=Cache.__setitem__): + cache_setitem(self, key, value) + self.__update(key) + + def __delitem__(self, key, cache_delitem=Cache.__delitem__): + cache_delitem(self, key) + del self.__order[key] + + def popitem(self): + """Remove and return the `(key, value)` pair most recently used.""" + try: + key = next(iter(self.__order)) + except StopIteration: + raise KeyError("%s is empty" % type(self).__name__) from None + else: + return (key, self.pop(key)) + + def __update(self, key): + try: + self.__order.move_to_end(key, last=False) + except KeyError: + self.__order[key] = None + + +class RRCache(Cache): + """Random Replacement (RR) cache implementation.""" + + def __init__(self, maxsize, choice=random.choice, getsizeof=None): + Cache.__init__(self, maxsize, getsizeof) + self.__choice = choice + + @property + def choice(self): + """The `choice` function used by the cache.""" + return self.__choice + + def popitem(self): + """Remove and return a random `(key, value)` pair.""" + try: + key = self.__choice(list(self)) + except IndexError: + raise KeyError("%s is empty" % type(self).__name__) from None + else: + return (key, self.pop(key)) + + +class _Timer: + def __init__(self, timer): + self.__timer = timer + self.__nesting = 0 + + def __call__(self): + if self.__nesting == 0: + return self.__timer() + else: + return self.__time + + def __enter__(self): + if self.__nesting == 0: + self.__time = time = self.__timer() + else: + time = self.__time + 
self.__nesting += 1 + return time + + def __exit__(self, *exc): + self.__nesting -= 1 + + def __reduce__(self): + return _Timer, (self.__timer,) + + def __getattr__(self, name): + return getattr(self.__timer, name) + + +class _Link: + + __slots__ = ("key", "expire", "next", "prev") + + def __init__(self, key=None, expire=None): + self.key = key + self.expire = expire + + def __reduce__(self): + return _Link, (self.key, self.expire) + + def unlink(self): + next = self.next + prev = self.prev + prev.next = next + next.prev = prev + + +class TTLCache(Cache): + """LRU Cache implementation with per-item time-to-live (TTL) value.""" + + def __init__(self, maxsize, ttl, timer=time.monotonic, getsizeof=None): + Cache.__init__(self, maxsize, getsizeof) + self.__root = root = _Link() + root.prev = root.next = root + self.__links = collections.OrderedDict() + self.__timer = _Timer(timer) + self.__ttl = ttl + + def __contains__(self, key): + try: + link = self.__links[key] # no reordering + except KeyError: + return False + else: + return not (link.expire < self.__timer()) + + def __getitem__(self, key, cache_getitem=Cache.__getitem__): + try: + link = self.__getlink(key) + except KeyError: + expired = False + else: + expired = link.expire < self.__timer() + if expired: + return self.__missing__(key) + else: + return cache_getitem(self, key) + + def __setitem__(self, key, value, cache_setitem=Cache.__setitem__): + with self.__timer as time: + self.expire(time) + cache_setitem(self, key, value) + try: + link = self.__getlink(key) + except KeyError: + self.__links[key] = link = _Link(key) + else: + link.unlink() + link.expire = time + self.__ttl + link.next = root = self.__root + link.prev = prev = root.prev + prev.next = root.prev = link + + def __delitem__(self, key, cache_delitem=Cache.__delitem__): + cache_delitem(self, key) + link = self.__links.pop(key) + link.unlink() + if link.expire < self.__timer(): + raise KeyError(key) + + def __iter__(self): + root = self.__root + 
curr = root.next + while curr is not root: + # "freeze" time for iterator access + with self.__timer as time: + if not (curr.expire < time): + yield curr.key + curr = curr.next + + def __len__(self): + root = self.__root + curr = root.next + time = self.__timer() + count = len(self.__links) + while curr is not root and curr.expire < time: + count -= 1 + curr = curr.next + return count + + def __setstate__(self, state): + self.__dict__.update(state) + root = self.__root + root.prev = root.next = root + for link in sorted(self.__links.values(), key=lambda obj: obj.expire): + link.next = root + link.prev = prev = root.prev + prev.next = root.prev = link + self.expire(self.__timer()) + + def __repr__(self, cache_repr=Cache.__repr__): + with self.__timer as time: + self.expire(time) + return cache_repr(self) + + @property + def currsize(self): + with self.__timer as time: + self.expire(time) + return super().currsize + + @property + def timer(self): + """The timer function used by the cache.""" + return self.__timer + + @property + def ttl(self): + """The time-to-live value of the cache's items.""" + return self.__ttl + + def expire(self, time=None): + """Remove expired items from the cache.""" + if time is None: + time = self.__timer() + root = self.__root + curr = root.next + links = self.__links + cache_delitem = Cache.__delitem__ + while curr is not root and curr.expire < time: + cache_delitem(self, curr.key) + del links[curr.key] + next = curr.next + curr.unlink() + curr = next + + def clear(self): + with self.__timer as time: + self.expire(time) + Cache.clear(self) + + def get(self, *args, **kwargs): + with self.__timer: + return Cache.get(self, *args, **kwargs) + + def pop(self, *args, **kwargs): + with self.__timer: + return Cache.pop(self, *args, **kwargs) + + def setdefault(self, *args, **kwargs): + with self.__timer: + return Cache.setdefault(self, *args, **kwargs) + + def popitem(self): + """Remove and return the `(key, value)` pair least recently used that 
+ has not already expired. + + """ + with self.__timer as time: + self.expire(time) + try: + key = next(iter(self.__links)) + except StopIteration: + raise KeyError("%s is empty" % type(self).__name__) from None + else: + return (key, self.pop(key)) + + def __getlink(self, key): + value = self.__links[key] + self.__links.move_to_end(key) + return value + + +def cached(cache, key=hashkey, lock=None): + """Decorator to wrap a function with a memoizing callable that saves + results in a cache. + + """ + + def decorator(func): + if cache is None: + + def wrapper(*args, **kwargs): + return func(*args, **kwargs) + + elif lock is None: + + def wrapper(*args, **kwargs): + k = key(*args, **kwargs) + try: + return cache[k] + except KeyError: + pass # key not found + v = func(*args, **kwargs) + try: + cache[k] = v + except ValueError: + pass # value too large + return v + + else: + + def wrapper(*args, **kwargs): + k = key(*args, **kwargs) + try: + with lock: + return cache[k] + except KeyError: + pass # key not found + v = func(*args, **kwargs) + # in case of a race, prefer the item already in the cache + try: + with lock: + return cache.setdefault(k, v) + except ValueError: + return v # value too large + + return functools.update_wrapper(wrapper, func) + + return decorator + + +def cachedmethod(cache, key=hashkey, lock=None): + """Decorator to wrap a class or instance method with a memoizing + callable that saves results in a cache. 
+ + """ + + def decorator(method): + if lock is None: + + def wrapper(self, *args, **kwargs): + c = cache(self) + if c is None: + return method(self, *args, **kwargs) + k = key(*args, **kwargs) + try: + return c[k] + except KeyError: + pass # key not found + v = method(self, *args, **kwargs) + try: + c[k] = v + except ValueError: + pass # value too large + return v + + else: + + def wrapper(self, *args, **kwargs): + c = cache(self) + if c is None: + return method(self, *args, **kwargs) + k = key(*args, **kwargs) + try: + with lock(self): + return c[k] + except KeyError: + pass # key not found + v = method(self, *args, **kwargs) + # in case of a race, prefer the item already in the cache + try: + with lock(self): + return c.setdefault(k, v) + except ValueError: + return v # value too large + + return functools.update_wrapper(wrapper, method) + + return decorator diff --git a/.venv/lib/python3.9/site-packages/cachetools/cache.py b/.venv/lib/python3.9/site-packages/cachetools/cache.py new file mode 100644 index 0000000..8c9dfd7 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cachetools/cache.py @@ -0,0 +1,7 @@ +import warnings + +from . import Cache + +warnings.warn( + "cachetools.cache is deprecated, please use cachetools.Cache", DeprecationWarning +) diff --git a/.venv/lib/python3.9/site-packages/cachetools/fifo.py b/.venv/lib/python3.9/site-packages/cachetools/fifo.py new file mode 100644 index 0000000..ec072cd --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cachetools/fifo.py @@ -0,0 +1,7 @@ +import warnings + +from . 
import FIFOCache + +warnings.warn( + "cachetools.fifo is deprecated, please use cachetools.FIFOCache", DeprecationWarning +) diff --git a/.venv/lib/python3.9/site-packages/cachetools/func.py b/.venv/lib/python3.9/site-packages/cachetools/func.py new file mode 100644 index 0000000..01702c2 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cachetools/func.py @@ -0,0 +1,171 @@ +"""`functools.lru_cache` compatible memoizing function decorators.""" + +__all__ = ("fifo_cache", "lfu_cache", "lru_cache", "mru_cache", "rr_cache", "ttl_cache") + +import collections +import functools +import math +import random +import time + +try: + from threading import RLock +except ImportError: # pragma: no cover + from dummy_threading import RLock + +from . import FIFOCache, LFUCache, LRUCache, MRUCache, RRCache, TTLCache +from . import keys + + +_CacheInfo = collections.namedtuple( + "CacheInfo", ["hits", "misses", "maxsize", "currsize"] +) + + +class _UnboundCache(dict): + @property + def maxsize(self): + return None + + @property + def currsize(self): + return len(self) + + +class _UnboundTTLCache(TTLCache): + def __init__(self, ttl, timer): + TTLCache.__init__(self, math.inf, ttl, timer) + + @property + def maxsize(self): + return None + + +def _cache(cache, typed): + maxsize = cache.maxsize + + def decorator(func): + key = keys.typedkey if typed else keys.hashkey + lock = RLock() + stats = [0, 0] + + def wrapper(*args, **kwargs): + k = key(*args, **kwargs) + with lock: + try: + v = cache[k] + stats[0] += 1 + return v + except KeyError: + stats[1] += 1 + v = func(*args, **kwargs) + # in case of a race, prefer the item already in the cache + try: + with lock: + return cache.setdefault(k, v) + except ValueError: + return v # value too large + + def cache_info(): + with lock: + hits, misses = stats + maxsize = cache.maxsize + currsize = cache.currsize + return _CacheInfo(hits, misses, maxsize, currsize) + + def cache_clear(): + with lock: + try: + cache.clear() + finally: + stats[:] 
= [0, 0] + + wrapper.cache_info = cache_info + wrapper.cache_clear = cache_clear + wrapper.cache_parameters = lambda: {"maxsize": maxsize, "typed": typed} + functools.update_wrapper(wrapper, func) + return wrapper + + return decorator + + +def fifo_cache(maxsize=128, typed=False): + """Decorator to wrap a function with a memoizing callable that saves + up to `maxsize` results based on a First In First Out (FIFO) + algorithm. + + """ + if maxsize is None: + return _cache(_UnboundCache(), typed) + elif callable(maxsize): + return _cache(FIFOCache(128), typed)(maxsize) + else: + return _cache(FIFOCache(maxsize), typed) + + +def lfu_cache(maxsize=128, typed=False): + """Decorator to wrap a function with a memoizing callable that saves + up to `maxsize` results based on a Least Frequently Used (LFU) + algorithm. + + """ + if maxsize is None: + return _cache(_UnboundCache(), typed) + elif callable(maxsize): + return _cache(LFUCache(128), typed)(maxsize) + else: + return _cache(LFUCache(maxsize), typed) + + +def lru_cache(maxsize=128, typed=False): + """Decorator to wrap a function with a memoizing callable that saves + up to `maxsize` results based on a Least Recently Used (LRU) + algorithm. + + """ + if maxsize is None: + return _cache(_UnboundCache(), typed) + elif callable(maxsize): + return _cache(LRUCache(128), typed)(maxsize) + else: + return _cache(LRUCache(maxsize), typed) + + +def mru_cache(maxsize=128, typed=False): + """Decorator to wrap a function with a memoizing callable that saves + up to `maxsize` results based on a Most Recently Used (MRU) + algorithm. + """ + if maxsize is None: + return _cache(_UnboundCache(), typed) + elif callable(maxsize): + return _cache(MRUCache(128), typed)(maxsize) + else: + return _cache(MRUCache(maxsize), typed) + + +def rr_cache(maxsize=128, choice=random.choice, typed=False): + """Decorator to wrap a function with a memoizing callable that saves + up to `maxsize` results based on a Random Replacement (RR) + algorithm. 
+ + """ + if maxsize is None: + return _cache(_UnboundCache(), typed) + elif callable(maxsize): + return _cache(RRCache(128, choice), typed)(maxsize) + else: + return _cache(RRCache(maxsize, choice), typed) + + +def ttl_cache(maxsize=128, ttl=600, timer=time.monotonic, typed=False): + """Decorator to wrap a function with a memoizing callable that saves + up to `maxsize` results based on a Least Recently Used (LRU) + algorithm with a per-item time-to-live (TTL) value. + """ + if maxsize is None: + return _cache(_UnboundTTLCache(ttl, timer), typed) + elif callable(maxsize): + return _cache(TTLCache(128, ttl, timer), typed)(maxsize) + else: + return _cache(TTLCache(maxsize, ttl, timer), typed) diff --git a/.venv/lib/python3.9/site-packages/cachetools/keys.py b/.venv/lib/python3.9/site-packages/cachetools/keys.py new file mode 100644 index 0000000..13630a4 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cachetools/keys.py @@ -0,0 +1,52 @@ +"""Key functions for memoizing decorators.""" + +__all__ = ("hashkey", "typedkey") + + +class _HashedTuple(tuple): + """A tuple that ensures that hash() will be called no more than once + per element, since cache decorators will hash the key multiple + times on a cache miss. See also _HashedSeq in the standard + library functools implementation. 
+ + """ + + __hashvalue = None + + def __hash__(self, hash=tuple.__hash__): + hashvalue = self.__hashvalue + if hashvalue is None: + self.__hashvalue = hashvalue = hash(self) + return hashvalue + + def __add__(self, other, add=tuple.__add__): + return _HashedTuple(add(self, other)) + + def __radd__(self, other, add=tuple.__add__): + return _HashedTuple(add(other, self)) + + def __getstate__(self): + return {} + + +# used for separating keyword arguments; we do not use an object +# instance here so identity is preserved when pickling/unpickling +_kwmark = (_HashedTuple,) + + +def hashkey(*args, **kwargs): + """Return a cache key for the specified hashable arguments.""" + + if kwargs: + return _HashedTuple(args + sum(sorted(kwargs.items()), _kwmark)) + else: + return _HashedTuple(args) + + +def typedkey(*args, **kwargs): + """Return a typed cache key for the specified hashable arguments.""" + + key = hashkey(*args, **kwargs) + key += tuple(type(v) for v in args) + key += tuple(type(v) for _, v in sorted(kwargs.items())) + return key diff --git a/.venv/lib/python3.9/site-packages/cachetools/lfu.py b/.venv/lib/python3.9/site-packages/cachetools/lfu.py new file mode 100644 index 0000000..44514ac --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cachetools/lfu.py @@ -0,0 +1,7 @@ +import warnings + +from . import LFUCache + +warnings.warn( + "cachetools.lfu is deprecated, please use cachetools.LFUCache", DeprecationWarning +) diff --git a/.venv/lib/python3.9/site-packages/cachetools/lru.py b/.venv/lib/python3.9/site-packages/cachetools/lru.py new file mode 100644 index 0000000..5d557b0 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cachetools/lru.py @@ -0,0 +1,7 @@ +import warnings + +from . 
import LRUCache + +warnings.warn( + "cachetools.lru is deprecated, please use cachetools.LRUCache", DeprecationWarning +) diff --git a/.venv/lib/python3.9/site-packages/cachetools/mru.py b/.venv/lib/python3.9/site-packages/cachetools/mru.py new file mode 100644 index 0000000..0714bdc --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cachetools/mru.py @@ -0,0 +1,7 @@ +import warnings + +from . import MRUCache + +warnings.warn( + "cachetools.mru is deprecated, please use cachetools.MRUCache", DeprecationWarning +) diff --git a/.venv/lib/python3.9/site-packages/cachetools/rr.py b/.venv/lib/python3.9/site-packages/cachetools/rr.py new file mode 100644 index 0000000..f49e185 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cachetools/rr.py @@ -0,0 +1,7 @@ +import warnings + +from . import RRCache + +warnings.warn( + "cachetools.rr is deprecated, please use cachetools.RRCache", DeprecationWarning +) diff --git a/.venv/lib/python3.9/site-packages/cachetools/ttl.py b/.venv/lib/python3.9/site-packages/cachetools/ttl.py new file mode 100644 index 0000000..d96b677 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cachetools/ttl.py @@ -0,0 +1,7 @@ +import warnings + +from . 
import TTLCache + +warnings.warn( + "cachetools.ttl is deprecated, please use cachetools.TTLCache", DeprecationWarning +) diff --git a/.venv/lib/python3.9/site-packages/certifi-2021.10.8.dist-info/INSTALLER b/.venv/lib/python3.9/site-packages/certifi-2021.10.8.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/certifi-2021.10.8.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.9/site-packages/certifi-2021.10.8.dist-info/LICENSE b/.venv/lib/python3.9/site-packages/certifi-2021.10.8.dist-info/LICENSE new file mode 100644 index 0000000..c2fda9a --- /dev/null +++ b/.venv/lib/python3.9/site-packages/certifi-2021.10.8.dist-info/LICENSE @@ -0,0 +1,21 @@ +This package contains a modified version of ca-bundle.crt: + +ca-bundle.crt -- Bundle of CA Root Certificates + +Certificate data from Mozilla as of: Thu Nov 3 19:04:19 2011# +This is a bundle of X.509 certificates of public Certificate Authorities +(CA). These were automatically extracted from Mozilla's root certificates +file (certdata.txt). This file can be found in the mozilla source tree: +http://mxr.mozilla.org/mozilla/source/security/nss/lib/ckfw/builtins/certdata.txt?raw=1# +It contains the certificates in PEM format and therefore +can be directly used with curl / libcurl / php_curl, or with +an Apache+mod_ssl webserver for SSL client authentication. +Just configure this file as the SSLCACertificateFile.# + +***** BEGIN LICENSE BLOCK ***** +This Source Code Form is subject to the terms of the Mozilla Public License, +v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain +one at http://mozilla.org/MPL/2.0/. 
+ +***** END LICENSE BLOCK ***** +@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $ diff --git a/.venv/lib/python3.9/site-packages/certifi-2021.10.8.dist-info/METADATA b/.venv/lib/python3.9/site-packages/certifi-2021.10.8.dist-info/METADATA new file mode 100644 index 0000000..7a6860d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/certifi-2021.10.8.dist-info/METADATA @@ -0,0 +1,83 @@ +Metadata-Version: 2.1 +Name: certifi +Version: 2021.10.8 +Summary: Python package for providing Mozilla's CA Bundle. +Home-page: https://certifiio.readthedocs.io/en/latest/ +Author: Kenneth Reitz +Author-email: me@kennethreitz.com +License: MPL-2.0 +Project-URL: Documentation, https://certifiio.readthedocs.io/en/latest/ +Project-URL: Source, https://github.com/certifi/python-certifi +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0) +Classifier: Natural Language :: English +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 + +Certifi: Python SSL Certificates +================================ + +`Certifi`_ provides Mozilla's carefully curated collection of Root Certificates for +validating the trustworthiness of SSL certificates while verifying the identity +of TLS hosts. It has been extracted from the `Requests`_ project. + +Installation +------------ + +``certifi`` is available on PyPI. 
Simply install it with ``pip``:: + + $ pip install certifi + +Usage +----- + +To reference the installed certificate authority (CA) bundle, you can use the +built-in function:: + + >>> import certifi + + >>> certifi.where() + '/usr/local/lib/python3.7/site-packages/certifi/cacert.pem' + +Or from the command line:: + + $ python -m certifi + /usr/local/lib/python3.7/site-packages/certifi/cacert.pem + +Enjoy! + +1024-bit Root Certificates +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Browsers and certificate authorities have concluded that 1024-bit keys are +unacceptably weak for certificates, particularly root certificates. For this +reason, Mozilla has removed any weak (i.e. 1024-bit key) certificate from its +bundle, replacing it with an equivalent strong (i.e. 2048-bit or greater key) +certificate from the same CA. Because Mozilla removed these certificates from +its bundle, ``certifi`` removed them as well. + +In previous versions, ``certifi`` provided the ``certifi.old_where()`` function +to intentionally re-add the 1024-bit roots back into your bundle. This was not +recommended in production and therefore was removed at the end of 2018. + +.. _`Certifi`: https://certifiio.readthedocs.io/en/latest/ +.. _`Requests`: https://requests.readthedocs.io/en/master/ + +Addition/Removal of Certificates +-------------------------------- + +Certifi does not support any addition/removal or other modification of the +CA trust store content. This project is intended to provide a reliable and +highly portable root of trust to python deployments. Look to upstream projects +for methods to use alternate trust. 
+ + diff --git a/.venv/lib/python3.9/site-packages/certifi-2021.10.8.dist-info/RECORD b/.venv/lib/python3.9/site-packages/certifi-2021.10.8.dist-info/RECORD new file mode 100644 index 0000000..f8b88d1 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/certifi-2021.10.8.dist-info/RECORD @@ -0,0 +1,13 @@ +certifi-2021.10.8.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +certifi-2021.10.8.dist-info/LICENSE,sha256=vp2C82ES-Hp_HXTs1Ih-FGe7roh4qEAEoAEXseR1o-I,1049 +certifi-2021.10.8.dist-info/METADATA,sha256=iB_zbT1uX_8_NC7iGv0YEB-9b3idhQwHrFTSq8R1kD8,2994 +certifi-2021.10.8.dist-info/RECORD,, +certifi-2021.10.8.dist-info/WHEEL,sha256=ADKeyaGyKF5DwBNE0sRE5pvW-bSkFMJfBuhzZ3rceP4,110 +certifi-2021.10.8.dist-info/top_level.txt,sha256=KMu4vUCfsjLrkPbSNdgdekS-pVJzBAJFO__nI8NF6-U,8 +certifi/__init__.py,sha256=xWdRgntT3j1V95zkRipGOg_A1UfEju2FcpujhysZLRI,62 +certifi/__main__.py,sha256=xBBoj905TUWBLRGANOcf7oi6e-3dMP4cEoG9OyMs11g,243 +certifi/__pycache__/__init__.cpython-39.pyc,, +certifi/__pycache__/__main__.cpython-39.pyc,, +certifi/__pycache__/core.cpython-39.pyc,, +certifi/cacert.pem,sha256=-og4Keu4zSpgL5shwfhd4kz0eUnVILzrGCi0zRy2kGw,265969 +certifi/core.py,sha256=V0uyxKOYdz6ulDSusclrLmjbPgOXsD0BnEf0SQ7OnoE,2303 diff --git a/.venv/lib/python3.9/site-packages/certifi-2021.10.8.dist-info/WHEEL b/.venv/lib/python3.9/site-packages/certifi-2021.10.8.dist-info/WHEEL new file mode 100644 index 0000000..6d38aa0 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/certifi-2021.10.8.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.35.1) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/.venv/lib/python3.9/site-packages/certifi-2021.10.8.dist-info/top_level.txt b/.venv/lib/python3.9/site-packages/certifi-2021.10.8.dist-info/top_level.txt new file mode 100644 index 0000000..963eac5 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/certifi-2021.10.8.dist-info/top_level.txt @@ -0,0 +1 @@ +certifi diff 
--git a/.venv/lib/python3.9/site-packages/certifi/__init__.py b/.venv/lib/python3.9/site-packages/certifi/__init__.py new file mode 100644 index 0000000..8db1a0e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/certifi/__init__.py @@ -0,0 +1,3 @@ +from .core import contents, where + +__version__ = "2021.10.08" diff --git a/.venv/lib/python3.9/site-packages/certifi/__main__.py b/.venv/lib/python3.9/site-packages/certifi/__main__.py new file mode 100644 index 0000000..8945b5d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/certifi/__main__.py @@ -0,0 +1,12 @@ +import argparse + +from certifi import contents, where + +parser = argparse.ArgumentParser() +parser.add_argument("-c", "--contents", action="store_true") +args = parser.parse_args() + +if args.contents: + print(contents()) +else: + print(where()) diff --git a/.venv/lib/python3.9/site-packages/certifi/cacert.pem b/.venv/lib/python3.9/site-packages/certifi/cacert.pem new file mode 100644 index 0000000..6d0ccc0 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/certifi/cacert.pem @@ -0,0 +1,4362 @@ + +# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA +# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA +# Label: "GlobalSign Root CA" +# Serial: 4835703278459707669005204 +# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a +# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c +# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99 +-----BEGIN CERTIFICATE----- +MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG +A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv +b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw +MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i +YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT +aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ 
+jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp +xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp +1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG +snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ +U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8 +9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E +BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B +AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz +yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE +38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP +AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad +DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME +HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 +# Label: "GlobalSign Root CA - R2" +# Serial: 4835703278459682885658125 +# MD5 Fingerprint: 94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30 +# SHA1 Fingerprint: 75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe +# SHA256 Fingerprint: ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e +-----BEGIN CERTIFICATE----- +MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G +A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp +Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1 +MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG +A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL +v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8 +eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq +tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd 
+C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa +zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB +mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH +V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n +bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG +3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs +J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO +291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS +ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd +AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7 +TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg== +-----END CERTIFICATE----- + +# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited +# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. 
(limits liab.)/(c) 1999 Entrust.net Limited +# Label: "Entrust.net Premium 2048 Secure Server CA" +# Serial: 946069240 +# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90 +# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31 +# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77 +-----BEGIN CERTIFICATE----- +MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML +RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp +bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5 +IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3 +MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3 +LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp +YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG +A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq +K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe +sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX +MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT +XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/ +HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH +4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub +j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo +U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf +zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b +u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+ +bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er +fF6adulZkMV8gzURZVE= +-----END CERTIFICATE----- + +# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust +# Subject: 
CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust +# Label: "Baltimore CyberTrust Root" +# Serial: 33554617 +# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4 +# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74 +# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ +RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD +VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX +DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y +ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy +VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr +mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr +IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK +mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu +XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy +dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye +jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1 +BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3 +DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92 +9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx +jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0 +Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz +ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS +R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. +# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. 
+# Label: "Entrust Root Certification Authority" +# Serial: 1164660820 +# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4 +# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9 +# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c +-----BEGIN CERTIFICATE----- +MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0 +Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW +KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl +cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw +NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw +NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy +ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV +BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ +KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo +Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4 +4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9 +KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI +rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi +94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB +sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi +gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo +kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE +vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA +A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t +O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua +AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP +9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/ +eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m +0vdXcDazv/wor3ElhVsT/h5/WrQ8 +-----END 
CERTIFICATE----- + +# Issuer: CN=AAA Certificate Services O=Comodo CA Limited +# Subject: CN=AAA Certificate Services O=Comodo CA Limited +# Label: "Comodo AAA Services root" +# Serial: 1 +# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0 +# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49 +# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4 +-----BEGIN CERTIFICATE----- +MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb +MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow +GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj +YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM +GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP +ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua +BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe +3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4 +YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR +rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm +ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU +oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF +MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v +QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t +b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF +AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q +GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz +Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2 +G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi +l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3 +smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg== +-----END CERTIFICATE----- + 
+# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2" +# Serial: 1289 +# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b +# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7 +# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86 +-----BEGIN CERTIFICATE----- +MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa +GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg +Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J +WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB +rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp ++ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1 +ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i +Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz +PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og +/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH +oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI +yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud +EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2 +A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL +MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT +ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f +BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn +g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl +fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K 
+WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha +B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc +hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR +TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD +mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z +ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y +4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza +8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3" +# Serial: 1478 +# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf +# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85 +# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35 +-----BEGIN CERTIFICATE----- +MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM +V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB +4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr +H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd +8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv +vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT +mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe +btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc +T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt +WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ +c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A 
+4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD +VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG +CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0 +aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0 +aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu +dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw +czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G +A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC +TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg +Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0 +7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem +d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd ++LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B +4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN +t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x +DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57 +k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s +zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j +Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT +mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK +4SVhM7JZG+Ju1zdXtg2pEto= +-----END CERTIFICATE----- + +# Issuer: O=SECOM Trust.net OU=Security Communication RootCA1 +# Subject: O=SECOM Trust.net OU=Security Communication RootCA1 +# Label: "Security Communication Root CA" +# Serial: 0 +# MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a +# SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7 +# SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c +-----BEGIN CERTIFICATE----- +MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEY +MBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21t 
+dW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5 +WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYD +VQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl8 +9f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJ +DKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9 +Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/N +QV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJ +xrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0G +A1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0T +AQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vG +kl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfr +Uj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5 +Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJU +JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot +RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw== +-----END CERTIFICATE----- + +# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com +# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com +# Label: "XRamp Global CA Root" +# Serial: 107108908803651509692980124233745014957 +# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1 +# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6 +# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2 +-----BEGIN CERTIFICATE----- +MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB +gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk +MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY +UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx 
+NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3 +dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy +dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB +dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6 +38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP +KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q +DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4 +qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa +JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi +PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P +BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs +jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0 +eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD +ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR +vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt +qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa +IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy +i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ +O+7ETPTsJ3xCwnR8gooJybQDJbw= +-----END CERTIFICATE----- + +# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority +# Subject: O=The Go Daddy Group, Inc. 
OU=Go Daddy Class 2 Certification Authority +# Label: "Go Daddy Class 2 CA" +# Serial: 0 +# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67 +# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4 +# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4 +-----BEGIN CERTIFICATE----- +MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh +MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE +YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3 +MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo +ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg +MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN +ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA +PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w +wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi +EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY +avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+ +YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE +sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h +/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5 +IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD +ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy +OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P +TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ +HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER +dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf +ReYNnyicsbkqWletNw+vHX/bvZ8= +-----END CERTIFICATE----- + +# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority +# Subject: O=Starfield Technologies, Inc. 
OU=Starfield Class 2 Certification Authority +# Label: "Starfield Class 2 CA" +# Serial: 0 +# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24 +# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a +# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58 +-----BEGIN CERTIFICATE----- +MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl +MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp +U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw +NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE +ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp +ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3 +DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf +8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN ++lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0 +X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa +K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA +1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G +A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR +zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0 +YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD +bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w +DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3 +L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D +eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl +xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp +VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY +WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc 
OU=www.digicert.com +# Label: "DigiCert Assured ID Root CA" +# Serial: 17154717934120587862167794914071425081 +# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72 +# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43 +# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c +-----BEGIN CERTIFICATE----- +MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c +JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP +mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+ +wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4 +VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/ +AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB +AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun +pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC +dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf +fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm +NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx +H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe ++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root CA" +# Serial: 10944719598952040374951832963794454346 +# MD5 
Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e +# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36 +# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61 +-----BEGIN CERTIFICATE----- +MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD +QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j +b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB +CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97 +nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt +43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P +T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4 +gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO +BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR +TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw +DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr +hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg +06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF +PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls +YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk +CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert High Assurance EV Root CA" +# Serial: 3553400076410547919724730734378100087 +# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a +# SHA1 Fingerprint: 
5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25 +# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j +ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL +MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 +LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug +RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm ++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW +PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM +xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB +Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 +hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg +EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA +FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec +nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z +eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF +hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 +Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe +vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep ++OkuE6N36B9K +-----END CERTIFICATE----- + +# Issuer: CN=DST Root CA X3 O=Digital Signature Trust Co. +# Subject: CN=DST Root CA X3 O=Digital Signature Trust Co. 
+# Label: "DST Root CA X3" +# Serial: 91299735575339953335919266965803778155 +# MD5 Fingerprint: 41:03:52:dc:0f:f7:50:1b:16:f0:02:8e:ba:6f:45:c5 +# SHA1 Fingerprint: da:c9:02:4f:54:d8:f6:df:94:93:5f:b1:73:26:38:ca:6a:d7:7c:13 +# SHA256 Fingerprint: 06:87:26:03:31:a7:24:03:d9:09:f1:05:e6:9b:cf:0d:32:e1:bd:24:93:ff:c6:d9:20:6d:11:bc:d6:77:07:39 +-----BEGIN CERTIFICATE----- +MIIDSjCCAjKgAwIBAgIQRK+wgNajJ7qJMDmGLvhAazANBgkqhkiG9w0BAQUFADA/ +MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMT +DkRTVCBSb290IENBIFgzMB4XDTAwMDkzMDIxMTIxOVoXDTIxMDkzMDE0MDExNVow +PzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMRcwFQYDVQQD +Ew5EU1QgUm9vdCBDQSBYMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB +AN+v6ZdQCINXtMxiZfaQguzH0yxrMMpb7NnDfcdAwRgUi+DoM3ZJKuM/IUmTrE4O +rz5Iy2Xu/NMhD2XSKtkyj4zl93ewEnu1lcCJo6m67XMuegwGMoOifooUMM0RoOEq +OLl5CjH9UL2AZd+3UWODyOKIYepLYYHsUmu5ouJLGiifSKOeDNoJjj4XLh7dIN9b +xiqKqy69cK3FCxolkHRyxXtqqzTWMIn/5WgTe1QLyNau7Fqckh49ZLOMxt+/yUFw +7BZy1SbsOFU5Q9D8/RhcQPGX69Wam40dutolucbY38EVAjqr2m7xPi71XAicPNaD +aeQQmxkqtilX4+U9m5/wAl0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNV +HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMSnsaR7LHH62+FLkHX/xBVghYkQMA0GCSqG +SIb3DQEBBQUAA4IBAQCjGiybFwBcqR7uKGY3Or+Dxz9LwwmglSBd49lZRNI+DT69 +ikugdB/OEIKcdBodfpga3csTS7MgROSR6cz8faXbauX+5v3gTt23ADq1cEmv8uXr +AvHRAosZy5Q6XkjEGB5YGV8eAlrwDPGxrancWYaLbumR9YbK+rlmM6pZW87ipxZz +R8srzJmwN0jP41ZL9c8PDHIyh8bwRLtTcm1D9SZImlJnt1ir/md2cXjbDaJWFBM5 +JDGFoqgCWjBH4d1QB7wCCZAA62RjYJsWvIjJEubSfZGL+T0yjWW06XyxV3bqxbYo +Ob8VZRzI9neWagqNdwvYkQsEjgfbKbYK7p2CNTUQ +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Label: "SwissSign Gold CA - G2" +# Serial: 13492815561806991280 +# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93 +# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61 +# SHA256 Fingerprint: 
62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95 +-----BEGIN CERTIFICATE----- +MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV +BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln +biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF +MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT +d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC +CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8 +76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+ +bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c +6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE +emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd +MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt +MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y +MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y +FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi +aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM +gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB +qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7 +lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn +8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov +L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6 +45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO +UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5 +O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC +bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv +GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a +77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC +hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3 +92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp 
+Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w +ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt +Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG +# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG +# Label: "SwissSign Silver CA - G2" +# Serial: 5700383053117599563 +# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13 +# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb +# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5 +-----BEGIN CERTIFICATE----- +MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE +BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu +IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow +RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY +U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A +MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv +Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br +YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF +nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH +6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt +eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/ +c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ +MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH +HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf +jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6 +5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB +rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU +F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c +wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0 +cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB 
+AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp +WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9 +xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ +2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ +IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8 +aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X +em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR +dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/ +OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+ +hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy +tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u +-----END CERTIFICATE----- + +# Issuer: CN=SecureTrust CA O=SecureTrust Corporation +# Subject: CN=SecureTrust CA O=SecureTrust Corporation +# Label: "SecureTrust CA" +# Serial: 17199774589125277788362757014266862032 +# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1 +# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11 +# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73 +-----BEGIN CERTIFICATE----- +MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz +MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv +cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN +AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz +Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO +0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao +wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj +7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS +8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT 
+BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg +JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3 +6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/ +3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm +D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS +CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR +3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE= +-----END CERTIFICATE----- + +# Issuer: CN=Secure Global CA O=SecureTrust Corporation +# Subject: CN=Secure Global CA O=SecureTrust Corporation +# Label: "Secure Global CA" +# Serial: 9751836167731051554232119481456978597 +# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de +# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b +# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69 +-----BEGIN CERTIFICATE----- +MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx +MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg +Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ +iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa +/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ +jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI +HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7 +sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w +gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw 
+KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG +AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L +URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO +H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm +I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY +iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc +f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW +-----END CERTIFICATE----- + +# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO Certification Authority O=COMODO CA Limited +# Label: "COMODO Certification Authority" +# Serial: 104350513648249232941998508985834464573 +# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75 +# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b +# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66 +-----BEGIN CERTIFICATE----- +MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB +gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV +BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw +MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl +YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P +RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0 +aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3 +UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI +2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8 +Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp ++2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+ +DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O +nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW 
+/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g +PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u +QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY +SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv +IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ +RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4 +zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd +BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB +ZQ== +-----END CERTIFICATE----- + +# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. +# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. +# Label: "Network Solutions Certificate Authority" +# Serial: 116697915152937497490437556386812487904 +# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e +# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce +# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c +-----BEGIN CERTIFICATE----- +MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi +MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu +MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp +dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV +UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO +ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz +c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP +OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl +mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF +BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4 +qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw +gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB 
+BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu +bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp +dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8 +6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/ +h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH +/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv +wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN +pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey +-----END CERTIFICATE----- + +# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Label: "COMODO ECC Certification Authority" +# Serial: 41578283867086692638256921589707938090 +# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23 +# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11 +# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7 +-----BEGIN CERTIFICATE----- +MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT +IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw +MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy +ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N +T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv +biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR +FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J +cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW +BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm +fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv +GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= +-----END 
CERTIFICATE----- + +# Issuer: CN=Certigna O=Dhimyotis +# Subject: CN=Certigna O=Dhimyotis +# Label: "Certigna" +# Serial: 18364802974209362175 +# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff +# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97 +# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d +-----BEGIN CERTIFICATE----- +MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV +BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X +DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ +BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4 +QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny +gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw +zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q +130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2 +JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw +DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw +ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT +AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj +AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG +9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h +bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc +fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu +HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w +t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw +WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== +-----END CERTIFICATE----- + +# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc +# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc +# Label: "Cybertrust Global Root" +# Serial: 4835703278459682877484360 +# MD5 Fingerprint: 
72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1 +# SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6 +# SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3 +-----BEGIN CERTIFICATE----- +MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG +A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh +bCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UE +ChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBS +b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN5 +7CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozS +J8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2y +HLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iP +t3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNz +FtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAY +XSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/ +MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAw +hi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3Js +MB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUA +A4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMj +Wqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUx +XOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2o +omcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuoc +A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW +WL1WMRJOEcgh4LMRkWXbtKaIOM5V +-----END CERTIFICATE----- + +# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority +# Subject: O=Chunghwa Telecom Co., Ltd. 
OU=ePKI Root Certification Authority +# Label: "ePKI Root Certification Authority" +# Serial: 28956088682735189655030529057352760477 +# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3 +# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0 +# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5 +-----BEGIN CERTIFICATE----- +MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe +MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 +ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe +Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw +IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL +SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH +SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh +ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X +DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1 +TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ +fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA +sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU +WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS +nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH +dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip +NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC +AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF +MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH +ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB +uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl +PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP +JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/ 
+gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2 +j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6 +5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB +o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS +/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z +Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE +W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D +hNQ+IIX3Sj0rnP0qCglN6oH4EZw= +-----END CERTIFICATE----- + +# Issuer: O=certSIGN OU=certSIGN ROOT CA +# Subject: O=certSIGN OU=certSIGN ROOT CA +# Label: "certSIGN ROOT CA" +# Serial: 35210227249154 +# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17 +# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b +# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb +-----BEGIN CERTIFICATE----- +MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT +AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD +QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP +MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC +ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do +0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ +UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d +RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ +OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv +JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C +AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O +BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ +LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY +MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ +44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I 
+Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw +i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN +9u6wWk5JRFRYX0KD +-----END CERTIFICATE----- + +# Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny" +# Serial: 80544274841616 +# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88 +# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91 +# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98 +-----BEGIN CERTIFICATE----- +MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG +EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3 +MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl +cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR +dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB +pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM +b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm +aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz +IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A +MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT +lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz +AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5 +VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG +ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2 +BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG +AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M +U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh 
+bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C ++C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC +bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F +uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 +XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= +-----END CERTIFICATE----- + +# Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post +# Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post +# Label: "Hongkong Post Root CA 1" +# Serial: 1000 +# MD5 Fingerprint: a8:0d:6f:39:78:b9:43:6d:77:42:6d:98:5a:cc:23:ca +# SHA1 Fingerprint: d6:da:a8:20:8d:09:d2:15:4d:24:b5:2f:cb:34:6e:b2:58:b2:8a:58 +# SHA256 Fingerprint: f9:e6:7d:33:6c:51:00:2a:c0:54:c6:32:02:2d:66:dd:a2:e7:e3:ff:f1:0a:d0:61:ed:31:d8:bb:b4:10:cf:b2 +-----BEGIN CERTIFICATE----- +MIIDMDCCAhigAwIBAgICA+gwDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCSEsx +FjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3Qg +Um9vdCBDQSAxMB4XDTAzMDUxNTA1MTMxNFoXDTIzMDUxNTA0NTIyOVowRzELMAkG +A1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdr +b25nIFBvc3QgUm9vdCBDQSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEArP84tulmAknjorThkPlAj3n54r15/gK97iSSHSL22oVyaf7XPwnU3ZG1ApzQ +jVrhVcNQhrkpJsLj2aDxaQMoIIBFIi1WpztUlVYiWR8o3x8gPW2iNr4joLFutbEn +PzlTCeqrauh0ssJlXI6/fMN4hM2eFvz1Lk8gKgifd/PFHsSaUmYeSF7jEAaPIpjh +ZY4bXSNmO7ilMlHIhqqhqZ5/dpTCpmy3QfDVyAY45tQM4vM7TG1QjMSDJ8EThFk9 +nnV0ttgCXjqQesBCNnLsak3c78QA3xMYV18meMjWCnl3v/evt3a5pQuEF10Q6m/h +q5URX208o1xNg1vysxmKgIsLhwIDAQABoyYwJDASBgNVHRMBAf8ECDAGAQH/AgED +MA4GA1UdDwEB/wQEAwIBxjANBgkqhkiG9w0BAQUFAAOCAQEADkbVPK7ih9legYsC +mEEIjEy82tvuJxuC52pF7BaLT4Wg87JwvVqWuspube5Gi27nKi6Wsxkz67SfqLI3 +7piol7Yutmcn1KZJ/RyTZXaeQi/cImyaT/JaFTmxcdcrUehtHJjA2Sr0oYJ71clB +oiMBdDhViw+5LmeiIAQ32pwL0xch4I+XeTRvhEgCIDMb5jREn5Fw9IBehEPCKdJs +EhTkYY2sEJCehFC78JZvRZ+K88psT/oROhUVRsPNH4NbLUES7VBnQRM9IauUiqpO +fMGx+6fWtScvl6tu4B3i0RwsH0Ti/L6RoZz71ilTc4afU9hDDl3WY4JxHYB0yvbi +AmvZWg== +-----END CERTIFICATE----- + +# Issuer: CN=SecureSign 
RootCA11 O=Japan Certification Services, Inc. +# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. +# Label: "SecureSign RootCA11" +# Serial: 1 +# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26 +# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3 +# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12 +-----BEGIN CERTIFICATE----- +MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr +MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG +A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0 +MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp +Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD +QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz +i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8 +h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV +MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9 +UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni +8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC +h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD +VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB +AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm +KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ +X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr +QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5 +pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN +QSdJQO7e5iNEOdyhIta6A/I= +-----END CERTIFICATE----- + +# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. +# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. 
+# Label: "Microsec e-Szigno Root CA 2009" +# Serial: 14014712776195784473 +# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1 +# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e +# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78 +-----BEGIN CERTIFICATE----- +MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD +VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0 +ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G +CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y +OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx +FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp +Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o +dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP +kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc +cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U +fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7 +N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC +xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1 ++rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM +Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG +SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h +mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk +ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775 +tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c +2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t +HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Label: "GlobalSign Root CA - R3" +# 
Serial: 4835703278459759426209954 +# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28 +# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad +# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b +-----BEGIN CERTIFICATE----- +MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G +A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp +Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 +MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG +A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 +RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT +gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm +KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd +QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ +XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw +DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o +LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU +RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp +jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK +6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX +mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs +Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH +WD9f +-----END CERTIFICATE----- + +# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" +# Serial: 6047274297262753887 +# MD5 Fingerprint: 73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3 +# SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa +# SHA256 Fingerprint: 
04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef +-----BEGIN CERTIFICATE----- +MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UE +BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h +cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEy +MzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg +Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 +thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM +cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG +L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i +NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h +X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b +m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy +Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja +EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T +KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF +6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh +OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYD +VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYD +VR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp +cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBv +ACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBl +AGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF +661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9 +am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1 +ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481 +PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS +3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5k +SeTy36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF 
+3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVM +ZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0g +StRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/icz +Q0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQB +jLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V +-----END CERTIFICATE----- + +# Issuer: CN=Izenpe.com O=IZENPE S.A. +# Subject: CN=Izenpe.com O=IZENPE S.A. +# Label: "Izenpe.com" +# Serial: 917563065490389241595536686991402621 +# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73 +# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19 +# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f +-----BEGIN CERTIFICATE----- +MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4 +MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6 +ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD +VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j +b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq +scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO +xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H +LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX +uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD +yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+ +JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q +rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN +BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L +hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB +QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+ +HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu +Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg +QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB 
+BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx +MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA +A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb +laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56 +awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo +JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw +LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT +VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk +LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb +UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/ +QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+ +naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls +QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw== +-----END CERTIFICATE----- + +# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. +# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. 
+# Label: "Go Daddy Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01 +# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b +# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT +EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp +ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz +NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH +EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE +AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw +DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD +E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH +/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy +DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh +GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR +tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA +AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX +WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu +9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr +gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo +2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO +LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI +4uJEvlz36hz1 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96 +# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e +# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5 +-----BEGIN CERTIFICATE----- +MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs +ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw +MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 +b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj +aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp +Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC +ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg +nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1 +HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N +Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN +dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0 +HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO +BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G +CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU +sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3 +4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg +8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K +pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1 +mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Services Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2 +# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f +# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5 +-----BEGIN CERTIFICATE----- +MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs +ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 +MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD +VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy +ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy +dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p +OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2 +8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K +Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe +hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk +6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw +DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q +AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI +bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB +ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z +qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd +iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn +0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN +sSi6 +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Commercial O=AffirmTrust +# Subject: CN=AffirmTrust Commercial O=AffirmTrust +# Label: "AffirmTrust Commercial" +# Serial: 8608355977964138876 +# MD5 Fingerprint: 
82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7 +# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7 +# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7 +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP +Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr +ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL +MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1 +yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr +VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/ +nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG +XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj +vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt +Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g +N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC +nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Networking O=AffirmTrust +# Subject: CN=AffirmTrust Networking O=AffirmTrust +# Label: "AffirmTrust Networking" +# Serial: 8957382827206547757 +# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f +# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f +# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b +-----BEGIN CERTIFICATE----- 
+MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y +YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua +kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL +QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp +6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG +yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i +QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO +tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu +QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ +Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u +olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48 +x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium O=AffirmTrust +# Subject: CN=AffirmTrust Premium O=AffirmTrust +# Label: "AffirmTrust Premium" +# Serial: 7893706540734352110 +# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57 +# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27 +# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz +dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG +A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U +cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf 
+qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ +JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ ++jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS +s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5 +HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7 +70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG +V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S +qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S +5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia +C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX +OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE +FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2 +KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg +Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B +8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ +MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc +0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ +u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF +u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH +YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8 +GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO +RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e +KeC2uAloGRwYQw== +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust +# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust +# Label: "AffirmTrust Premium ECC" +# Serial: 8401224907861490260 +# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d +# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb +# SHA256 Fingerprint: 
bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23 +-----BEGIN CERTIFICATE----- +MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC +VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ +cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ +BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt +VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D +0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9 +ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G +A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs +aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I +flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. 
OU=Certum Certification Authority +# Label: "Certum Trusted Network CA" +# Serial: 279744 +# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78 +# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e +# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e +-----BEGIN CERTIFICATE----- +MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM +MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D +ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU +cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3 +WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg +Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw +IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B +AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH +UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM +TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU +BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM +kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x +AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV +HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y +sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL +I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8 +J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY +VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI +03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw= +-----END CERTIFICATE----- + +# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA +# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA +# Label: "TWCA Root Certification Authority" +# Serial: 1 +# MD5 Fingerprint: 
aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79 +# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48 +# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44 +-----BEGIN CERTIFICATE----- +MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES +MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU +V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz +WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO +LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB +AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE +AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH +K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX +RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z +rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx +3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq +hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC +MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls +XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D +lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn +aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ +YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw== +-----END CERTIFICATE----- + +# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2 +# Subject: O=SECOM Trust Systems CO.,LTD. 
OU=Security Communication RootCA2 +# Label: "Security Communication RootCA2" +# Serial: 0 +# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43 +# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74 +# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl +MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe +U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX +DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy +dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj +YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV +OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr +zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM +VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ +hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO +ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw +awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs +OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3 +DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF +coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc +okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8 +t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy +1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/ +SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 +-----END CERTIFICATE----- + +# Issuer: CN=EC-ACC O=Agencia Catalana de Certificacio (NIF Q-0801176-I) OU=Serveis Publics de Certificacio/Vegeu https://www.catcert.net/verarrel (c)03/Jerarquia Entitats de Certificacio Catalanes +# Subject: CN=EC-ACC O=Agencia Catalana de Certificacio (NIF Q-0801176-I) OU=Serveis Publics de Certificacio/Vegeu https://www.catcert.net/verarrel 
(c)03/Jerarquia Entitats de Certificacio Catalanes +# Label: "EC-ACC" +# Serial: -23701579247955709139626555126524820479 +# MD5 Fingerprint: eb:f5:9d:29:0d:61:f9:42:1f:7c:c2:ba:6d:e3:15:09 +# SHA1 Fingerprint: 28:90:3a:63:5b:52:80:fa:e6:77:4c:0b:6d:a7:d6:ba:a6:4a:f2:e8 +# SHA256 Fingerprint: 88:49:7f:01:60:2f:31:54:24:6a:e2:8c:4d:5a:ef:10:f1:d8:7e:bb:76:62:6f:4a:e0:b7:f9:5b:a7:96:87:99 +-----BEGIN CERTIFICATE----- +MIIFVjCCBD6gAwIBAgIQ7is969Qh3hSoYqwE893EATANBgkqhkiG9w0BAQUFADCB +8zELMAkGA1UEBhMCRVMxOzA5BgNVBAoTMkFnZW5jaWEgQ2F0YWxhbmEgZGUgQ2Vy +dGlmaWNhY2lvIChOSUYgUS0wODAxMTc2LUkpMSgwJgYDVQQLEx9TZXJ2ZWlzIFB1 +YmxpY3MgZGUgQ2VydGlmaWNhY2lvMTUwMwYDVQQLEyxWZWdldSBodHRwczovL3d3 +dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbCAoYykwMzE1MDMGA1UECxMsSmVyYXJxdWlh +IEVudGl0YXRzIGRlIENlcnRpZmljYWNpbyBDYXRhbGFuZXMxDzANBgNVBAMTBkVD +LUFDQzAeFw0wMzAxMDcyMzAwMDBaFw0zMTAxMDcyMjU5NTlaMIHzMQswCQYDVQQG +EwJFUzE7MDkGA1UEChMyQWdlbmNpYSBDYXRhbGFuYSBkZSBDZXJ0aWZpY2FjaW8g +KE5JRiBRLTA4MDExNzYtSSkxKDAmBgNVBAsTH1NlcnZlaXMgUHVibGljcyBkZSBD +ZXJ0aWZpY2FjaW8xNTAzBgNVBAsTLFZlZ2V1IGh0dHBzOi8vd3d3LmNhdGNlcnQu +bmV0L3ZlcmFycmVsIChjKTAzMTUwMwYDVQQLEyxKZXJhcnF1aWEgRW50aXRhdHMg +ZGUgQ2VydGlmaWNhY2lvIENhdGFsYW5lczEPMA0GA1UEAxMGRUMtQUNDMIIBIjAN +BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsyLHT+KXQpWIR4NA9h0X84NzJB5R +85iKw5K4/0CQBXCHYMkAqbWUZRkiFRfCQ2xmRJoNBD45b6VLeqpjt4pEndljkYRm +4CgPukLjbo73FCeTae6RDqNfDrHrZqJyTxIThmV6PttPB/SnCWDaOkKZx7J/sxaV +HMf5NLWUhdWZXqBIoH7nF2W4onW4HvPlQn2v7fOKSGRdghST2MDk/7NQcvJ29rNd +QlB50JQ+awwAvthrDk4q7D7SzIKiGGUzE3eeml0aE9jD2z3Il3rucO2n5nzbcc8t +lGLfbdb1OL4/pYUKGbio2Al1QnDE6u/LDsg0qBIimAy4E5S2S+zw0JDnJwIDAQAB +o4HjMIHgMB0GA1UdEQQWMBSBEmVjX2FjY0BjYXRjZXJ0Lm5ldDAPBgNVHRMBAf8E +BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUoMOLRKo3pUW/l4Ba0fF4 +opvpXY0wfwYDVR0gBHgwdjB0BgsrBgEEAfV4AQMBCjBlMCwGCCsGAQUFBwIBFiBo +dHRwczovL3d3dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbDA1BggrBgEFBQcCAjApGidW +ZWdldSBodHRwczovL3d3dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbCAwDQYJKoZIhvcN 
+AQEFBQADggEBAKBIW4IB9k1IuDlVNZyAelOZ1Vr/sXE7zDkJlF7W2u++AVtd0x7Y +/X1PzaBB4DSTv8vihpw3kpBWHNzrKQXlxJ7HNd+KDM3FIUPpqojlNcAZQmNaAl6k +SBg6hW/cnbw/nZzBh7h6YQjpdwt/cKt63dmXLGQehb+8dJahw3oS7AwaboMMPOhy +Rp/7SNVel+axofjk70YllJyJ22k4vuxcDlbHZVHlUIiIv0LVKz3l+bqeLrPK9HOS +Agu+TGbrIP65y7WZf+a2E/rKS03Z7lNGBjvGTq2TWoF+bCpLagVFjPIhpDGQh2xl +nJ2lYJU6Un/10asIbvPuW/mIPX64b24D5EI= +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority +# Label: "Hellenic Academic and Research Institutions RootCA 2011" +# Serial: 0 +# MD5 Fingerprint: 73:9f:4c:4b:73:5b:79:e9:fa:ba:1c:ef:6e:cb:d5:c9 +# SHA1 Fingerprint: fe:45:65:9b:79:03:5b:98:a1:61:b5:51:2e:ac:da:58:09:48:22:4d +# SHA256 Fingerprint: bc:10:4f:15:a4:8b:e7:09:dc:a5:42:a7:e1:d4:b9:df:6f:05:45:27:e8:02:ea:a9:2d:59:54:44:25:8a:fe:71 +-----BEGIN CERTIFICATE----- +MIIEMTCCAxmgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBlTELMAkGA1UEBhMCR1Ix +RDBCBgNVBAoTO0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 +dGlvbnMgQ2VydC4gQXV0aG9yaXR5MUAwPgYDVQQDEzdIZWxsZW5pYyBBY2FkZW1p +YyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIFJvb3RDQSAyMDExMB4XDTExMTIw +NjEzNDk1MloXDTMxMTIwMTEzNDk1MlowgZUxCzAJBgNVBAYTAkdSMUQwQgYDVQQK +EztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIENl +cnQuIEF1dGhvcml0eTFAMD4GA1UEAxM3SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBSb290Q0EgMjAxMTCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBAKlTAOMupvaO+mDYLZU++CwqVE7NuYRhlFhPjz2L5EPz +dYmNUeTDN9KKiE15HrcS3UN4SoqS5tdI1Q+kOilENbgH9mgdVc04UfCMJDGFr4PJ +fel3r+0ae50X+bOdOFAPplp5kYCvN66m0zH7tSYJnTxa71HFK9+WXesyHgLacEns +bgzImjeN9/E2YEsmLIKe0HjzDQ9jpFEw4fkrJxIH2Oq9GGKYsFk3fb7u8yBRQlqD +75O6aRXxYp2fmTmCobd0LovUxQt7L/DICto9eQqakxylKHJzkUOap9FNhYS5qXSP +FEDH3N6sQWRstBmbAmNtJGSPRLIl6s5ddAxjMlyNh+UCAwEAAaOBiTCBhjAPBgNV 
+HRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBBjAdBgNVHQ4EFgQUppFC/RNhSiOeCKQp +5dgTBCPuQSUwRwYDVR0eBEAwPqA8MAWCAy5ncjAFggMuZXUwBoIELmVkdTAGggQu +b3JnMAWBAy5ncjAFgQMuZXUwBoEELmVkdTAGgQQub3JnMA0GCSqGSIb3DQEBBQUA +A4IBAQAf73lB4XtuP7KMhjdCSk4cNx6NZrokgclPEg8hwAOXhiVtXdMiKahsog2p +6z0GW5k6x8zDmjR/qw7IThzh+uTczQ2+vyT+bOdrwg3IBp5OjWEopmr95fZi6hg8 +TqBTnbI6nOulnJEWtk2C4AwFSKls9cz4y51JtPACpf1wA+2KIaWuE4ZJwzNzvoc7 +dIsXRSZMFpGD/md9zU1jZ/rzAxKWeAaNsWftjj++n08C9bMJL/NMh98qy5V8Acys +Nnq/onN694/BtZqhFLKPM58N7yLcZnuEvUUXBj08yrl3NI/K6s8/MT7jiOOASSXI +l7WdmplNsDz4SgCbZN2fOUvRJ9e4 +-----END CERTIFICATE----- + +# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 +# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 +# Label: "Actalis Authentication Root CA" +# Serial: 6271844772424770508 +# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6 +# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac +# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66 +-----BEGIN CERTIFICATE----- +MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE +BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w +MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290 +IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC +SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1 +ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv +UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX +4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9 +KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/ +gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb +rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ +51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F 
+be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe +KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F +v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn +fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7 +jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz +ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt +ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL +e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70 +jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz +WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V +SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j +pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX +X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok +fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R +K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU +ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU +LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT +LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg== +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 2 Root CA" +# Serial: 2 +# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29 +# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99 +# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48 +-----BEGIN CERTIFICATE----- +MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB 
+BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr +6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV +L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91 +1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx +MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ +QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB +arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr +Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi +FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS +P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN +9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP +AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz +uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h +9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s +A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t +OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo ++fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7 +KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2 +DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us +H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ +I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7 +5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h +3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz +Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA= +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 3 Root CA" +# Serial: 2 +# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec +# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57 +# SHA256 Fingerprint: 
ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d +-----BEGIN CERTIFICATE----- +MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB +BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y +ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E +N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9 +tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX +0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c +/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X +KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY +zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS +O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D +34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP +K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3 +AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv +Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj +QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV +cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS +IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2 +HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa +O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv +033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u +dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE +kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41 +3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD +u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq 
+4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc= +-----END CERTIFICATE----- + +# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 3" +# Serial: 1 +# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef +# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1 +# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy +aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN +8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/ +RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4 +hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5 +ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM +EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1 +A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy +WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ +1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30 +6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT +91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml +e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p +TpPDpFQUWw== +-----END CERTIFICATE----- + 
+# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 2009" +# Serial: 623603 +# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f +# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0 +# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1 +-----BEGIN CERTIFICATE----- +MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha +ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM +HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03 +UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42 +tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R +ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM +lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp +/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G +A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G +A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj +dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy +MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl +cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js +L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL +BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni +acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0 +o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K +zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8 +PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y +Johw1+qRzT65ysCQblrGXnRl11z+o+I= +-----END CERTIFICATE----- + +# 
Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 EV 2009" +# Serial: 623604 +# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6 +# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83 +# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81 +-----BEGIN CERTIFICATE----- +MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw +NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV +BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn +ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0 +3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z +qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR +p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8 +HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw +ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea +HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw +Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh +c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E +RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt +dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku +Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp +3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05 +nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF +CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na +xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX +KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1 
+-----END CERTIFICATE----- + +# Issuer: CN=CA Disig Root R2 O=Disig a.s. +# Subject: CN=CA Disig Root R2 O=Disig a.s. +# Label: "CA Disig Root R2" +# Serial: 10572350602393338211 +# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03 +# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71 +# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03 +-----BEGIN CERTIFICATE----- +MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV +BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu +MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy +MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx +EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw +ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe +NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH +PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I +x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe +QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR +yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO +QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912 +H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ +QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD +i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs +nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1 +rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud +DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI +hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM +tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf +GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb +lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka ++elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal 
+TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i +nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3 +gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr +G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os +zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x +L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL +-----END CERTIFICATE----- + +# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV +# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV +# Label: "ACCVRAIZ1" +# Serial: 6828503384748696800 +# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02 +# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17 +# SHA256 Fingerprint: 9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13 +-----BEGIN CERTIFICATE----- +MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE +AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw +CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ +BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND +VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb +qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY +HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo +G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA +lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr +IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/ +0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH +k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47 +4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO +m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa +cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl +uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI +KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls 
+ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG +AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2 +VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT +VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG +CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA +cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA +QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA +7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA +cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA +QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA +czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu +aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt +aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud +DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF +BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp +D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU +JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m +AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD +vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms +tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH +7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h +I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA +h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF +d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H +pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7 +-----END CERTIFICATE----- + +# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA +# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA +# Label: "TWCA Global Root CA" +# Serial: 3262 +# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96 +# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65 +# SHA256 
Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx +EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT +VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5 +NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT +B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF +10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz +0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh +MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH +zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc +46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2 +yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi +laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP +oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA +BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE +qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm +4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL +1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn +LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF +H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo +RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+ +nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh +15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW +6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW +nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j +wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz +aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy +KwbQBM0= 
+-----END CERTIFICATE----- + +# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Label: "TeliaSonera Root CA v1" +# Serial: 199041966741090107964904287217786801558 +# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c +# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37 +# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89 +-----BEGIN CERTIFICATE----- +MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw +NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv +b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD +VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2 +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F +VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1 +7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X +Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+ +/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs +81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm +dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe +Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu +sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4 +pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs +slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ +arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD +VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG +9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl +dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx +0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj +TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed +Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7 
+Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI +OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7 +vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW +t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn +HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx +SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY= +-----END CERTIFICATE----- + +# Issuer: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi +# Subject: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi +# Label: "E-Tugra Certification Authority" +# Serial: 7667447206703254355 +# MD5 Fingerprint: b8:a1:03:63:b0:bd:21:71:70:8a:6f:13:3a:bb:79:49 +# SHA1 Fingerprint: 51:c6:e7:08:49:06:6e:f3:92:d4:5c:a0:0d:6d:a3:62:8f:c3:52:39 +# SHA256 Fingerprint: b0:bf:d5:2b:b0:d7:d9:bd:92:bf:5d:4d:c1:3d:a2:55:c0:2c:54:2f:37:83:65:ea:89:39:11:f5:5e:55:f2:3c +-----BEGIN CERTIFICATE----- +MIIGSzCCBDOgAwIBAgIIamg+nFGby1MwDQYJKoZIhvcNAQELBQAwgbIxCzAJBgNV +BAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+BgNVBAoMN0UtVHXEn3JhIEVCRyBC +aWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhpem1ldGxlcmkgQS7Fni4xJjAkBgNV +BAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBNZXJrZXppMSgwJgYDVQQDDB9FLVR1 +Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTEzMDMwNTEyMDk0OFoXDTIz +MDMwMzEyMDk0OFowgbIxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+ +BgNVBAoMN0UtVHXEn3JhIEVCRyBCaWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhp +em1ldGxlcmkgQS7Fni4xJjAkBgNVBAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBN +ZXJrZXppMSgwJgYDVQQDDB9FLVR1Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5 +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA4vU/kwVRHoViVF56C/UY +B4Oufq9899SKa6VjQzm5S/fDxmSJPZQuVIBSOTkHS0vdhQd2h8y/L5VMzH2nPbxH +D5hw+IyFHnSOkm0bQNGZDbt1bsipa5rAhDGvykPL6ys06I+XawGb1Q5KCKpbknSF +Q9OArqGIW66z6l7LFpp3RMih9lRozt6Plyu6W0ACDGQXwLWTzeHxE2bODHnv0ZEo 
+q1+gElIwcxmOj+GMB6LDu0rw6h8VqO4lzKRG+Bsi77MOQ7osJLjFLFzUHPhdZL3D +k14opz8n8Y4e0ypQBaNV2cvnOVPAmJ6MVGKLJrD3fY185MaeZkJVgkfnsliNZvcH +fC425lAcP9tDJMW/hkd5s3kc91r0E+xs+D/iWR+V7kI+ua2oMoVJl0b+SzGPWsut +dEcf6ZG33ygEIqDUD13ieU/qbIWGvaimzuT6w+Gzrt48Ue7LE3wBf4QOXVGUnhMM +ti6lTPk5cDZvlsouDERVxcr6XQKj39ZkjFqzAQqptQpHF//vkUAqjqFGOjGY5RH8 +zLtJVor8udBhmm9lbObDyz51Sf6Pp+KJxWfXnUYTTjF2OySznhFlhqt/7x3U+Lzn +rFpct1pHXFXOVbQicVtbC/DP3KBhZOqp12gKY6fgDT+gr9Oq0n7vUaDmUStVkhUX +U8u3Zg5mTPj5dUyQ5xJwx0UCAwEAAaNjMGEwHQYDVR0OBBYEFC7j27JJ0JxUeVz6 +Jyr+zE7S6E5UMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAULuPbsknQnFR5 +XPonKv7MTtLoTlQwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAF +Nzr0TbdF4kV1JI+2d1LoHNgQk2Xz8lkGpD4eKexd0dCrfOAKkEh47U6YA5n+KGCR +HTAduGN8qOY1tfrTYXbm1gdLymmasoR6d5NFFxWfJNCYExL/u6Au/U5Mh/jOXKqY +GwXgAEZKgoClM4so3O0409/lPun++1ndYYRP0lSWE2ETPo+Aab6TR7U1Q9Jauz1c +77NCR807VRMGsAnb/WP2OogKmW9+4c4bU2pEZiNRCHu8W1Ki/QY3OEBhj0qWuJA3 ++GbHeJAAFS6LrVE1Uweoa2iu+U48BybNCAVwzDk/dr2l02cmAYamU9JgO3xDf1WK +vJUawSg5TB9D0pH0clmKuVb8P7Sd2nCcdlqMQ1DujjByTd//SffGqWfZbawCEeI6 +FiWnWAjLb1NBnEg4R2gz0dfHj9R0IdTDBZB6/86WiLEVKV0jq9BgoRJP3vQXzTLl +yb/IQ639Lo7xr+L0mPoSHyDYwKcMhcWQ9DstliaxLL5Mq+ux0orJ23gTDx4JnW2P +AJ8C2sH6H3p6CcRK5ogql5+Ji/03X186zjhZhkuvcQu02PJwT58yE+Owp1fl2tpD +y4Q08ijE6m30Ku/Ba3ba+367hTzSU8JNvnHhRdH9I2cNE3X7z2VnIp2usAnRCf8d +NL/+I5c30jn6PQ0GC7TbO6Orb1wdtn7os4I07QZcJA== +-----END CERTIFICATE----- + +# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 2" +# Serial: 1 +# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a +# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9 +# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52 +-----BEGIN CERTIFICATE----- 
+MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy +aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd +AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC +FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi +1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq +jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ +wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/ +WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy +NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC +uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw +IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6 +g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN +9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP +BSeOE6Fuwg== +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot 2011 O=Atos +# Subject: CN=Atos TrustedRoot 2011 O=Atos +# Label: "Atos TrustedRoot 2011" +# Serial: 6643877497813316402 +# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56 +# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21 +# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE +AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG 
+EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM +FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC +REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp +Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM +VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+ +SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ +4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L +cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi +eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV +HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG +A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3 +DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j +vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP +DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc +maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D +lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv +KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 1 G3" +# Serial: 687049649626669250736271037606554624078720034195 +# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab +# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67 +# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00 +MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG 
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV +wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe +rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341 +68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh +4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp +UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o +abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc +3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G +KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt +hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO +Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt +zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD +ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC +MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2 +cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN +qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5 +YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv +b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2 +8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k +NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj +ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp +q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt +nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2 G3" +# Serial: 390156079458959257446133169266079962026824725800 +# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06 +# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36 +# SHA256 Fingerprint: 
8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00 +MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf +qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW +n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym +c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+ +O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1 +o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j +IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq +IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz +8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh +vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l +7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG +cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD +ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66 +AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC +roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga +W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n +lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE ++V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV +csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd +dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg +KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM +HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4 
+WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3 G3" +# Serial: 268090761170461462463995952157327242137089239581 +# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7 +# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d +# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00 +MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR +/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu +FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR +U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c +ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR +FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k +A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw +eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl +sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp +VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q +A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+ +ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD +ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px +KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI +FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv 
+oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg +u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP +0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf +3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl +8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+ +DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN +PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/ +ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0 +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G2" +# Serial: 15385348160840213938643033620894905419 +# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d +# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f +# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85 +-----BEGIN CERTIFICATE----- +MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA +n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc +biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp +EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA +bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu +YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB +AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW 
+BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI +QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I +0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni +lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9 +B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv +ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo +IhNzbM8m9Yop5w== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G3" +# Serial: 15459312981008553731928384953135426796 +# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb +# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89 +# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2 +-----BEGIN CERTIFICATE----- +MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg +RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq +hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf +Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q +RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD +AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY +JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv +6pZjamVFkpUBtA== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global 
Root G2" +# Serial: 4293743540046975378534879503202253541 +# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44 +# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4 +# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f +-----BEGIN CERTIFICATE----- +MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH +MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j +b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI +2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx +1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ +q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz +tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ +vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV +5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY +1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4 +NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG +Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91 +8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe +pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl +MrY= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G3" +# Serial: 7089244469030293291760083333884364146 +# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca +# SHA1 Fingerprint: 
7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e +# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0 +-----BEGIN CERTIFICATE----- +MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe +Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw +EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x +IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF +K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG +fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO +Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd +BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx +AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/ +oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8 +sycX +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Trusted Root G4" +# Serial: 7451500558977370777930084869016614236 +# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49 +# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4 +# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88 +-----BEGIN CERTIFICATE----- +MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg +RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG 
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y +ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If +xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV +ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO +DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ +jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/ +CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi +EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM +fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY +uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK +chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t +9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD +ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2 +SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd ++SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc +fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa +sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N +cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N +0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie +4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI +r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1 +/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm +gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+ +-----END CERTIFICATE----- + +# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Label: "COMODO RSA Certification Authority" +# Serial: 101909084537582093308941363524873193117 +# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18 +# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4 +# SHA256 
Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34 +-----BEGIN CERTIFICATE----- +MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB +hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV +BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5 +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT +EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR +Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh +dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR +6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X +pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC +9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV +/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf +Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z ++pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w +qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah +SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC +u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf +Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq +crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E +FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB +/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl +wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM +4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV +2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna +FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ +CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK +boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke +jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL 
+S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb +QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl +0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB +NVOFBkpdn627G190 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Label: "USERTrust RSA Certification Authority" +# Serial: 2645093764781058787591871645665788717 +# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5 +# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e +# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2 +-----BEGIN CERTIFICATE----- +MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB +iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl +cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV +BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw +MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV +BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU +aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B +3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY +tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/ +Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2 +VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT +79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6 +c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT +Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l +c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee +UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE 
+Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd +BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G +A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF +Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO +VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3 +ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs +8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR +iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze +Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ +XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/ +qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB +VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB +L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG +jjxDah2nGN59PRbxYvnKkKj9 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Label: "USERTrust ECC Certification Authority" +# Serial: 123013823720199481456569720443997572134 +# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1 +# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0 +# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a +-----BEGIN CERTIFICATE----- +MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL +MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl +eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT +JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT +Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg +VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo 
+I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng +o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G +A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB +zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW +RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Label: "GlobalSign ECC Root CA - R4" +# Serial: 14367148294922964480859022125800977897474 +# MD5 Fingerprint: 20:f0:27:68:d1:7e:a0:9d:0e:e6:2a:ca:df:5c:89:8e +# SHA1 Fingerprint: 69:69:56:2e:40:80:f4:24:a1:e7:19:9f:14:ba:f3:ee:58:ab:6a:bb +# SHA256 Fingerprint: be:c9:49:11:c2:95:56:76:db:6c:0a:55:09:86:d7:6e:3b:a0:05:66:7c:44:2c:97:62:b4:fb:b7:73:de:22:8c +-----BEGIN CERTIFICATE----- +MIIB4TCCAYegAwIBAgIRKjikHJYKBN5CsiilC+g0mAIwCgYIKoZIzj0EAwIwUDEk +MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI0MRMwEQYDVQQKEwpH +bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX +DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD +QSAtIFI0MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu +MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEuMZ5049sJQ6fLjkZHAOkrprlOQcJ +FspjsbmG+IpXwVfOQvpzofdlQv8ewQCybnMO/8ch5RikqtlxP6jUuc6MHaNCMEAw +DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFFSwe61F +uOJAf/sKbvu+M8k8o4TVMAoGCCqGSM49BAMCA0gAMEUCIQDckqGgE6bPA7DmxCGX +kPoUVy0D7O48027KqGx2vKLeuwIgJ6iFJzWbVsaj8kfSt24bAgAXqmemFZHe+pTs +ewv4n4Q= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Label: "GlobalSign ECC Root CA - R5" +# Serial: 32785792099990507226680698011560947931244 +# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08 +# SHA1 Fingerprint: 
1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa +# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24 +-----BEGIN CERTIFICATE----- +MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk +MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH +bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX +DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD +QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu +MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc +8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke +hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI +KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg +515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO +xwy8p2Fp8fc74SrL+SvzZpA3 +-----END CERTIFICATE----- + +# Issuer: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden +# Subject: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden +# Label: "Staat der Nederlanden EV Root CA" +# Serial: 10000013 +# MD5 Fingerprint: fc:06:af:7b:e8:1a:f1:9a:b4:e8:d2:70:1f:c0:f5:ba +# SHA1 Fingerprint: 76:e2:7e:c1:4f:db:82:c1:c0:a6:75:b5:05:be:3d:29:b4:ed:db:bb +# SHA256 Fingerprint: 4d:24:91:41:4c:fe:95:67:46:ec:4c:ef:a6:cf:6f:72:e2:8a:13:29:43:2f:9d:8a:90:7a:c4:cb:5d:ad:c1:5a +-----BEGIN CERTIFICATE----- +MIIFcDCCA1igAwIBAgIEAJiWjTANBgkqhkiG9w0BAQsFADBYMQswCQYDVQQGEwJO +TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSkwJwYDVQQDDCBTdGFh +dCBkZXIgTmVkZXJsYW5kZW4gRVYgUm9vdCBDQTAeFw0xMDEyMDgxMTE5MjlaFw0y +MjEyMDgxMTEwMjhaMFgxCzAJBgNVBAYTAk5MMR4wHAYDVQQKDBVTdGFhdCBkZXIg +TmVkZXJsYW5kZW4xKTAnBgNVBAMMIFN0YWF0IGRlciBOZWRlcmxhbmRlbiBFViBS +b290IENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA48d+ifkkSzrS +M4M1LGns3Amk41GoJSt5uAg94JG6hIXGhaTK5skuU6TJJB79VWZxXSzFYGgEt9nC 
+UiY4iKTWO0Cmws0/zZiTs1QUWJZV1VD+hq2kY39ch/aO5ieSZxeSAgMs3NZmdO3d +Z//BYY1jTw+bbRcwJu+r0h8QoPnFfxZpgQNH7R5ojXKhTbImxrpsX23Wr9GxE46p +rfNeaXUmGD5BKyF/7otdBwadQ8QpCiv8Kj6GyzyDOvnJDdrFmeK8eEEzduG/L13l +pJhQDBXd4Pqcfzho0LKmeqfRMb1+ilgnQ7O6M5HTp5gVXJrm0w912fxBmJc+qiXb +j5IusHsMX/FjqTf5m3VpTCgmJdrV8hJwRVXj33NeN/UhbJCONVrJ0yPr08C+eKxC +KFhmpUZtcALXEPlLVPxdhkqHz3/KRawRWrUgUY0viEeXOcDPusBCAUCZSCELa6fS +/ZbV0b5GnUngC6agIk440ME8MLxwjyx1zNDFjFE7PZQIZCZhfbnDZY8UnCHQqv0X +cgOPvZuM5l5Tnrmd74K74bzickFbIZTTRTeU0d8JOV3nI6qaHcptqAqGhYqCvkIH +1vI4gnPah1vlPNOePqc7nvQDs/nxfRN0Av+7oeX6AHkcpmZBiFxgV6YuCcS6/ZrP +px9Aw7vMWgpVSzs4dlG4Y4uElBbmVvMCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB +/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFP6rAJCYniT8qcwaivsnuL8wbqg7 +MA0GCSqGSIb3DQEBCwUAA4ICAQDPdyxuVr5Os7aEAJSrR8kN0nbHhp8dB9O2tLsI +eK9p0gtJ3jPFrK3CiAJ9Brc1AsFgyb/E6JTe1NOpEyVa/m6irn0F3H3zbPB+po3u +2dfOWBfoqSmuc0iH55vKbimhZF8ZE/euBhD/UcabTVUlT5OZEAFTdfETzsemQUHS +v4ilf0X8rLiltTMMgsT7B/Zq5SWEXwbKwYY5EdtYzXc7LMJMD16a4/CrPmEbUCTC +wPTxGfARKbalGAKb12NMcIxHowNDXLldRqANb/9Zjr7dn3LDWyvfjFvO5QxGbJKy +CqNMVEIYFRIYvdr8unRu/8G2oGTYqV9Vrp9canaW2HNnh/tNf1zuacpzEPuKqf2e +vTY4SUmH9A4U8OmHuD+nT3pajnnUk+S7aFKErGzp85hwVXIy+TSrK0m1zSBi5Dp6 +Z2Orltxtrpfs/J92VoguZs9btsmksNcFuuEnL5O7Jiqik7Ab846+HUCjuTaPPoIa +Gl6I6lD4WeKDRikL40Rc4ZW2aZCaFG+XroHPaO+Zmr615+F/+PoTRxZMzG0IQOeL +eG9QgkRQP2YGiqtDhFZKDyAthg710tvSeopLzaXoTvFeJiUBWSOgftL2fiFX1ye8 +FVdMpEbB4IMeDExNH08GGeL5qPQ6gqGyeUN51q1veieQA6TqJIc/2b3Z6fJfUEkc +7uzXLg== +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Label: "IdenTrust Commercial Root CA 1" +# Serial: 13298821034946342390520003877796839426 +# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7 +# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25 +# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae +-----BEGIN CERTIFICATE----- 
+MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu +VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw +MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw +JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT +3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU ++ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp +S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1 +bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi +T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL +vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK +Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK +dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT +c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv +l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N +iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD +ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH +6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt +LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93 +nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3 ++wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK +W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT +AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq +l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG +4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ +mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A +7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Subject: 
CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Label: "IdenTrust Public Sector Root CA 1" +# Serial: 13298821034946342390521976156843933698 +# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba +# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd +# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f +-----BEGIN CERTIFICATE----- +MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu +VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN +MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0 +MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7 +ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy +RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS +bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF +/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R +3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw +EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy +9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V +GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ +2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV +WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD +W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN +AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj +t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV +DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9 +TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G +lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW 
+mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df +WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5 ++bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ +tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA +GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv +8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only +# Label: "Entrust Root Certification Authority - G2" +# Serial: 1246989352 +# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2 +# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4 +# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39 +-----BEGIN CERTIFICATE----- +MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50 +cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs +IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz +dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy +NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu +dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt +dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0 +aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK +AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T +RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN +cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW 
+wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1 +U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0 +jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP +BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN +BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/ +jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ +Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v +1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R +nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH +VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g== +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only +# Label: "Entrust Root Certification Authority - EC1" +# Serial: 51543124481930649114116133369 +# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc +# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47 +# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5 +-----BEGIN CERTIFICATE----- +MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG +A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3 +d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu +dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq +RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy +MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD +VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0 +L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g +Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD 
+ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi +A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt +ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH +Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O +BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC +R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX +hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G +-----END CERTIFICATE----- + +# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority +# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority +# Label: "CFCA EV ROOT" +# Serial: 407555286 +# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30 +# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83 +# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd +-----BEGIN CERTIFICATE----- +MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD +TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y +aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx +MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j +aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP +T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03 +sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL +TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5 +/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp +7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz +EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt +hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP +a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot +aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg +TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV 
+PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv +cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL +tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd +BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB +ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT +ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL +jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS +ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy +P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19 +xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d +Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN +5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe +/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z +AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ +5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Label: "OISTE WISeKey Global Root GB CA" +# Serial: 157768595616588414422159278966750757568 +# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d +# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed +# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6 +-----BEGIN CERTIFICATE----- +MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt +MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg +Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i +YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x +CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG +b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh 
+bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3 +HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx +WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX +1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk +u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P +99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r +M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB +BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh +cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5 +gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO +ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf +aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic +Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM= +-----END CERTIFICATE----- + +# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. +# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. 
+# Label: "SZAFIR ROOT CA2" +# Serial: 357043034767186914217277344587386743377558296292 +# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99 +# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de +# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe +-----BEGIN CERTIFICATE----- +MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL +BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6 +ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw +NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L +cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg +Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN +QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT +3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw +3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6 +3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5 +BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN +XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF +AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw +8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG +nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP +oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy +d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg +LvWpCz/UXeHPhJ/iGcJfitYgHuNztw== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. 
OU=Certum Certification Authority +# Label: "Certum Trusted Network CA 2" +# Serial: 44979900017204383099463764357512596969 +# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2 +# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92 +# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04 +-----BEGIN CERTIFICATE----- +MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB +gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu +QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG +A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz +OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ +VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3 +b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA +DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn +0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB +OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE +fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E +Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m +o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i +sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW +OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez +Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS +adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n +3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC +AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ +F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf +CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29 
+XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm +djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/ +WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb +AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq +P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko +b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj +XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P +5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi +DrW5viSP +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Label: "Hellenic Academic and Research Institutions RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce +# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6 +# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36 +-----BEGIN CERTIFICATE----- +MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix +DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k +IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT +N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v +dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG +A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh +ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx +QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 +dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC +AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA +4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0 
+AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10 +4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C +ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV +9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD +gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6 +Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq +NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko +LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc +Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd +ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I +XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI +M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot +9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V +Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea +j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh +X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ +l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf +bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4 +pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK +e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0 +vm9qp/UsQu0yrbYhnr68 +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef +# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66 +# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33 +-----BEGIN CERTIFICATE----- +MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN +BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl +bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv +b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ +BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj +YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5 +MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0 +dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg +QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa +jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC +MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi +C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep +lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof +TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR +-----END CERTIFICATE----- + +# Issuer: CN=ISRG Root X1 O=Internet Security Research Group +# Subject: CN=ISRG Root X1 O=Internet Security Research Group +# Label: "ISRG Root X1" +# Serial: 172886928669790476064670243504169061120 +# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e +# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8 +# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6 +-----BEGIN CERTIFICATE----- +MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw 
+TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh +cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4 +WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu +ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc +h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+ +0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U +A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW +T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH +B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC +B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv +KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn +OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn +jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw +qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI +rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq +hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL +ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ +3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK +NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5 +ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur +TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC +jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc +oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq +4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA +mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d +emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc= +-----END CERTIFICATE----- + +# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Label: "AC RAIZ FNMT-RCM" +# 
Serial: 485876308206448804701554682760554759 +# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d +# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20 +# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx +CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ +WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ +BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG +Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/ +yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf +BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz +WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF +tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z +374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC +IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL +mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7 +wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS +MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2 +ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet +UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw +AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H +YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3 +LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD +nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1 +RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM +LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf +77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N +JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm +fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp 
+6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp +1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B +9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok +RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv +uu8wd+RU4riEmViAqhOLUTpPSPaLtrM= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 1 O=Amazon +# Subject: CN=Amazon Root CA 1 O=Amazon +# Label: "Amazon Root CA 1" +# Serial: 143266978916655856878034712317230054538369994 +# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6 +# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16 +# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e +-----BEGIN CERTIFICATE----- +MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj +ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM +9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw +IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6 +VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L +93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm +jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA +A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI +U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs +N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv +o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU +5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy +rqXRfboQnoZsG4q5WTP468SQvvG5 +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root 
CA 2 O=Amazon +# Subject: CN=Amazon Root CA 2 O=Amazon +# Label: "Amazon Root CA 2" +# Serial: 143266982885963551818349160658925006970653239 +# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66 +# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a +# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4 +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK +gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ +W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg +1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K +8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r +2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me +z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR +8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj +mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz +7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6 ++XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI +0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB +Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm +UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2 +LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY ++gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS +k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl +7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm +btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl 
+urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+ +fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63 +n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE +76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H +9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT +4PsJYGw= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 3 O=Amazon +# Subject: CN=Amazon Root CA 3 O=Amazon +# Label: "Amazon Root CA 3" +# Serial: 143266986699090766294700635381230934788665930 +# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87 +# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e +# SHA256 Fingerprint: 18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4 +-----BEGIN CERTIFICATE----- +MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl +ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr +ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr +BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM +YyRIHN8wfdVoOw== +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 4 O=Amazon +# Subject: CN=Amazon Root CA 4 O=Amazon +# Label: "Amazon Root CA 4" +# Serial: 143266989758080763974105200630763877849284878 +# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd +# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be +# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92 +-----BEGIN CERTIFICATE----- 
+MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi +9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk +M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB +/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB +MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw +CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW +1KyLa2tJElMzrdfkviT8tQp21KW8EA== +-----END CERTIFICATE----- + +# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1" +# Serial: 1 +# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49 +# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca +# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16 +-----BEGIN CERTIFICATE----- +MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx +GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp +bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w +KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0 +BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy +dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG +EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll +IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU 
+QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT +TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg +LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7 +a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr +LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr +N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X +YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/ +iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f +AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH +V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh +AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf +IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4 +lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c +8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf +lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM= +-----END CERTIFICATE----- + +# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. +# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. 
+# Label: "GDCA TrustAUTH R5 ROOT" +# Serial: 9009899650740120186 +# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4 +# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4 +# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93 +-----BEGIN CERTIFICATE----- +MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE +BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ +IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0 +MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV +BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w +HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj +Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj +TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u +KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj +qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm +MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12 +ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP +zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk +L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC +jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA +HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC +AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg +p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm +DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5 +COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry +L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf +JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg 
+IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io +2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV +09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ +XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq +T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe +MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g== +-----END CERTIFICATE----- + +# Issuer: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority +# Subject: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority +# Label: "TrustCor RootCert CA-1" +# Serial: 15752444095811006489 +# MD5 Fingerprint: 6e:85:f1:dc:1a:00:d3:22:d5:b2:b2:ac:6b:37:05:45 +# SHA1 Fingerprint: ff:bd:cd:e7:82:c8:43:5e:3c:6f:26:86:5c:ca:a8:3a:45:5b:c3:0a +# SHA256 Fingerprint: d4:0e:9c:86:cd:8f:e4:68:c1:77:69:59:f4:9e:a7:74:fa:54:86:84:b6:c4:06:f3:90:92:61:f4:dc:e2:57:5c +-----BEGIN CERTIFICATE----- +MIIEMDCCAxigAwIBAgIJANqb7HHzA7AZMA0GCSqGSIb3DQEBCwUAMIGkMQswCQYD +VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk +MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U +cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRydXN0Q29y +IFJvb3RDZXJ0IENBLTEwHhcNMTYwMjA0MTIzMjE2WhcNMjkxMjMxMTcyMzE2WjCB +pDELMAkGA1UEBhMCUEExDzANBgNVBAgMBlBhbmFtYTEUMBIGA1UEBwwLUGFuYW1h +IENpdHkxJDAiBgNVBAoMG1RydXN0Q29yIFN5c3RlbXMgUy4gZGUgUi5MLjEnMCUG +A1UECwweVHJ1c3RDb3IgQ2VydGlmaWNhdGUgQXV0aG9yaXR5MR8wHQYDVQQDDBZU +cnVzdENvciBSb290Q2VydCBDQS0xMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB +CgKCAQEAv463leLCJhJrMxnHQFgKq1mqjQCj/IDHUHuO1CAmujIS2CNUSSUQIpid +RtLByZ5OGy4sDjjzGiVoHKZaBeYei0i/mJZ0PmnK6bV4pQa81QBeCQryJ3pS/C3V +seq0iWEk8xoT26nPUu0MJLq5nux+AHT6k61sKZKuUbS701e/s/OojZz0JEsq1pme +9J7+wH5COucLlVPat2gOkEz7cD+PSiyU8ybdY2mplNgQTsVHCJCZGxdNuWxu72CV +EY4hgLW9oHPY0LJ3xEXqWib7ZnZ2+AYfYW0PVcWDtxBWcgYHpfOxGgMFZA6dWorW +hnAbJN7+KIor0Gqw/Hqi3LJ5DotlDwIDAQABo2MwYTAdBgNVHQ4EFgQU7mtJPHo/ 
+DeOxCbeKyKsZn3MzUOcwHwYDVR0jBBgwFoAU7mtJPHo/DeOxCbeKyKsZn3MzUOcw +DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQAD +ggEBACUY1JGPE+6PHh0RU9otRCkZoB5rMZ5NDp6tPVxBb5UrJKF5mDo4Nvu7Zp5I +/5CQ7z3UuJu0h3U/IJvOcs+hVcFNZKIZBqEHMwwLKeXx6quj7LUKdJDHfXLy11yf +ke+Ri7fc7Waiz45mO7yfOgLgJ90WmMCV1Aqk5IGadZQ1nJBfiDcGrVmVCrDRZ9MZ +yonnMlo2HD6CqFqTvsbQZJG2z9m2GM/bftJlo6bEjhcxwft+dtvTheNYsnd6djts +L1Ac59v2Z3kf9YKVmgenFK+P3CghZwnS1k1aHBkcjndcw5QkPTJrS37UeJSDvjdN +zl/HHk484IkzlQsPpTLWPFp5LBk= +-----END CERTIFICATE----- + +# Issuer: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority +# Subject: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority +# Label: "TrustCor RootCert CA-2" +# Serial: 2711694510199101698 +# MD5 Fingerprint: a2:e1:f8:18:0b:ba:45:d5:c7:41:2a:bb:37:52:45:64 +# SHA1 Fingerprint: b8:be:6d:cb:56:f1:55:b9:63:d4:12:ca:4e:06:34:c7:94:b2:1c:c0 +# SHA256 Fingerprint: 07:53:e9:40:37:8c:1b:d5:e3:83:6e:39:5d:ae:a5:cb:83:9e:50:46:f1:bd:0e:ae:19:51:cf:10:fe:c7:c9:65 +-----BEGIN CERTIFICATE----- +MIIGLzCCBBegAwIBAgIIJaHfyjPLWQIwDQYJKoZIhvcNAQELBQAwgaQxCzAJBgNV +BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw +IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy +dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEfMB0GA1UEAwwWVHJ1c3RDb3Ig +Um9vdENlcnQgQ0EtMjAeFw0xNjAyMDQxMjMyMjNaFw0zNDEyMzExNzI2MzlaMIGk +MQswCQYDVQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEg +Q2l0eTEkMCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYD +VQQLDB5UcnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRy +dXN0Q29yIFJvb3RDZXJ0IENBLTIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCnIG7CKqJiJJWQdsg4foDSq8GbZQWU9MEKENUCrO2fk8eHyLAnK0IMPQo+ +QVqedd2NyuCb7GgypGmSaIwLgQ5WoD4a3SwlFIIvl9NkRvRUqdw6VC0xK5mC8tkq +1+9xALgxpL56JAfDQiDyitSSBBtlVkxs1Pu2YVpHI7TYabS3OtB0PAx1oYxOdqHp +2yqlO/rOsP9+aij9JxzIsekp8VduZLTQwRVtDr4uDkbIXvRR/u8OYzo7cbrPb1nK 
+DOObXUm4TOJXsZiKQlecdu/vvdFoqNL0Cbt3Nb4lggjEFixEIFapRBF37120Hape +az6LMvYHL1cEksr1/p3C6eizjkxLAjHZ5DxIgif3GIJ2SDpxsROhOdUuxTTCHWKF +3wP+TfSvPd9cW436cOGlfifHhi5qjxLGhF5DUVCcGZt45vz27Ud+ez1m7xMTiF88 +oWP7+ayHNZ/zgp6kPwqcMWmLmaSISo5uZk3vFsQPeSghYA2FFn3XVDjxklb9tTNM +g9zXEJ9L/cb4Qr26fHMC4P99zVvh1Kxhe1fVSntb1IVYJ12/+CtgrKAmrhQhJ8Z3 +mjOAPF5GP/fDsaOGM8boXg25NSyqRsGFAnWAoOsk+xWq5Gd/bnc/9ASKL3x74xdh +8N0JqSDIvgmk0H5Ew7IwSjiqqewYmgeCK9u4nBit2uBGF6zPXQIDAQABo2MwYTAd +BgNVHQ4EFgQU2f4hQG6UnrybPZx9mCAZ5YwwYrIwHwYDVR0jBBgwFoAU2f4hQG6U +nrybPZx9mCAZ5YwwYrIwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYw +DQYJKoZIhvcNAQELBQADggIBAJ5Fngw7tu/hOsh80QA9z+LqBrWyOrsGS2h60COX +dKcs8AjYeVrXWoSK2BKaG9l9XE1wxaX5q+WjiYndAfrs3fnpkpfbsEZC89NiqpX+ +MWcUaViQCqoL7jcjx1BRtPV+nuN79+TMQjItSQzL/0kMmx40/W5ulop5A7Zv2wnL +/V9lFDfhOPXzYRZY5LVtDQsEGz9QLX+zx3oaFoBg+Iof6Rsqxvm6ARppv9JYx1RX +CI/hOWB3S6xZhBqI8d3LT3jX5+EzLfzuQfogsL7L9ziUwOHQhQ+77Sxzq+3+knYa +ZH9bDTMJBzN7Bj8RpFxwPIXAz+OQqIN3+tvmxYxoZxBnpVIt8MSZj3+/0WvitUfW +2dCFmU2Umw9Lje4AWkcdEQOsQRivh7dvDDqPys/cA8GiCcjl/YBeyGBCARsaU1q7 +N6a3vLqE6R5sGtRk2tRD/pOLS/IseRYQ1JMLiI+h2IYURpFHmygk71dSTlxCnKr3 +Sewn6EAes6aJInKc9Q0ztFijMDvd1GpUk74aTfOTlPf8hAs/hCBcNANExdqtvArB +As8e5ZTZ845b2EzwnexhF7sUMlQMAimTHpKG9n/v55IFDlndmQguLvqcAFLTxWYp +5KeXRKQOKIETNcX2b2TmQcTVL8w0RSXPQQCWPUouwpaYT05KnJe32x+SMsj/D1Fu +1uwJ +-----END CERTIFICATE----- + +# Issuer: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority +# Subject: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. 
OU=TrustCor Certificate Authority +# Label: "TrustCor ECA-1" +# Serial: 9548242946988625984 +# MD5 Fingerprint: 27:92:23:1d:0a:f5:40:7c:e9:e6:6b:9d:d8:f5:e7:6c +# SHA1 Fingerprint: 58:d1:df:95:95:67:6b:63:c0:f0:5b:1c:17:4d:8b:84:0b:c8:78:bd +# SHA256 Fingerprint: 5a:88:5d:b1:9c:01:d9:12:c5:75:93:88:93:8c:af:bb:df:03:1a:b2:d4:8e:91:ee:15:58:9b:42:97:1d:03:9c +-----BEGIN CERTIFICATE----- +MIIEIDCCAwigAwIBAgIJAISCLF8cYtBAMA0GCSqGSIb3DQEBCwUAMIGcMQswCQYD +VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk +MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U +cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxFzAVBgNVBAMMDlRydXN0Q29y +IEVDQS0xMB4XDTE2MDIwNDEyMzIzM1oXDTI5MTIzMTE3MjgwN1owgZwxCzAJBgNV +BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw +IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy +dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEXMBUGA1UEAwwOVHJ1c3RDb3Ig +RUNBLTEwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPj+ARtZ+odnbb +3w9U73NjKYKtR8aja+3+XzP4Q1HpGjORMRegdMTUpwHmspI+ap3tDvl0mEDTPwOA +BoJA6LHip1GnHYMma6ve+heRK9jGrB6xnhkB1Zem6g23xFUfJ3zSCNV2HykVh0A5 +3ThFEXXQmqc04L/NyFIduUd+Dbi7xgz2c1cWWn5DkR9VOsZtRASqnKmcp0yJF4Ou +owReUoCLHhIlERnXDH19MURB6tuvsBzvgdAsxZohmz3tQjtQJvLsznFhBmIhVE5/ +wZ0+fyCMgMsq2JdiyIMzkX2woloPV+g7zPIlstR8L+xNxqE6FXrntl019fZISjZF +ZtS6mFjBAgMBAAGjYzBhMB0GA1UdDgQWBBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAf +BgNVHSMEGDAWgBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAPBgNVHRMBAf8EBTADAQH/ +MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAQEABT41XBVwm8nHc2Fv +civUwo/yQ10CzsSUuZQRg2dd4mdsdXa/uwyqNsatR5Nj3B5+1t4u/ukZMjgDfxT2 +AHMsWbEhBuH7rBiVDKP/mZb3Kyeb1STMHd3BOuCYRLDE5D53sXOpZCz2HAF8P11F +hcCF5yWPldwX8zyfGm6wyuMdKulMY/okYWLW2n62HGz1Ah3UKt1VkOsqEUc8Ll50 +soIipX1TH0XsJ5F95yIW6MBoNtjG8U+ARDL54dHRHareqKucBK+tIA5kmE2la8BI +WJZpTdwHjFGTot+fDz2LYLSCjaoITmJF4PkL0uDgPFveXHEnJcLmA4GLEFPjx1Wi +tJ/X5g== +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation +# Subject: CN=SSL.com Root Certification 
Authority RSA O=SSL Corporation +# Label: "SSL.com Root Certification Authority RSA" +# Serial: 8875640296558310041 +# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29 +# SHA1 Fingerprint: b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb +# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69 +-----BEGIN CERTIFICATE----- +MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE +BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK +DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz +OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv +dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv +bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R +xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX +qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC +C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3 +6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh +/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF +YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E +JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc +US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8 +ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm ++Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi +M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV +HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G +A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV +cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc +Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs +PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/ 
+q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0 +cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr +a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I +H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y +K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu +nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf +oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY +Ic2wBlX7Jz9TkHCpBB5XJ7k= +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation +# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation +# Label: "SSL.com Root Certification Authority ECC" +# Serial: 8495723813297216424 +# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e +# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a +# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65 +-----BEGIN CERTIFICATE----- +MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC +VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T +U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0 +aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz +WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0 +b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS +b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB +BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI +7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg +CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud +EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD +VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T +kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+ +gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com 
EV Root Certification Authority RSA R2 O=SSL Corporation +# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation +# Label: "SSL.com EV Root Certification Authority RSA R2" +# Serial: 6248227494352943350 +# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95 +# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a +# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c +-----BEGIN CERTIFICATE----- +MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV +BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE +CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy +MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G +A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD +DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq +M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf +OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa +4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9 +HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR +aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA +b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ +Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV +PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO +pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu +UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY +MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV +HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4 +9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW +s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5 
+Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg +cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM +79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz +/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt +ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm +Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK +QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ +w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi +S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07 +mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w== +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation +# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation +# Label: "SSL.com EV Root Certification Authority ECC" +# Serial: 3182246526754555285 +# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90 +# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d +# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8 +-----BEGIN CERTIFICATE----- +MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC +VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T +U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx +NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv +dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv +bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49 +AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA +VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku +WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP +MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX +5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ 
+ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg +h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 +# Label: "GlobalSign Root CA - R6" +# Serial: 1417766617973444989252670301619537 +# MD5 Fingerprint: 4f:dd:07:e4:d4:22:64:39:1e:0c:37:42:ea:d1:c6:ae +# SHA1 Fingerprint: 80:94:64:0e:b5:a7:a1:ca:11:9c:1f:dd:d5:9f:81:02:63:a7:fb:d1 +# SHA256 Fingerprint: 2c:ab:ea:fe:37:d0:6c:a2:2a:ba:73:91:c0:03:3d:25:98:29:52:c4:53:64:73:49:76:3a:3a:b5:ad:6c:cf:69 +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIORea7A4Mzw4VlSOb/RVEwDQYJKoZIhvcNAQEMBQAwTDEg +MB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjYxEzARBgNVBAoTCkdsb2Jh +bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTQxMjEwMDAwMDAwWhcNMzQx +MjEwMDAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSNjET +MBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAJUH6HPKZvnsFMp7PPcNCPG0RQssgrRI +xutbPK6DuEGSMxSkb3/pKszGsIhrxbaJ0cay/xTOURQh7ErdG1rG1ofuTToVBu1k +ZguSgMpE3nOUTvOniX9PeGMIyBJQbUJmL025eShNUhqKGoC3GYEOfsSKvGRMIRxD +aNc9PIrFsmbVkJq3MQbFvuJtMgamHvm566qjuL++gmNQ0PAYid/kD3n16qIfKtJw +LnvnvJO7bVPiSHyMEAc4/2ayd2F+4OqMPKq0pPbzlUoSB239jLKJz9CgYXfIWHSw +1CM69106yqLbnQneXUQtkPGBzVeS+n68UARjNN9rkxi+azayOeSsJDa38O+2HBNX +k7besvjihbdzorg1qkXy4J02oW9UivFyVm4uiMVRQkQVlO6jxTiWm05OWgtH8wY2 +SXcwvHE35absIQh1/OZhFj931dmRl4QKbNQCTXTAFO39OfuD8l4UoQSwC+n+7o/h +bguyCLNhZglqsQY6ZZZZwPA1/cnaKI0aEYdwgQqomnUdnjqGBQCe24DWJfncBZ4n +WUx2OVvq+aWh2IMP0f/fMBH5hc8zSPXKbWQULHpYT9NLCEnFlWQaYw55PfWzjMpY +rZxCRXluDocZXFSxZba/jJvcE+kNb7gu3GduyYsRtYQUigAZcIN5kZeR1Bonvzce +MgfYFGM8KEyvAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBSubAWjkxPioufi1xzWx/B/yGdToDAfBgNVHSMEGDAWgBSu +bAWjkxPioufi1xzWx/B/yGdToDANBgkqhkiG9w0BAQwFAAOCAgEAgyXt6NH9lVLN +nsAEoJFp5lzQhN7craJP6Ed41mWYqVuoPId8AorRbrcWc+ZfwFSY1XS+wc3iEZGt 
+Ixg93eFyRJa0lV7Ae46ZeBZDE1ZXs6KzO7V33EByrKPrmzU+sQghoefEQzd5Mr61 +55wsTLxDKZmOMNOsIeDjHfrYBzN2VAAiKrlNIC5waNrlU/yDXNOd8v9EDERm8tLj +vUYAGm0CuiVdjaExUd1URhxN25mW7xocBFymFe944Hn+Xds+qkxV/ZoVqW/hpvvf +cDDpw+5CRu3CkwWJ+n1jez/QcYF8AOiYrg54NMMl+68KnyBr3TsTjxKM4kEaSHpz +oHdpx7Zcf4LIHv5YGygrqGytXm3ABdJ7t+uA/iU3/gKbaKxCXcPu9czc8FB10jZp +nOZ7BN9uBmm23goJSFmH63sUYHpkqmlD75HHTOwY3WzvUy2MmeFe8nI+z1TIvWfs +pA9MRf/TuTAjB0yPEL+GltmZWrSZVxykzLsViVO6LAUP5MSeGbEYNNVMnbrt9x+v +JJUEeKgDu+6B5dpffItKoZB0JaezPkvILFa9x8jvOOJckvB595yEunQtYQEgfn7R +8k8HWV+LLUNS60YMlOH1Zkd5d9VUWx+tJDfLRVpOoERIyNiwmcUVhAn21klJwGW4 +5hpxbqCo8YLoRT5s1gLXCmeDBVrJpBA= +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed +# Label: "OISTE WISeKey Global Root GC CA" +# Serial: 44084345621038548146064804565436152554 +# MD5 Fingerprint: a9:d6:b9:2d:2f:93:64:f8:a5:69:ca:91:e9:68:07:23 +# SHA1 Fingerprint: e0:11:84:5e:34:de:be:88:81:b9:9c:f6:16:26:d1:96:1f:c3:b9:31 +# SHA256 Fingerprint: 85:60:f9:1c:36:24:da:ba:95:70:b5:fe:a0:db:e3:6f:f1:1a:83:23:be:94:86:85:4f:b3:f3:4a:55:71:19:8d +-----BEGIN CERTIFICATE----- +MIICaTCCAe+gAwIBAgIQISpWDK7aDKtARb8roi066jAKBggqhkjOPQQDAzBtMQsw +CQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUgRm91 +bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwg +Um9vdCBHQyBDQTAeFw0xNzA1MDkwOTQ4MzRaFw00MjA1MDkwOTU4MzNaMG0xCzAJ +BgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBGb3Vu +ZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2JhbCBS +b290IEdDIENBMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAETOlQwMYPchi82PG6s4ni +eUqjFqdrVCTbUf/q9Akkwwsin8tqJ4KBDdLArzHkdIJuyiXZjHWd8dvQmqJLIX4W +p2OQ0jnUsYd4XxiWD1AbNTcPasbc2RNNpI6QN+a9WzGRo1QwUjAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUSIcUrOPDnpBgOtfKie7T +rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV 
+57LnyAyMjMPdeYwbY9XJUpROTYJKcx6ygISpJcBMWm1JKWB4E+J+SOtkAjEA2zQg +Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9 +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R1 O=Google Trust Services LLC +# Subject: CN=GTS Root R1 O=Google Trust Services LLC +# Label: "GTS Root R1" +# Serial: 146587175971765017618439757810265552097 +# MD5 Fingerprint: 82:1a:ef:d4:d2:4a:f2:9f:e2:3d:97:06:14:70:72:85 +# SHA1 Fingerprint: e1:c9:50:e6:ef:22:f8:4c:56:45:72:8b:92:20:60:d7:d5:a7:a3:e8 +# SHA256 Fingerprint: 2a:57:54:71:e3:13:40:bc:21:58:1c:bd:2c:f1:3e:15:84:63:20:3e:ce:94:bc:f9:d3:cc:19:6b:f0:9a:54:72 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgIQbkepxUtHDA3sM9CJuRz04TANBgkqhkiG9w0BAQwFADBH +MQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExM +QzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIy +MDAwMDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNl +cnZpY2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaM +f/vo27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vX +mX7wCl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7 +zUjwTcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0P +fyblqAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtc +vfaHszVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4 +Zor8Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUsp +zBmkMiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOO +Rc92wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYW +k70paDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+ +DVrNVjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgF +lQIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBADiW +Cu49tJYeX++dnAsznyvgyv3SjgofQXSlfKqE1OXyHuY3UjKcC9FhHb8owbZEKTV1 +d5iyfNm9dKyKaOOpMQkpAWBz40d8U6iQSifvS9efk+eCNs6aaAyC58/UEBZvXw6Z 
+XPYfcX3v73svfuo21pdwCxXu11xWajOl40k4DLh9+42FpLFZXvRq4d2h9mREruZR +gyFmxhE+885H7pwoHyXa/6xmld01D1zvICxi/ZG6qcz8WpyTgYMpl0p8WnK0OdC3 +d8t5/Wk6kjftbjhlRn7pYL15iJdfOBL07q9bgsiG1eGZbYwE8na6SfZu6W0eX6Dv +J4J2QPim01hcDyxC2kLGe4g0x8HYRZvBPsVhHdljUEn2NIVq4BjFbkerQUIpm/Zg +DdIx02OYI5NaAIFItO/Nis3Jz5nu2Z6qNuFoS3FJFDYoOj0dzpqPJeaAcWErtXvM ++SUWgeExX6GjfhaknBZqlxi9dnKlC54dNuYvoS++cJEPqOba+MSSQGwlfnuzCdyy +F62ARPBopY+Udf90WuioAnwMCeKpSwughQtiue+hMZL77/ZRBIls6Kl0obsXs7X9 +SQ98POyDGCBDTtWTurQ0sR8WNh8M5mQ5Fkzc4P4dyKliPUDqysU0ArSuiYgzNdws +E3PYJ/HQcu51OyLemGhmW/HGY0dVHLqlCFF1pkgl +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R2 O=Google Trust Services LLC +# Subject: CN=GTS Root R2 O=Google Trust Services LLC +# Label: "GTS Root R2" +# Serial: 146587176055767053814479386953112547951 +# MD5 Fingerprint: 44:ed:9a:0e:a4:09:3b:00:f2:ae:4c:a3:c6:61:b0:8b +# SHA1 Fingerprint: d2:73:96:2a:2a:5e:39:9f:73:3f:e1:c7:1e:64:3f:03:38:34:fc:4d +# SHA256 Fingerprint: c4:5d:7b:b0:8e:6d:67:e6:2e:42:35:11:0b:56:4e:5f:78:fd:92:ef:05:8c:84:0a:ea:4e:64:55:d7:58:5c:60 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgIQbkepxlqz5yDFMJo/aFLybzANBgkqhkiG9w0BAQwFADBH +MQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExM +QzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIy +MDAwMDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNl +cnZpY2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQDO3v2m++zsFDQ8BwZabFn3GTXd98GdVarTzTukk3Lv +CvptnfbwhYBboUhSnznFt+4orO/LdmgUud+tAWyZH8QiHZ/+cnfgLFuv5AS/T3Kg +GjSY6Dlo7JUle3ah5mm5hRm9iYz+re026nO8/4Piy33B0s5Ks40FnotJk9/BW9Bu +XvAuMC6C/Pq8tBcKSOWIm8Wba96wyrQD8Nr0kLhlZPdcTK3ofmZemde4wj7I0BOd +re7kRXuJVfeKH2JShBKzwkCX44ofR5GmdFrS+LFjKBC4swm4VndAoiaYecb+3yXu +PuWgf9RhD1FLPD+M2uFwdNjCaKH5wQzpoeJ/u1U8dgbuak7MkogwTZq9TwtImoS1 +mKPV+3PBV2HdKFZ1E66HjucMUQkQdYhMvI35ezzUIkgfKtzra7tEscszcTJGr61K +8YzodDqs5xoic4DSMPclQsciOzsSrZYuxsN2B6ogtzVJV+mSSeh2FnIxZyuWfoqj 
+x5RWIr9qS34BIbIjMt/kmkRtWVtd9QCgHJvGeJeNkP+byKq0rxFROV7Z+2et1VsR +nTKaG73VululycslaVNVJ1zgyjbLiGH7HrfQy+4W+9OmTN6SpdTi3/UGVN4unUu0 +kzCqgc7dGtxRcw1PcOnlthYhGXmy5okLdWTK1au8CcEYof/UVKGFPP0UJAOyh9Ok +twIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQUu//KjiOfT5nK2+JopqUVJxce2Q4wDQYJKoZIhvcNAQEMBQADggIBALZp +8KZ3/p7uC4Gt4cCpx/k1HUCCq+YEtN/L9x0Pg/B+E02NjO7jMyLDOfxA325BS0JT +vhaI8dI4XsRomRyYUpOM52jtG2pzegVATX9lO9ZY8c6DR2Dj/5epnGB3GFW1fgiT +z9D2PGcDFWEJ+YF59exTpJ/JjwGLc8R3dtyDovUMSRqodt6Sm2T4syzFJ9MHwAiA +pJiS4wGWAqoC7o87xdFtCjMwc3i5T1QWvwsHoaRc5svJXISPD+AVdyx+Jn7axEvb +pxZ3B7DNdehyQtaVhJ2Gg/LkkM0JR9SLA3DaWsYDQvTtN6LwG1BUSw7YhN4ZKJmB +R64JGz9I0cNv4rBgF/XuIwKl2gBbbZCr7qLpGzvpx0QnRY5rn/WkhLx3+WuXrD5R +RaIRpsyF7gpo8j5QOHokYh4XIDdtak23CZvJ/KRY9bb7nE4Yu5UC56GtmwfuNmsk +0jmGwZODUNKBRqhfYlcsu2xkiAhu7xNUX90txGdj08+JN7+dIPT7eoOboB6BAFDC +5AwiWVIQ7UNWhwD4FFKnHYuTjKJNRn8nxnGbJN7k2oaLDX5rIMHAnuFl2GqjpuiF +izoHCBy69Y9Vmhh1fuXsgWbRIXOhNUQLgD1bnF5vKheW0YMjiGZt5obicDIvUiLn +yOd/xCxgXS/Dr55FBcOEArf9LAhST4Ldo/DUhgkC +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R3 O=Google Trust Services LLC +# Subject: CN=GTS Root R3 O=Google Trust Services LLC +# Label: "GTS Root R3" +# Serial: 146587176140553309517047991083707763997 +# MD5 Fingerprint: 1a:79:5b:6b:04:52:9c:5d:c7:74:33:1b:25:9a:f9:25 +# SHA1 Fingerprint: 30:d4:24:6f:07:ff:db:91:89:8a:0b:e9:49:66:11:eb:8c:5e:46:e5 +# SHA256 Fingerprint: 15:d5:b8:77:46:19:ea:7d:54:ce:1c:a6:d0:b0:c4:03:e0:37:a9:17:f1:31:e8:a0:4e:1e:6b:7a:71:ba:bc:e5 +-----BEGIN CERTIFICATE----- +MIICDDCCAZGgAwIBAgIQbkepx2ypcyRAiQ8DVd2NHTAKBggqhkjOPQQDAzBHMQsw +CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU +MBIGA1UEAxMLR1RTIFJvb3QgUjMwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw +MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp +Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjMwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAAQfTzOHMymKoYTey8chWEGJ6ladK0uFxh1MJ7x/JlFyb+Kf1qPKzEUURout 
+736GjOyxfi//qXGdGIRFBEFVbivqJn+7kAHjSxm65FSWRQmx1WyRRK2EE46ajA2A +DDL24CejQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud +DgQWBBTB8Sa6oC2uhYHP0/EqEr24Cmf9vDAKBggqhkjOPQQDAwNpADBmAjEAgFuk +fCPAlaUs3L6JbyO5o91lAFJekazInXJ0glMLfalAvWhgxeG4VDvBNhcl2MG9AjEA +njWSdIUlUfUk7GRSJFClH9voy8l27OyCbvWFGFPouOOaKaqW04MjyaR7YbPMAuhd +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R4 O=Google Trust Services LLC +# Subject: CN=GTS Root R4 O=Google Trust Services LLC +# Label: "GTS Root R4" +# Serial: 146587176229350439916519468929765261721 +# MD5 Fingerprint: 5d:b6:6a:c4:60:17:24:6a:1a:99:a8:4b:ee:5e:b4:26 +# SHA1 Fingerprint: 2a:1d:60:27:d9:4a:b1:0a:1c:4d:91:5c:cd:33:a0:cb:3e:2d:54:cb +# SHA256 Fingerprint: 71:cc:a5:39:1f:9e:79:4b:04:80:25:30:b3:63:e1:21:da:8a:30:43:bb:26:66:2f:ea:4d:ca:7f:c9:51:a4:bd +-----BEGIN CERTIFICATE----- +MIICCjCCAZGgAwIBAgIQbkepyIuUtui7OyrYorLBmTAKBggqhkjOPQQDAzBHMQsw +CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU +MBIGA1UEAxMLR1RTIFJvb3QgUjQwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw +MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp +Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjQwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAATzdHOnaItgrkO4NcWBMHtLSZ37wWHO5t5GvWvVYRg1rkDdc/eJkTBa6zzu +hXyiQHY7qca4R9gq55KRanPpsXI5nymfopjTX15YhmUPoYRlBtHci8nHc8iMai/l +xKvRHYqjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud +DgQWBBSATNbrdP9JNqPV2Py1PsVq8JQdjDAKBggqhkjOPQQDAwNnADBkAjBqUFJ0 +CMRw3J5QdCHojXohw0+WbhXRIjVhLfoIN+4Zba3bssx9BzT1YBkstTTZbyACMANx +sbqjYAuG7ZoIapVon+Kz4ZNkfF6Tpt95LY2F45TPI11xzPKwTdb+mciUqXWi4w== +-----END CERTIFICATE----- + +# Issuer: CN=UCA Global G2 Root O=UniTrust +# Subject: CN=UCA Global G2 Root O=UniTrust +# Label: "UCA Global G2 Root" +# Serial: 124779693093741543919145257850076631279 +# MD5 Fingerprint: 80:fe:f0:c4:4a:f0:5c:62:32:9f:1c:ba:78:a9:50:f8 +# SHA1 Fingerprint: 28:f9:78:16:19:7a:ff:18:25:18:aa:44:fe:c1:a0:ce:5c:b6:4c:8a +# SHA256 Fingerprint: 
9b:ea:11:c9:76:fe:01:47:64:c1:be:56:a6:f9:14:b5:a5:60:31:7a:bd:99:88:39:33:82:e5:16:1a:a0:49:3c +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIQXd+x2lqj7V2+WmUgZQOQ7zANBgkqhkiG9w0BAQsFADA9 +MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxGzAZBgNVBAMMElVDQSBH +bG9iYWwgRzIgUm9vdDAeFw0xNjAzMTEwMDAwMDBaFw00MDEyMzEwMDAwMDBaMD0x +CzAJBgNVBAYTAkNOMREwDwYDVQQKDAhVbmlUcnVzdDEbMBkGA1UEAwwSVUNBIEds +b2JhbCBHMiBSb290MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxeYr +b3zvJgUno4Ek2m/LAfmZmqkywiKHYUGRO8vDaBsGxUypK8FnFyIdK+35KYmToni9 +kmugow2ifsqTs6bRjDXVdfkX9s9FxeV67HeToI8jrg4aA3++1NDtLnurRiNb/yzm +VHqUwCoV8MmNsHo7JOHXaOIxPAYzRrZUEaalLyJUKlgNAQLx+hVRZ2zA+te2G3/R +VogvGjqNO7uCEeBHANBSh6v7hn4PJGtAnTRnvI3HLYZveT6OqTwXS3+wmeOwcWDc +C/Vkw85DvG1xudLeJ1uK6NjGruFZfc8oLTW4lVYa8bJYS7cSN8h8s+1LgOGN+jIj +tm+3SJUIsUROhYw6AlQgL9+/V087OpAh18EmNVQg7Mc/R+zvWr9LesGtOxdQXGLY +D0tK3Cv6brxzks3sx1DoQZbXqX5t2Okdj4q1uViSukqSKwxW/YDrCPBeKW4bHAyv +j5OJrdu9o54hyokZ7N+1wxrrFv54NkzWbtA+FxyQF2smuvt6L78RHBgOLXMDj6Dl +NaBa4kx1HXHhOThTeEDMg5PXCp6dW4+K5OXgSORIskfNTip1KnvyIvbJvgmRlld6 +iIis7nCs+dwp4wwcOxJORNanTrAmyPPZGpeRaOrvjUYG0lZFWJo8DA+DuAUlwznP +O6Q0ibd5Ei9Hxeepl2n8pndntd978XplFeRhVmUCAwEAAaNCMEAwDgYDVR0PAQH/ +BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFIHEjMz15DD/pQwIX4wV +ZyF0Ad/fMA0GCSqGSIb3DQEBCwUAA4ICAQATZSL1jiutROTL/7lo5sOASD0Ee/oj +L3rtNtqyzm325p7lX1iPyzcyochltq44PTUbPrw7tgTQvPlJ9Zv3hcU2tsu8+Mg5 +1eRfB70VVJd0ysrtT7q6ZHafgbiERUlMjW+i67HM0cOU2kTC5uLqGOiiHycFutfl +1qnN3e92mI0ADs0b+gO3joBYDic/UvuUospeZcnWhNq5NXHzJsBPd+aBJ9J3O5oU +b3n09tDh05S60FdRvScFDcH9yBIw7m+NESsIndTUv4BFFJqIRNow6rSn4+7vW4LV +PtateJLbXDzz2K36uGt/xDYotgIVilQsnLAXc47QN6MUPJiVAAwpBVueSUmxX8fj +y88nZY41F7dXyDDZQVu5FLbowg+UMaeUmMxq67XhJ/UQqAHojhJi6IjMtX9Gl8Cb +EGY4GjZGXyJoPd/JxhMnq1MGrKI8hgZlb7F+sSlEmqO6SWkoaY/X5V+tBIZkbxqg +DMUIYs6Ao9Dz7GjevjPHF1t/gMRMTLGmhIrDO7gJzRSBuhjjVFc2/tsvfEehOjPI ++Vg7RE+xygKJBJYoaMVLuCaJu9YzL1DV/pqJuhgyklTGW+Cd+V7lDSKb9triyCGy +YiGqhkCyLmTTX8jjfhFnRR8F/uOi77Oos/N9j/gMHyIfLXC0uAE0djAA5SN4p1bX +UB+K+wb1whnw0A== 
+-----END CERTIFICATE----- + +# Issuer: CN=UCA Extended Validation Root O=UniTrust +# Subject: CN=UCA Extended Validation Root O=UniTrust +# Label: "UCA Extended Validation Root" +# Serial: 106100277556486529736699587978573607008 +# MD5 Fingerprint: a1:f3:5f:43:c6:34:9b:da:bf:8c:7e:05:53:ad:96:e2 +# SHA1 Fingerprint: a3:a1:b0:6f:24:61:23:4a:e3:36:a5:c2:37:fc:a6:ff:dd:f0:d7:3a +# SHA256 Fingerprint: d4:3a:f9:b3:54:73:75:5c:96:84:fc:06:d7:d8:cb:70:ee:5c:28:e7:73:fb:29:4e:b4:1e:e7:17:22:92:4d:24 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgIQT9Irj/VkyDOeTzRYZiNwYDANBgkqhkiG9w0BAQsFADBH +MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNVBAMMHFVDQSBF +eHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwHhcNMTUwMzEzMDAwMDAwWhcNMzgxMjMx +MDAwMDAwWjBHMQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNV +BAMMHFVDQSBFeHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCpCQcoEwKwmeBkqh5DFnpzsZGgdT6o+uM4AHrsiWog +D4vFsJszA1qGxliG1cGFu0/GnEBNyr7uaZa4rYEwmnySBesFK5pI0Lh2PpbIILvS +sPGP2KxFRv+qZ2C0d35qHzwaUnoEPQc8hQ2E0B92CvdqFN9y4zR8V05WAT558aop +O2z6+I9tTcg1367r3CTueUWnhbYFiN6IXSV8l2RnCdm/WhUFhvMJHuxYMjMR83dk +sHYf5BA1FxvyDrFspCqjc/wJHx4yGVMR59mzLC52LqGj3n5qiAno8geK+LLNEOfi +c0CTuwjRP+H8C5SzJe98ptfRr5//lpr1kXuYC3fUfugH0mK1lTnj8/FtDw5lhIpj +VMWAtuCeS31HJqcBCF3RiJ7XwzJE+oJKCmhUfzhTA8ykADNkUVkLo4KRel7sFsLz +KuZi2irbWWIQJUoqgQtHB0MGcIfS+pMRKXpITeuUx3BNr2fVUbGAIAEBtHoIppB/ +TuDvB0GHr2qlXov7z1CymlSvw4m6WC31MJixNnI5fkkE/SmnTHnkBVfblLkWU41G +sx2VYVdWf6/wFlthWG82UBEL2KwrlRYaDh8IzTY0ZRBiZtWAXxQgXy0MoHgKaNYs +1+lvK9JKBZP8nm9rZ/+I8U6laUpSNwXqxhaN0sSZ0YIrO7o1dfdRUVjzyAfd5LQD +fwIDAQABo0IwQDAdBgNVHQ4EFgQU2XQ65DA9DfcS3H5aBZ8eNJr34RQwDwYDVR0T +AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQADggIBADaN +l8xCFWQpN5smLNb7rhVpLGsaGvdftvkHTFnq88nIua7Mui563MD1sC3AO6+fcAUR +ap8lTwEpcOPlDOHqWnzcSbvBHiqB9RZLcpHIojG5qtr8nR/zXUACE/xOHAbKsxSQ +VBcZEhrxH9cMaVr2cXj0lH2RC47skFSOvG+hTKv8dGT9cZr4QQehzZHkPJrgmzI5 +c6sq1WnIeJEmMX3ixzDx/BR4dxIOE/TdFpS/S2d7cFOFyrC78zhNLJA5wA3CXWvp 
+4uXViI3WLL+rG761KIcSF3Ru/H38j9CHJrAb+7lsq+KePRXBOy5nAliRn+/4Qh8s +t2j1da3Ptfb/EX3C8CSlrdP6oDyp+l3cpaDvRKS+1ujl5BOWF3sGPjLtx7dCvHaj +2GU4Kzg1USEODm8uNBNA4StnDG1KQTAYI1oyVZnJF+A83vbsea0rWBmirSwiGpWO +vpaQXUJXxPkUAzUrHC1RVwinOt4/5Mi0A3PCwSaAuwtCH60NryZy2sy+s6ODWA2C +xR9GUeOcGMyNm43sSet1UNWMKFnKdDTajAshqx7qG+XH/RU+wBeq+yNuJkbL+vmx +cmtpzyKEC2IPrNkZAJSidjzULZrtBJ4tBmIQN1IchXIbJ+XMxjHsN+xjWZsLHXbM +fjKaiJUINlK73nZfdklJrX+9ZSCyycErdhh2n1ax +-----END CERTIFICATE----- + +# Issuer: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 +# Subject: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 +# Label: "Certigna Root CA" +# Serial: 269714418870597844693661054334862075617 +# MD5 Fingerprint: 0e:5c:30:62:27:eb:5b:bc:d7:ae:62:ba:e9:d5:df:77 +# SHA1 Fingerprint: 2d:0d:52:14:ff:9e:ad:99:24:01:74:20:47:6e:6c:85:27:27:f5:43 +# SHA256 Fingerprint: d4:8d:3d:23:ee:db:50:a4:59:e5:51:97:60:1c:27:77:4b:9d:7b:18:c9:4d:5a:05:95:11:a1:02:50:b9:31:68 +-----BEGIN CERTIFICATE----- +MIIGWzCCBEOgAwIBAgIRAMrpG4nxVQMNo+ZBbcTjpuEwDQYJKoZIhvcNAQELBQAw +WjELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCURoaW15b3RpczEcMBoGA1UECwwTMDAw +MiA0ODE0NjMwODEwMDAzNjEZMBcGA1UEAwwQQ2VydGlnbmEgUm9vdCBDQTAeFw0x +MzEwMDEwODMyMjdaFw0zMzEwMDEwODMyMjdaMFoxCzAJBgNVBAYTAkZSMRIwEAYD +VQQKDAlEaGlteW90aXMxHDAaBgNVBAsMEzAwMDIgNDgxNDYzMDgxMDAwMzYxGTAX +BgNVBAMMEENlcnRpZ25hIFJvb3QgQ0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw +ggIKAoICAQDNGDllGlmx6mQWDoyUJJV8g9PFOSbcDO8WV43X2KyjQn+Cyu3NW9sO +ty3tRQgXstmzy9YXUnIo245Onoq2C/mehJpNdt4iKVzSs9IGPjA5qXSjklYcoW9M +CiBtnyN6tMbaLOQdLNyzKNAT8kxOAkmhVECe5uUFoC2EyP+YbNDrihqECB63aCPu +I9Vwzm1RaRDuoXrC0SIxwoKF0vJVdlB8JXrJhFwLrN1CTivngqIkicuQstDuI7pm +TLtipPlTWmR7fJj6o0ieD5Wupxj0auwuA0Wv8HT4Ks16XdG+RCYyKfHx9WzMfgIh +C59vpD++nVPiz32pLHxYGpfhPTc3GGYo0kDFUYqMwy3OU4gkWGQwFsWq4NYKpkDf +ePb1BHxpE4S80dGnBs8B92jAqFe7OmGtBIyT46388NtEbVncSVmurJqZNjBBe3Yz +IoejwpKGbvlw7q6Hh5UbxHq9MfPU0uWZ/75I7HX1eBYdpnDBfzwboZL7z8g81sWT +Co/1VTp2lc5ZmIoJlXcymoO6LAQ6l73UL77XbJuiyn1tJslV1c/DeVIICZkHJC1k 
+JWumIWmbat10TWuXekG9qxf5kBdIjzb5LdXF2+6qhUVB+s06RbFo5jZMm5BX7CO5 +hwjCxAnxl4YqKE3idMDaxIzb3+KhF1nOJFl0Mdp//TBt2dzhauH8XwIDAQABo4IB +GjCCARYwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FBiHVuBud+4kNTxOc5of1uHieX4rMB8GA1UdIwQYMBaAFBiHVuBud+4kNTxOc5of +1uHieX4rMEQGA1UdIAQ9MDswOQYEVR0gADAxMC8GCCsGAQUFBwIBFiNodHRwczov +L3d3d3cuY2VydGlnbmEuZnIvYXV0b3JpdGVzLzBtBgNVHR8EZjBkMC+gLaArhilo +dHRwOi8vY3JsLmNlcnRpZ25hLmZyL2NlcnRpZ25hcm9vdGNhLmNybDAxoC+gLYYr +aHR0cDovL2NybC5kaGlteW90aXMuY29tL2NlcnRpZ25hcm9vdGNhLmNybDANBgkq +hkiG9w0BAQsFAAOCAgEAlLieT/DjlQgi581oQfccVdV8AOItOoldaDgvUSILSo3L +6btdPrtcPbEo/uRTVRPPoZAbAh1fZkYJMyjhDSSXcNMQH+pkV5a7XdrnxIxPTGRG +HVyH41neQtGbqH6mid2PHMkwgu07nM3A6RngatgCdTer9zQoKJHyBApPNeNgJgH6 +0BGM+RFq7q89w1DTj18zeTyGqHNFkIwgtnJzFyO+B2XleJINugHA64wcZr+shncB +lA2c5uk5jR+mUYyZDDl34bSb+hxnV29qao6pK0xXeXpXIs/NX2NGjVxZOob4Mkdi +o2cNGJHc+6Zr9UhhcyNZjgKnvETq9Emd8VRY+WCv2hikLyhF3HqgiIZd8zvn/yk1 +gPxkQ5Tm4xxvvq0OKmOZK8l+hfZx6AYDlf7ej0gcWtSS6Cvu5zHbugRqh5jnxV/v +faci9wHYTfmJ0A6aBVmknpjZbyvKcL5kwlWj9Omvw5Ip3IgWJJk8jSaYtlu3zM63 +Nwf9JtmYhST/WSMDmu2dnajkXjjO11INb9I/bbEFa0nOipFGc/T2L/Coc3cOZayh +jWZSaX5LaAzHHjcng6WMxwLkFM1JAbBzs/3GkDpv0mztO+7skb6iQ12LAEpmJURw +3kAP+HwV96LOPNdeE4yBFxgX0b3xdxA61GU5wSesVywlVP+i2k+KYTlerj1KjL0= +-----END CERTIFICATE----- + +# Issuer: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI +# Subject: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI +# Label: "emSign Root CA - G1" +# Serial: 235931866688319308814040 +# MD5 Fingerprint: 9c:42:84:57:dd:cb:0b:a7:2e:95:ad:b6:f3:da:bc:ac +# SHA1 Fingerprint: 8a:c7:ad:8f:73:ac:4e:c1:b5:75:4d:a5:40:f4:fc:cf:7c:b5:8e:8c +# SHA256 Fingerprint: 40:f6:af:03:46:a9:9a:a1:cd:1d:55:5a:4e:9c:ce:62:c7:f9:63:46:03:ee:40:66:15:83:3d:c8:c8:d0:03:67 +-----BEGIN CERTIFICATE----- +MIIDlDCCAnygAwIBAgIKMfXkYgxsWO3W2DANBgkqhkiG9w0BAQsFADBnMQswCQYD +VQQGEwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBU 
+ZWNobm9sb2dpZXMgTGltaXRlZDEcMBoGA1UEAxMTZW1TaWduIFJvb3QgQ0EgLSBH +MTAeFw0xODAyMTgxODMwMDBaFw00MzAyMTgxODMwMDBaMGcxCzAJBgNVBAYTAklO +MRMwEQYDVQQLEwplbVNpZ24gUEtJMSUwIwYDVQQKExxlTXVkaHJhIFRlY2hub2xv +Z2llcyBMaW1pdGVkMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEcxMIIBIjAN +BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAk0u76WaK7p1b1TST0Bsew+eeuGQz +f2N4aLTNLnF115sgxk0pvLZoYIr3IZpWNVrzdr3YzZr/k1ZLpVkGoZM0Kd0WNHVO +8oG0x5ZOrRkVUkr+PHB1cM2vK6sVmjM8qrOLqs1D/fXqcP/tzxE7lM5OMhbTI0Aq +d7OvPAEsbO2ZLIvZTmmYsvePQbAyeGHWDV/D+qJAkh1cF+ZwPjXnorfCYuKrpDhM +tTk1b+oDafo6VGiFbdbyL0NVHpENDtjVaqSW0RM8LHhQ6DqS0hdW5TUaQBw+jSzt +Od9C4INBdN+jzcKGYEho42kLVACL5HZpIQ15TjQIXhTCzLG3rdd8cIrHhQIDAQAB +o0IwQDAdBgNVHQ4EFgQU++8Nhp6w492pufEhF38+/PB3KxowDgYDVR0PAQH/BAQD +AgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAFn/8oz1h31x +PaOfG1vR2vjTnGs2vZupYeveFix0PZ7mddrXuqe8QhfnPZHr5X3dPpzxz5KsbEjM +wiI/aTvFthUvozXGaCocV685743QNcMYDHsAVhzNixl03r4PEuDQqqE/AjSxcM6d +GNYIAwlG7mDgfrbESQRRfXBgvKqy/3lyeqYdPV8q+Mri/Tm3R7nrft8EI6/6nAYH +6ftjk4BAtcZsCjEozgyfz7MjNYBBjWzEN3uBL4ChQEKF6dk4jeihU80Bv2noWgby +RQuQ+q7hv53yrlc8pa6yVvSLZUDp/TGBLPQ5Cdjua6e0ph0VpZj3AYHYhX3zUVxx +iN66zB+Afko= +-----END CERTIFICATE----- + +# Issuer: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI +# Subject: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI +# Label: "emSign ECC Root CA - G3" +# Serial: 287880440101571086945156 +# MD5 Fingerprint: ce:0b:72:d1:9f:88:8e:d0:50:03:e8:e3:b8:8b:67:40 +# SHA1 Fingerprint: 30:43:fa:4f:f2:57:dc:a0:c3:80:ee:2e:58:ea:78:b2:3f:e6:bb:c1 +# SHA256 Fingerprint: 86:a1:ec:ba:08:9c:4a:8d:3b:be:27:34:c6:12:ba:34:1d:81:3e:04:3c:f9:e8:a8:62:cd:5c:57:a3:6b:be:6b +-----BEGIN CERTIFICATE----- +MIICTjCCAdOgAwIBAgIKPPYHqWhwDtqLhDAKBggqhkjOPQQDAzBrMQswCQYDVQQG +EwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNo +bm9sb2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0g +RzMwHhcNMTgwMjE4MTgzMDAwWhcNNDMwMjE4MTgzMDAwWjBrMQswCQYDVQQGEwJJ 
+TjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNobm9s +b2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0gRzMw +djAQBgcqhkjOPQIBBgUrgQQAIgNiAAQjpQy4LRL1KPOxst3iAhKAnjlfSU2fySU0 +WXTsuwYc58Byr+iuL+FBVIcUqEqy6HyC5ltqtdyzdc6LBtCGI79G1Y4PPwT01xyS +fvalY8L1X44uT6EYGQIrMgqCZH0Wk9GjQjBAMB0GA1UdDgQWBBR8XQKEE9TMipuB +zhccLikenEhjQjAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggq +hkjOPQQDAwNpADBmAjEAvvNhzwIQHWSVB7gYboiFBS+DCBeQyh+KTOgNG3qxrdWB +CUfvO6wIBHxcmbHtRwfSAjEAnbpV/KlK6O3t5nYBQnvI+GDZjVGLVTv7jHvrZQnD ++JbNR6iC8hZVdyR+EhCVBCyj +-----END CERTIFICATE----- + +# Issuer: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI +# Subject: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI +# Label: "emSign Root CA - C1" +# Serial: 825510296613316004955058 +# MD5 Fingerprint: d8:e3:5d:01:21:fa:78:5a:b0:df:ba:d2:ee:2a:5f:68 +# SHA1 Fingerprint: e7:2e:f1:df:fc:b2:09:28:cf:5d:d4:d5:67:37:b1:51:cb:86:4f:01 +# SHA256 Fingerprint: 12:56:09:aa:30:1d:a0:a2:49:b9:7a:82:39:cb:6a:34:21:6f:44:dc:ac:9f:39:54:b1:42:92:f2:e8:c8:60:8f +-----BEGIN CERTIFICATE----- +MIIDczCCAlugAwIBAgILAK7PALrEzzL4Q7IwDQYJKoZIhvcNAQELBQAwVjELMAkG +A1UEBhMCVVMxEzARBgNVBAsTCmVtU2lnbiBQS0kxFDASBgNVBAoTC2VNdWRocmEg +SW5jMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEMxMB4XDTE4MDIxODE4MzAw +MFoXDTQzMDIxODE4MzAwMFowVjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln +biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMRwwGgYDVQQDExNlbVNpZ24gUm9v +dCBDQSAtIEMxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz+upufGZ +BczYKCFK83M0UYRWEPWgTywS4/oTmifQz/l5GnRfHXk5/Fv4cI7gklL35CX5VIPZ +HdPIWoU/Xse2B+4+wM6ar6xWQio5JXDWv7V7Nq2s9nPczdcdioOl+yuQFTdrHCZH +3DspVpNqs8FqOp099cGXOFgFixwR4+S0uF2FHYP+eF8LRWgYSKVGczQ7/g/IdrvH +GPMF0Ybzhe3nudkyrVWIzqa2kbBPrH4VI5b2P/AgNBbeCsbEBEV5f6f9vtKppa+c +xSMq9zwhbL2vj07FOrLzNBL834AaSaTUqZX3noleoomslMuoaJuvimUnzYnu3Yy1 +aylwQ6BpC+S5DwIDAQABo0IwQDAdBgNVHQ4EFgQU/qHgcB4qAzlSWkK+XJGFehiq +TbUwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBAMJKVvoVIXsoounlHfv4LcQ5lkFMOycsxGwYFYDGrK9HWS8mC+M2sO87 
+/kOXSTKZEhVb3xEp/6tT+LvBeA+snFOvV71ojD1pM/CjoCNjO2RnIkSt1XHLVip4 +kqNPEjE2NuLe/gDEo2APJ62gsIq1NnpSob0n9CAnYuhNlCQT5AoE6TyrLshDCUrG +YQTlSTR+08TI9Q/Aqum6VF7zYytPT1DU/rl7mYw9wC68AivTxEDkigcxHpvOJpkT ++xHqmiIMERnHXhuBUDDIlhJu58tBf5E7oke3VIAb3ADMmpDqw8NQBmIMMMAVSKeo +WXzhriKi4gp6D/piq1JM4fHfyr6DDUI= +-----END CERTIFICATE----- + +# Issuer: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI +# Subject: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI +# Label: "emSign ECC Root CA - C3" +# Serial: 582948710642506000014504 +# MD5 Fingerprint: 3e:53:b3:a3:81:ee:d7:10:f8:d3:b0:1d:17:92:f5:d5 +# SHA1 Fingerprint: b6:af:43:c2:9b:81:53:7d:f6:ef:6b:c3:1f:1f:60:15:0c:ee:48:66 +# SHA256 Fingerprint: bc:4d:80:9b:15:18:9d:78:db:3e:1d:8c:f4:f9:72:6a:79:5d:a1:64:3c:a5:f1:35:8e:1d:db:0e:dc:0d:7e:b3 +-----BEGIN CERTIFICATE----- +MIICKzCCAbGgAwIBAgIKe3G2gla4EnycqDAKBggqhkjOPQQDAzBaMQswCQYDVQQG +EwJVUzETMBEGA1UECxMKZW1TaWduIFBLSTEUMBIGA1UEChMLZU11ZGhyYSBJbmMx +IDAeBgNVBAMTF2VtU2lnbiBFQ0MgUm9vdCBDQSAtIEMzMB4XDTE4MDIxODE4MzAw +MFoXDTQzMDIxODE4MzAwMFowWjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln +biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMSAwHgYDVQQDExdlbVNpZ24gRUND +IFJvb3QgQ0EgLSBDMzB2MBAGByqGSM49AgEGBSuBBAAiA2IABP2lYa57JhAd6bci +MK4G9IGzsUJxlTm801Ljr6/58pc1kjZGDoeVjbk5Wum739D+yAdBPLtVb4Ojavti +sIGJAnB9SMVK4+kiVCJNk7tCDK93nCOmfddhEc5lx/h//vXyqaNCMEAwHQYDVR0O +BBYEFPtaSNCAIEDyqOkAB2kZd6fmw/TPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB +Af8EBTADAQH/MAoGCCqGSM49BAMDA2gAMGUCMQC02C8Cif22TGK6Q04ThHK1rt0c +3ta13FaPWEBaLd4gTCKDypOofu4SQMfWh0/434UCMBwUZOR8loMRnLDRWmFLpg9J +0wD8ofzkpf9/rdcw0Md3f76BB1UwUCAU9Vc4CqgxUQ== +-----END CERTIFICATE----- + +# Issuer: CN=Hongkong Post Root CA 3 O=Hongkong Post +# Subject: CN=Hongkong Post Root CA 3 O=Hongkong Post +# Label: "Hongkong Post Root CA 3" +# Serial: 46170865288971385588281144162979347873371282084 +# MD5 Fingerprint: 11:fc:9f:bd:73:30:02:8a:fd:3f:f3:58:b9:cb:20:f0 +# SHA1 Fingerprint: 58:a2:d0:ec:20:52:81:5b:c1:f3:f8:64:02:24:4e:c2:8e:02:4b:02 +# SHA256 
Fingerprint: 5a:2f:c0:3f:0c:83:b0:90:bb:fa:40:60:4b:09:88:44:6c:76:36:18:3d:f9:84:6e:17:10:1a:44:7f:b8:ef:d6 +-----BEGIN CERTIFICATE----- +MIIFzzCCA7egAwIBAgIUCBZfikyl7ADJk0DfxMauI7gcWqQwDQYJKoZIhvcNAQEL +BQAwbzELMAkGA1UEBhMCSEsxEjAQBgNVBAgTCUhvbmcgS29uZzESMBAGA1UEBxMJ +SG9uZyBLb25nMRYwFAYDVQQKEw1Ib25na29uZyBQb3N0MSAwHgYDVQQDExdIb25n +a29uZyBQb3N0IFJvb3QgQ0EgMzAeFw0xNzA2MDMwMjI5NDZaFw00MjA2MDMwMjI5 +NDZaMG8xCzAJBgNVBAYTAkhLMRIwEAYDVQQIEwlIb25nIEtvbmcxEjAQBgNVBAcT +CUhvbmcgS29uZzEWMBQGA1UEChMNSG9uZ2tvbmcgUG9zdDEgMB4GA1UEAxMXSG9u +Z2tvbmcgUG9zdCBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCziNfqzg8gTr7m1gNt7ln8wlffKWihgw4+aMdoWJwcYEuJQwy51BWy7sFO +dem1p+/l6TWZ5Mwc50tfjTMwIDNT2aa71T4Tjukfh0mtUC1Qyhi+AViiE3CWu4mI +VoBc+L0sPOFMV4i707mV78vH9toxdCim5lSJ9UExyuUmGs2C4HDaOym71QP1mbpV +9WTRYA6ziUm4ii8F0oRFKHyPaFASePwLtVPLwpgchKOesL4jpNrcyCse2m5FHomY +2vkALgbpDDtw1VAliJnLzXNg99X/NWfFobxeq81KuEXryGgeDQ0URhLj0mRiikKY +vLTGCAj4/ahMZJx2Ab0vqWwzD9g/KLg8aQFChn5pwckGyuV6RmXpwtZQQS4/t+Tt +bNe/JgERohYpSms0BpDsE9K2+2p20jzt8NYt3eEV7KObLyzJPivkaTv/ciWxNoZb +x39ri1UbSsUgYT2uy1DhCDq+sI9jQVMwCFk8mB13umOResoQUGC/8Ne8lYePl8X+ +l2oBlKN8W4UdKjk60FSh0Tlxnf0h+bV78OLgAo9uliQlLKAeLKjEiafv7ZkGL7YK +TE/bosw3Gq9HhS2KX8Q0NEwA/RiTZxPRN+ZItIsGxVd7GYYKecsAyVKvQv83j+Gj +Hno9UKtjBucVtT+2RTeUN7F+8kjDf8V1/peNRY8apxpyKBpADwIDAQABo2MwYTAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBQXnc0e +i9Y5K3DTXNSguB+wAPzFYTAdBgNVHQ4EFgQUF53NHovWOStw01zUoLgfsAD8xWEw +DQYJKoZIhvcNAQELBQADggIBAFbVe27mIgHSQpsY1Q7XZiNc4/6gx5LS6ZStS6LG +7BJ8dNVI0lkUmcDrudHr9EgwW62nV3OZqdPlt9EuWSRY3GguLmLYauRwCy0gUCCk +MpXRAJi70/33MvJJrsZ64Ee+bs7Lo3I6LWldy8joRTnU+kLBEUx3XZL7av9YROXr +gZ6voJmtvqkBZss4HTzfQx/0TW60uhdG/H39h4F5ag0zD/ov+BS5gLNdTaqX4fnk +GMX41TiMJjz98iji7lpJiCzfeT2OnpA8vUFKOt1b9pq0zj8lMH8yfaIDlNDceqFS +3m6TjRgm/VWsvY+b0s+v54Ysyx8Jb6NvqYTUc79NoXQbTiNg8swOqn+knEwlqLJm +Ozj/2ZQw9nKEvmhVEA/GcywWaZMH/rFF7buiVWqw2rVKAiUnhde3t4ZEFolsgCs+ +l6mc1X5VTMbeRRAc6uk7nwNT7u56AQIWeNTowr5GdogTPyK7SBIdUgC0An4hGh6c 
+JfTzPV4e0hz5sy229zdcxsshTrD3mUcYhcErulWuBurQB7Lcq9CClnXO0lD+mefP +L5/ndtFhKvshuzHQqp9HpLIiyhY6UFfEW0NnxWViA0kB60PZ2Pierc+xYw5F9KBa +LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG +mpv0 +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only +# Label: "Entrust Root Certification Authority - G4" +# Serial: 289383649854506086828220374796556676440 +# MD5 Fingerprint: 89:53:f1:83:23:b7:7c:8e:05:f1:8c:71:38:4e:1f:88 +# SHA1 Fingerprint: 14:88:4e:86:26:37:b0:26:af:59:62:5c:40:77:ec:35:29:ba:96:01 +# SHA256 Fingerprint: db:35:17:d1:f6:73:2a:2d:5a:b9:7c:53:3e:c7:07:79:ee:32:70:a6:2f:b4:ac:42:38:37:24:60:e6:f0:1e:88 +-----BEGIN CERTIFICATE----- +MIIGSzCCBDOgAwIBAgIRANm1Q3+vqTkPAAAAAFVlrVgwDQYJKoZIhvcNAQELBQAw +gb4xCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQL +Ex9TZWUgd3d3LmVudHJ1c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykg +MjAxNSBFbnRydXN0LCBJbmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAw +BgNVBAMTKUVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0 +MB4XDTE1MDUyNzExMTExNloXDTM3MTIyNzExNDExNlowgb4xCzAJBgNVBAYTAlVT +MRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1 +c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxNSBFbnRydXN0LCBJ +bmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAwBgNVBAMTKUVudHJ1c3Qg +Um9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0MIICIjANBgkqhkiG9w0B +AQEFAAOCAg8AMIICCgKCAgEAsewsQu7i0TD/pZJH4i3DumSXbcr3DbVZwbPLqGgZ +2K+EbTBwXX7zLtJTmeH+H17ZSK9dE43b/2MzTdMAArzE+NEGCJR5WIoV3imz/f3E +T+iq4qA7ec2/a0My3dl0ELn39GjUu9CH1apLiipvKgS1sqbHoHrmSKvS0VnM1n4j +5pds8ELl3FFLFUHtSUrJ3hCX1nbB76W1NhSXNdh4IjVS70O92yfbYVaCNNzLiGAM +C1rlLAHGVK/XqsEQe9IFWrhAnoanw5CGAlZSCXqc0ieCU0plUmr1POeo8pyvi73T 
+DtTUXm6Hnmo9RR3RXRv06QqsYJn7ibT/mCzPfB3pAqoEmh643IhuJbNsZvc8kPNX +wbMv9W3y+8qh+CmdRouzavbmZwe+LGcKKh9asj5XxNMhIWNlUpEbsZmOeX7m640A +2Vqq6nPopIICR5b+W45UYaPrL0swsIsjdXJ8ITzI9vF01Bx7owVV7rtNOzK+mndm +nqxpkCIHH2E6lr7lmk/MBTwoWdPBDFSoWWG9yHJM6Nyfh3+9nEg2XpWjDrk4JFX8 +dWbrAuMINClKxuMrLzOg2qOGpRKX/YAr2hRC45K9PvJdXmd0LhyIRyk0X+IyqJwl +N4y6mACXi0mWHv0liqzc2thddG5msP9E36EYxr5ILzeUePiVSj9/E15dWf10hkNj +c0kCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD +VR0OBBYEFJ84xFYjwznooHFs6FRM5Og6sb9nMA0GCSqGSIb3DQEBCwUAA4ICAQAS +5UKme4sPDORGpbZgQIeMJX6tuGguW8ZAdjwD+MlZ9POrYs4QjbRaZIxowLByQzTS +Gwv2LFPSypBLhmb8qoMi9IsabyZIrHZ3CL/FmFz0Jomee8O5ZDIBf9PD3Vht7LGr +hFV0d4QEJ1JrhkzO3bll/9bGXp+aEJlLdWr+aumXIOTkdnrG0CSqkM0gkLpHZPt/ +B7NTeLUKYvJzQ85BK4FqLoUWlFPUa19yIqtRLULVAJyZv967lDtX/Zr1hstWO1uI +AeV8KEsD+UmDfLJ/fOPtjqF/YFOOVZ1QNBIPt5d7bIdKROf1beyAN/BYGW5KaHbw +H5Lk6rWS02FREAutp9lfx1/cH6NcjKF+m7ee01ZvZl4HliDtC3T7Zk6LERXpgUl+ +b7DUUH8i119lAg2m9IUe2K4GS0qn0jFmwvjO5QimpAKWRGhXxNUzzxkvFMSUHHuk +2fCfDrGA4tGeEWSpiBE6doLlYsKA2KSD7ZPvfC+QsDJMlhVoSFLUmQjAJOgc47Ol +IQ6SwJAfzyBfyjs4x7dtOvPmRLgOMWuIjnDrnBdSqEGULoe256YSxXXfW8AKbnuk +5F6G+TaU33fD6Q3AOfF5u0aOq0NZJ7cguyPpVkAh7DE9ZapD8j3fcEThuk0mEDuY +n/PIjhs4ViFqUZPTkcpG2om3PVODLAgfi49T3f+sHw== +-----END CERTIFICATE----- + +# Issuer: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft ECC Root Certificate Authority 2017" +# Serial: 136839042543790627607696632466672567020 +# MD5 Fingerprint: dd:a1:03:e6:4a:93:10:d1:bf:f0:19:42:cb:fe:ed:67 +# SHA1 Fingerprint: 99:9a:64:c3:7f:f4:7d:9f:ab:95:f1:47:69:89:14:60:ee:c4:c3:c5 +# SHA256 Fingerprint: 35:8d:f3:9d:76:4a:f9:e1:b7:66:e9:c9:72:df:35:2e:e1:5c:fa:c2:27:af:6a:d1:d7:0e:8e:4a:6e:dc:ba:02 +-----BEGIN CERTIFICATE----- +MIICWTCCAd+gAwIBAgIQZvI9r4fei7FK6gxXMQHC7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYD 
+VQQDEy1NaWNyb3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIw +MTcwHhcNMTkxMjE4MjMwNjQ1WhcNNDIwNzE4MjMxNjA0WjBlMQswCQYDVQQGEwJV +UzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1NaWNy +b3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwdjAQBgcq +hkjOPQIBBgUrgQQAIgNiAATUvD0CQnVBEyPNgASGAlEvaqiBYgtlzPbKnR5vSmZR +ogPZnZH6thaxjG7efM3beaYvzrvOcS/lpaso7GMEZpn4+vKTEAXhgShC48Zo9OYb +hGBKia/teQ87zvH2RPUBeMCjVDBSMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8E +BTADAQH/MB0GA1UdDgQWBBTIy5lycFIM+Oa+sgRXKSrPQhDtNTAQBgkrBgEEAYI3 +FQEEAwIBADAKBggqhkjOPQQDAwNoADBlAjBY8k3qDPlfXu5gKcs68tvWMoQZP3zV +L8KxzJOuULsJMsbG7X7JNpQS5GiFBqIb0C8CMQCZ6Ra0DvpWSNSkMBaReNtUjGUB +iudQZsIxtzm6uBoiB078a1QWIP8rtedMDE2mT3M= +-----END CERTIFICATE----- + +# Issuer: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft RSA Root Certificate Authority 2017" +# Serial: 40975477897264996090493496164228220339 +# MD5 Fingerprint: 10:ff:00:ff:cf:c9:f8:c7:7a:c0:ee:35:8e:c9:0f:47 +# SHA1 Fingerprint: 73:a5:e6:4a:3b:ff:83:16:ff:0e:dc:cc:61:8a:90:6e:4e:ae:4d:74 +# SHA256 Fingerprint: c7:41:f7:0f:4b:2a:8d:88:bf:2e:71:c1:41:22:ef:53:ef:10:eb:a0:cf:a5:e6:4c:fa:20:f4:18:85:30:73:e0 +-----BEGIN CERTIFICATE----- +MIIFqDCCA5CgAwIBAgIQHtOXCV/YtLNHcB6qvn9FszANBgkqhkiG9w0BAQwFADBl +MQswCQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYw +NAYDVQQDEy1NaWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5 +IDIwMTcwHhcNMTkxMjE4MjI1MTIyWhcNNDIwNzE4MjMwMDIzWjBlMQswCQYDVQQG +EwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1N +aWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKW76UM4wplZEWCpW9R2LBifOZ +Nt9GkMml7Xhqb0eRaPgnZ1AzHaGm++DlQ6OEAlcBXZxIQIJTELy/xztokLaCLeX0 +ZdDMbRnMlfl7rEqUrQ7eS0MdhweSE5CAg2Q1OQT85elss7YfUJQ4ZVBcF0a5toW1 +HLUX6NZFndiyJrDKxHBKrmCk3bPZ7Pw71VdyvD/IybLeS2v4I2wDwAW9lcfNcztm 
+gGTjGqwu+UcF8ga2m3P1eDNbx6H7JyqhtJqRjJHTOoI+dkC0zVJhUXAoP8XFWvLJ +jEm7FFtNyP9nTUwSlq31/niol4fX/V4ggNyhSyL71Imtus5Hl0dVe49FyGcohJUc +aDDv70ngNXtk55iwlNpNhTs+VcQor1fznhPbRiefHqJeRIOkpcrVE7NLP8TjwuaG +YaRSMLl6IE9vDzhTyzMMEyuP1pq9KsgtsRx9S1HKR9FIJ3Jdh+vVReZIZZ2vUpC6 +W6IYZVcSn2i51BVrlMRpIpj0M+Dt+VGOQVDJNE92kKz8OMHY4Xu54+OU4UZpyw4K +UGsTuqwPN1q3ErWQgR5WrlcihtnJ0tHXUeOrO8ZV/R4O03QK0dqq6mm4lyiPSMQH ++FJDOvTKVTUssKZqwJz58oHhEmrARdlns87/I6KJClTUFLkqqNfs+avNJVgyeY+Q +W5g5xAgGwax/Dj0ApQIDAQABo1QwUjAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQUCctZf4aycI8awznjwNnpv7tNsiMwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEMBQADggIBAKyvPl3CEZaJjqPnktaXFbgToqZC +LgLNFgVZJ8og6Lq46BrsTaiXVq5lQ7GPAJtSzVXNUzltYkyLDVt8LkS/gxCP81OC +gMNPOsduET/m4xaRhPtthH80dK2Jp86519efhGSSvpWhrQlTM93uCupKUY5vVau6 +tZRGrox/2KJQJWVggEbbMwSubLWYdFQl3JPk+ONVFT24bcMKpBLBaYVu32TxU5nh +SnUgnZUP5NbcA/FZGOhHibJXWpS2qdgXKxdJ5XbLwVaZOjex/2kskZGT4d9Mozd2 +TaGf+G0eHdP67Pv0RR0Tbc/3WeUiJ3IrhvNXuzDtJE3cfVa7o7P4NHmJweDyAmH3 +pvwPuxwXC65B2Xy9J6P9LjrRk5Sxcx0ki69bIImtt2dmefU6xqaWM/5TkshGsRGR +xpl/j8nWZjEgQRCHLQzWwa80mMpkg/sTV9HB8Dx6jKXB/ZUhoHHBk2dxEuqPiApp +GWSZI1b7rCoucL5mxAyE7+WL85MB+GqQk2dLsmijtWKP6T+MejteD+eMuMZ87zf9 +dOLITzNy4ZQ5bb0Sr74MTnB8G2+NszKTc0QWbej09+CVgI+WXTik9KveCjCHk9hN +AHFiRSdLOkKEW39lt2c0Ui2cFmuqqNh7o0JMcccMyj6D5KbvtwEwXlGjefVwaaZB +RA+GsCyRxj3qrg+E +-----END CERTIFICATE----- + +# Issuer: CN=e-Szigno Root CA 2017 O=Microsec Ltd. +# Subject: CN=e-Szigno Root CA 2017 O=Microsec Ltd. 
+# Label: "e-Szigno Root CA 2017" +# Serial: 411379200276854331539784714 +# MD5 Fingerprint: de:1f:f6:9e:84:ae:a7:b4:21:ce:1e:58:7d:d1:84:98 +# SHA1 Fingerprint: 89:d4:83:03:4f:9e:9a:48:80:5f:72:37:d4:a9:a6:ef:cb:7c:1f:d1 +# SHA256 Fingerprint: be:b0:0b:30:83:9b:9b:c3:2c:32:e4:44:79:05:95:06:41:f2:64:21:b1:5e:d0:89:19:8b:51:8a:e2:ea:1b:99 +-----BEGIN CERTIFICATE----- +MIICQDCCAeWgAwIBAgIMAVRI7yH9l1kN9QQKMAoGCCqGSM49BAMCMHExCzAJBgNV +BAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMgTHRk +LjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25vIFJv +b3QgQ0EgMjAxNzAeFw0xNzA4MjIxMjA3MDZaFw00MjA4MjIxMjA3MDZaMHExCzAJ +BgNVBAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMg +THRkLjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25v +IFJvb3QgQ0EgMjAxNzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABJbcPYrYsHtv +xie+RJCxs1YVe45DJH0ahFnuY2iyxl6H0BVIHqiQrb1TotreOpCmYF9oMrWGQd+H +Wyx7xf58etqjYzBhMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBSHERUI0arBeAyxr87GyZDvvzAEwDAfBgNVHSMEGDAWgBSHERUI0arB +eAyxr87GyZDvvzAEwDAKBggqhkjOPQQDAgNJADBGAiEAtVfd14pVCzbhhkT61Nlo +jbjcI4qKDdQvfepz7L9NbKgCIQDLpbQS+ue16M9+k/zzNY9vTlp8tLxOsvxyqltZ ++efcMQ== +-----END CERTIFICATE----- + +# Issuer: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Subject: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Label: "certSIGN Root CA G2" +# Serial: 313609486401300475190 +# MD5 Fingerprint: 8c:f1:75:8a:c6:19:cf:94:b7:f7:65:20:87:c3:97:c7 +# SHA1 Fingerprint: 26:f9:93:b4:ed:3d:28:27:b0:b9:4b:a7:e9:15:1d:a3:8d:92:e5:32 +# SHA256 Fingerprint: 65:7c:fe:2f:a7:3f:aa:38:46:25:71:f3:32:a2:36:3a:46:fc:e7:02:09:51:71:07:02:cd:fb:b6:ee:da:33:05 +-----BEGIN CERTIFICATE----- +MIIFRzCCAy+gAwIBAgIJEQA0tk7GNi02MA0GCSqGSIb3DQEBCwUAMEExCzAJBgNV +BAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJR04g +Uk9PVCBDQSBHMjAeFw0xNzAyMDYwOTI3MzVaFw00MjAyMDYwOTI3MzVaMEExCzAJ +BgNVBAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJ +R04gUk9PVCBDQSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMDF 
+dRmRfUR0dIf+DjuW3NgBFszuY5HnC2/OOwppGnzC46+CjobXXo9X69MhWf05N0Iw +vlDqtg+piNguLWkh59E3GE59kdUWX2tbAMI5Qw02hVK5U2UPHULlj88F0+7cDBrZ +uIt4ImfkabBoxTzkbFpG583H+u/E7Eu9aqSs/cwoUe+StCmrqzWaTOTECMYmzPhp +n+Sc8CnTXPnGFiWeI8MgwT0PPzhAsP6CRDiqWhqKa2NYOLQV07YRaXseVO6MGiKs +cpc/I1mbySKEwQdPzH/iV8oScLumZfNpdWO9lfsbl83kqK/20U6o2YpxJM02PbyW +xPFsqa7lzw1uKA2wDrXKUXt4FMMgL3/7FFXhEZn91QqhngLjYl/rNUssuHLoPj1P +rCy7Lobio3aP5ZMqz6WryFyNSwb/EkaseMsUBzXgqd+L6a8VTxaJW732jcZZroiF +DsGJ6x9nxUWO/203Nit4ZoORUSs9/1F3dmKh7Gc+PoGD4FapUB8fepmrY7+EF3fx +DTvf95xhszWYijqy7DwaNz9+j5LP2RIUZNoQAhVB/0/E6xyjyfqZ90bp4RjZsbgy +LcsUDFDYg2WD7rlcz8sFWkz6GZdr1l0T08JcVLwyc6B49fFtHsufpaafItzRUZ6C +eWRgKRM+o/1Pcmqr4tTluCRVLERLiohEnMqE0yo7AgMBAAGjQjBAMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSCIS1mxteg4BXrzkwJ +d8RgnlRuAzANBgkqhkiG9w0BAQsFAAOCAgEAYN4auOfyYILVAzOBywaK8SJJ6ejq +kX/GM15oGQOGO0MBzwdw5AgeZYWR5hEit/UCI46uuR59H35s5r0l1ZUa8gWmr4UC +b6741jH/JclKyMeKqdmfS0mbEVeZkkMR3rYzpMzXjWR91M08KCy0mpbqTfXERMQl +qiCA2ClV9+BB/AYm/7k29UMUA2Z44RGx2iBfRgB4ACGlHgAoYXhvqAEBj500mv/0 +OJD7uNGzcgbJceaBxXntC6Z58hMLnPddDnskk7RI24Zf3lCGeOdA5jGokHZwYa+c +NywRtYK3qq4kNFtyDGkNzVmf9nGvnAvRCjj5BiKDUyUM/FHE5r7iOZULJK2v0ZXk +ltd0ZGtxTgI8qoXzIKNDOXZbbFD+mpwUHmUUihW9o4JFWklWatKcsWMy5WHgUyIO +pwpJ6st+H6jiYoD2EEVSmAYY3qXNL3+q1Ok+CHLsIwMCPKaq2LxndD0UF/tUSxfj +03k9bWtJySgOLnRQvwzZRjoQhsmnP+mg7H/rpXdYaXHmgwo38oZJar55CJD2AhZk +PuXaTH4MNMn5X7azKFGnpyuqSfqNZSlO42sTp5SjLVFteAxEy9/eCG/Oo2Sr05WE +1LlSVHJ7liXMvGnjSG4N0MedJ5qq+BOS3R7fY581qRY27Iy4g/Q9iY/NtBde17MX +QRBdJ3NghVdJIgc= +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. 
+# Label: "Trustwave Global Certification Authority" +# Serial: 1846098327275375458322922162 +# MD5 Fingerprint: f8:1c:18:2d:2f:ba:5f:6d:a1:6c:bc:c7:ab:91:c7:0e +# SHA1 Fingerprint: 2f:8f:36:4f:e1:58:97:44:21:59:87:a5:2a:9a:d0:69:95:26:7f:b5 +# SHA256 Fingerprint: 97:55:20:15:f5:dd:fc:3c:87:88:c0:06:94:45:55:40:88:94:45:00:84:f1:00:86:70:86:bc:1a:2b:b5:8d:c8 +-----BEGIN CERTIFICATE----- +MIIF2jCCA8KgAwIBAgIMBfcOhtpJ80Y1LrqyMA0GCSqGSIb3DQEBCwUAMIGIMQsw +CQYDVQQGEwJVUzERMA8GA1UECAwISWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28x +ITAfBgNVBAoMGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1 +c3R3YXZlIEdsb2JhbCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0xNzA4MjMx +OTM0MTJaFw00MjA4MjMxOTM0MTJaMIGIMQswCQYDVQQGEwJVUzERMA8GA1UECAwI +SWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28xITAfBgNVBAoMGFRydXN0d2F2ZSBI +b2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1c3R3YXZlIEdsb2JhbCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB +ALldUShLPDeS0YLOvR29zd24q88KPuFd5dyqCblXAj7mY2Hf8g+CY66j96xz0Xzn +swuvCAAJWX/NKSqIk4cXGIDtiLK0thAfLdZfVaITXdHG6wZWiYj+rDKd/VzDBcdu +7oaJuogDnXIhhpCujwOl3J+IKMujkkkP7NAP4m1ET4BqstTnoApTAbqOl5F2brz8 +1Ws25kCI1nsvXwXoLG0R8+eyvpJETNKXpP7ScoFDB5zpET71ixpZfR9oWN0EACyW +80OzfpgZdNmcc9kYvkHHNHnZ9GLCQ7mzJ7Aiy/k9UscwR7PJPrhq4ufogXBeQotP +JqX+OsIgbrv4Fo7NDKm0G2x2EOFYeUY+VM6AqFcJNykbmROPDMjWLBz7BegIlT1l +RtzuzWniTY+HKE40Cz7PFNm73bZQmq131BnW2hqIyE4bJ3XYsgjxroMwuREOzYfw +hI0Vcnyh78zyiGG69Gm7DIwLdVcEuE4qFC49DxweMqZiNu5m4iK4BUBjECLzMx10 +coos9TkpoNPnG4CELcU9402x/RpvumUHO1jsQkUm+9jaJXLE9gCxInm943xZYkqc +BW89zubWR2OZxiRvchLIrH+QtAuRcOi35hYQcRfO3gZPSEF9NUqjifLJS3tBEW1n +twiYTOURGa5CgNz7kAXU+FDKvuStx8KU1xad5hePrzb7AgMBAAGjQjBAMA8GA1Ud +EwEB/wQFMAMBAf8wHQYDVR0OBBYEFJngGWcNYtt2s9o9uFvo/ULSMQ6HMA4GA1Ud +DwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAmHNw4rDT7TnsTGDZqRKGFx6W +0OhUKDtkLSGm+J1WE2pIPU/HPinbbViDVD2HfSMF1OQc3Og4ZYbFdada2zUFvXfe +uyk3QAUHw5RSn8pk3fEbK9xGChACMf1KaA0HZJDmHvUqoai7PF35owgLEQzxPy0Q +lG/+4jSHg9bP5Rs1bdID4bANqKCqRieCNqcVtgimQlRXtpla4gt5kNdXElE1GYhB 
+aCXUNxeEFfsBctyV3lImIJgm4nb1J2/6ADtKYdkNy1GTKv0WBpanI5ojSP5RvbbE +sLFUzt5sQa0WZ37b/TjNuThOssFgy50X31ieemKyJo90lZvkWx3SD92YHJtZuSPT +MaCm/zjdzyBP6VhWOmfD0faZmZ26NraAL4hHT4a/RDqA5Dccprrql5gR0IRiR2Qe +qu5AvzSxnI9O4fKSTx+O856X3vOmeWqJcU9LJxdI/uz0UA9PSX3MReO9ekDFQdxh +VicGaeVyQYHTtgGJoC86cnn+OjC/QezHYj6RS8fZMXZC+fc8Y+wmjHMMfRod6qh8 +h6jCJ3zhM0EPz8/8AKAigJ5Kp28AsEFFtyLKaEjFQqKu3R3y4G5OBVixwJAWKqQ9 +EEC+j2Jjg6mcgn0tAumDMHzLJ8n9HmYAsC7TIS+OMxZsmO0QqAfWzJPP29FpHOTK +yeC2nOnOcXHebD8WpHk= +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global ECC P256 Certification Authority" +# Serial: 4151900041497450638097112925 +# MD5 Fingerprint: 5b:44:e3:8d:5d:36:86:26:e8:0d:05:d2:59:a7:83:54 +# SHA1 Fingerprint: b4:90:82:dd:45:0c:be:8b:5b:b1:66:d3:e2:a4:08:26:cd:ed:42:cf +# SHA256 Fingerprint: 94:5b:bc:82:5e:a5:54:f4:89:d1:fd:51:a7:3d:df:2e:a6:24:ac:70:19:a0:52:05:22:5c:22:a7:8c:cf:a8:b4 +-----BEGIN CERTIFICATE----- +MIICYDCCAgegAwIBAgIMDWpfCD8oXD5Rld9dMAoGCCqGSM49BAMCMIGRMQswCQYD +VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf +BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 +YXZlIEdsb2JhbCBFQ0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x +NzA4MjMxOTM1MTBaFw00MjA4MjMxOTM1MTBaMIGRMQswCQYDVQQGEwJVUzERMA8G +A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 +d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF +Q0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTBZMBMGByqGSM49AgEGCCqG +SM49AwEHA0IABH77bOYj43MyCMpg5lOcunSNGLB4kFKA3TjASh3RqMyTpJcGOMoN +FWLGjgEqZZ2q3zSRLoHB5DOSMcT9CTqmP62jQzBBMA8GA1UdEwEB/wQFMAMBAf8w +DwYDVR0PAQH/BAUDAwcGADAdBgNVHQ4EFgQUo0EGrJBt0UrrdaVKEJmzsaGLSvcw +CgYIKoZIzj0EAwIDRwAwRAIgB+ZU2g6gWrKuEZ+Hxbb/ad4lvvigtwjzRM4q3wgh +DDcCIC0mA6AFvWvR9lz4ZcyGbbOcNEhjhAnFjXca4syc4XR7 +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global 
ECC P384 Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global ECC P384 Certification Authority" +# Serial: 2704997926503831671788816187 +# MD5 Fingerprint: ea:cf:60:c4:3b:b9:15:29:40:a1:97:ed:78:27:93:d6 +# SHA1 Fingerprint: e7:f3:a3:c8:cf:6f:c3:04:2e:6d:0e:67:32:c5:9e:68:95:0d:5e:d2 +# SHA256 Fingerprint: 55:90:38:59:c8:c0:c3:eb:b8:75:9e:ce:4e:25:57:22:5f:f5:75:8b:bd:38:eb:d4:82:76:60:1e:1b:d5:80:97 +-----BEGIN CERTIFICATE----- +MIICnTCCAiSgAwIBAgIMCL2Fl2yZJ6SAaEc7MAoGCCqGSM49BAMDMIGRMQswCQYD +VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf +BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 +YXZlIEdsb2JhbCBFQ0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x +NzA4MjMxOTM2NDNaFw00MjA4MjMxOTM2NDNaMIGRMQswCQYDVQQGEwJVUzERMA8G +A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 +d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF +Q0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTB2MBAGByqGSM49AgEGBSuB +BAAiA2IABGvaDXU1CDFHBa5FmVXxERMuSvgQMSOjfoPTfygIOiYaOs+Xgh+AtycJ +j9GOMMQKmw6sWASr9zZ9lCOkmwqKi6vr/TklZvFe/oyujUF5nQlgziip04pt89ZF +1PKYhDhloKNDMEEwDwYDVR0TAQH/BAUwAwEB/zAPBgNVHQ8BAf8EBQMDBwYAMB0G +A1UdDgQWBBRVqYSJ0sEyvRjLbKYHTsjnnb6CkDAKBggqhkjOPQQDAwNnADBkAjA3 +AZKXRRJ+oPM+rRk6ct30UJMDEr5E0k9BpIycnR+j9sKS50gU/k6bpZFXrsY3crsC +MGclCrEMXu6pY5Jv5ZAL/mYiykf9ijH3g/56vxC+GCsej/YpHpRZ744hN8tRmKVu +Sw== +-----END CERTIFICATE----- + +# Issuer: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. +# Subject: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. 
+# Label: "NAVER Global Root Certification Authority" +# Serial: 9013692873798656336226253319739695165984492813 +# MD5 Fingerprint: c8:7e:41:f6:25:3b:f5:09:b3:17:e8:46:3d:bf:d0:9b +# SHA1 Fingerprint: 8f:6b:f2:a9:27:4a:da:14:a0:c4:f4:8e:61:27:f9:c0:1e:78:5d:d1 +# SHA256 Fingerprint: 88:f4:38:dc:f8:ff:d1:fa:8f:42:91:15:ff:e5:f8:2a:e1:e0:6e:0c:70:c3:75:fa:ad:71:7b:34:a4:9e:72:65 +-----BEGIN CERTIFICATE----- +MIIFojCCA4qgAwIBAgIUAZQwHqIL3fXFMyqxQ0Rx+NZQTQ0wDQYJKoZIhvcNAQEM +BQAwaTELMAkGA1UEBhMCS1IxJjAkBgNVBAoMHU5BVkVSIEJVU0lORVNTIFBMQVRG +T1JNIENvcnAuMTIwMAYDVQQDDClOQVZFUiBHbG9iYWwgUm9vdCBDZXJ0aWZpY2F0 +aW9uIEF1dGhvcml0eTAeFw0xNzA4MTgwODU4NDJaFw0zNzA4MTgyMzU5NTlaMGkx +CzAJBgNVBAYTAktSMSYwJAYDVQQKDB1OQVZFUiBCVVNJTkVTUyBQTEFURk9STSBD +b3JwLjEyMDAGA1UEAwwpTkFWRVIgR2xvYmFsIFJvb3QgQ2VydGlmaWNhdGlvbiBB +dXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC21PGTXLVA +iQqrDZBbUGOukJR0F0Vy1ntlWilLp1agS7gvQnXp2XskWjFlqxcX0TM62RHcQDaH +38dq6SZeWYp34+hInDEW+j6RscrJo+KfziFTowI2MMtSAuXaMl3Dxeb57hHHi8lE +HoSTGEq0n+USZGnQJoViAbbJAh2+g1G7XNr4rRVqmfeSVPc0W+m/6imBEtRTkZaz +kVrd/pBzKPswRrXKCAfHcXLJZtM0l/aM9BhK4dA9WkW2aacp+yPOiNgSnABIqKYP +szuSjXEOdMWLyEz59JuOuDxp7W87UC9Y7cSw0BwbagzivESq2M0UXZR4Yb8Obtoq +vC8MC3GmsxY/nOb5zJ9TNeIDoKAYv7vxvvTWjIcNQvcGufFt7QSUqP620wbGQGHf +nZ3zVHbOUzoBppJB7ASjjw2i1QnK1sua8e9DXcCrpUHPXFNwcMmIpi3Ua2FzUCaG +YQ5fG8Ir4ozVu53BA0K6lNpfqbDKzE0K70dpAy8i+/Eozr9dUGWokG2zdLAIx6yo +0es+nPxdGoMuK8u180SdOqcXYZaicdNwlhVNt0xz7hlcxVs+Qf6sdWA7G2POAN3a +CJBitOUt7kinaxeZVL6HSuOpXgRM6xBtVNbv8ejyYhbLgGvtPe31HzClrkvJE+2K +AQHJuFFYwGY6sWZLxNUxAmLpdIQM201GLQIDAQABo0IwQDAdBgNVHQ4EFgQU0p+I +36HNLL3s9TsBAZMzJ7LrYEswDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMB +Af8wDQYJKoZIhvcNAQEMBQADggIBADLKgLOdPVQG3dLSLvCkASELZ0jKbY7gyKoN +qo0hV4/GPnrK21HUUrPUloSlWGB/5QuOH/XcChWB5Tu2tyIvCZwTFrFsDDUIbatj +cu3cvuzHV+YwIHHW1xDBE1UBjCpD5EHxzzp6U5LOogMFDTjfArsQLtk70pt6wKGm ++LUx5vR1yblTmXVHIloUFcd4G7ad6Qz4G3bxhYTeodoS76TiEJd6eN4MUZeoIUCL +hr0N8F5OSza7OyAfikJW4Qsav3vQIkMsRIz75Sq0bBwcupTgE34h5prCy8VCZLQe 
+lHsIJchxzIdFV4XTnyliIoNRlwAYl3dqmJLJfGBs32x9SuRwTMKeuB330DTHD8z7 +p/8Dvq1wkNoL3chtl1+afwkyQf3NosxabUzyqkn+Zvjp2DXrDige7kgvOtB5CTh8 +piKCk5XQA76+AqAF3SAi428diDRgxuYKuQl1C/AH6GmWNcf7I4GOODm4RStDeKLR +LBT/DShycpWbXgnbiUSYqqFJu3FS8r/2/yehNq+4tneI3TqkbZs0kNwUXTC/t+sX +5Ie3cdCh13cV1ELX8vMxmV2b3RZtP+oGI/hGoiLtk/bdmuYqh7GYVPEi92tF4+KO +dh2ajcQGjTa3FPOdVGm3jjzVpG2Tgbet9r1ke8LJaDmgkpzNNIaRkPpkUZ3+/uul +9XXeifdy +-----END CERTIFICATE----- + +# Issuer: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres +# Subject: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres +# Label: "AC RAIZ FNMT-RCM SERVIDORES SEGUROS" +# Serial: 131542671362353147877283741781055151509 +# MD5 Fingerprint: 19:36:9c:52:03:2f:d2:d1:bb:23:cc:dd:1e:12:55:bb +# SHA1 Fingerprint: 62:ff:d9:9e:c0:65:0d:03:ce:75:93:d2:ed:3f:2d:32:c9:e3:e5:4a +# SHA256 Fingerprint: 55:41:53:b1:3d:2c:f9:dd:b7:53:bf:be:1a:4e:0a:e0:8d:0a:a4:18:70:58:fe:60:a2:b8:62:b2:e4:b8:7b:cb +-----BEGIN CERTIFICATE----- +MIICbjCCAfOgAwIBAgIQYvYybOXE42hcG2LdnC6dlTAKBggqhkjOPQQDAzB4MQsw +CQYDVQQGEwJFUzERMA8GA1UECgwIRk5NVC1SQ00xDjAMBgNVBAsMBUNlcmVzMRgw +FgYDVQRhDA9WQVRFUy1RMjgyNjAwNEoxLDAqBgNVBAMMI0FDIFJBSVogRk5NVC1S +Q00gU0VSVklET1JFUyBTRUdVUk9TMB4XDTE4MTIyMDA5MzczM1oXDTQzMTIyMDA5 +MzczM1oweDELMAkGA1UEBhMCRVMxETAPBgNVBAoMCEZOTVQtUkNNMQ4wDAYDVQQL +DAVDZXJlczEYMBYGA1UEYQwPVkFURVMtUTI4MjYwMDRKMSwwKgYDVQQDDCNBQyBS +QUlaIEZOTVQtUkNNIFNFUlZJRE9SRVMgU0VHVVJPUzB2MBAGByqGSM49AgEGBSuB +BAAiA2IABPa6V1PIyqvfNkpSIeSX0oNnnvBlUdBeh8dHsVnyV0ebAAKTRBdp20LH +sbI6GA60XYyzZl2hNPk2LEnb80b8s0RpRBNm/dfF/a82Tc4DTQdxz69qBdKiQ1oK +Um8BA06Oi6NCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD +VR0OBBYEFAG5L++/EYZg8k/QQW6rcx/n0m5JMAoGCCqGSM49BAMDA2kAMGYCMQCu +SuMrQMN0EfKVrRYj3k4MGuZdpSRea0R7/DjiT8ucRRcRTBQnJlU5dUoDzBOQn5IC +MQD6SmxgiHPz7riYYqnOK8LZiqZwMR2vsJRM60/G49HzYqc8/5MuB1xJAWdpEgJy +v+c= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign Root R46 O=GlobalSign nv-sa +# Subject: CN=GlobalSign Root R46 O=GlobalSign nv-sa +# Label: "GlobalSign Root 
R46" +# Serial: 1552617688466950547958867513931858518042577 +# MD5 Fingerprint: c4:14:30:e4:fa:66:43:94:2a:6a:1b:24:5f:19:d0:ef +# SHA1 Fingerprint: 53:a2:b0:4b:ca:6b:d6:45:e6:39:8a:8e:c4:0d:d2:bf:77:c3:a2:90 +# SHA256 Fingerprint: 4f:a3:12:6d:8d:3a:11:d1:c4:85:5a:4f:80:7c:ba:d6:cf:91:9d:3a:5a:88:b0:3b:ea:2c:63:72:d9:3c:40:c9 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgISEdK7udcjGJ5AXwqdLdDfJWfRMA0GCSqGSIb3DQEBDAUA +MEYxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYD +VQQDExNHbG9iYWxTaWduIFJvb3QgUjQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMy +MDAwMDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYt +c2ExHDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCsrHQy6LNl5brtQyYdpokNRbopiLKkHWPd08EsCVeJ +OaFV6Wc0dwxu5FUdUiXSE2te4R2pt32JMl8Nnp8semNgQB+msLZ4j5lUlghYruQG +vGIFAha/r6gjA7aUD7xubMLL1aa7DOn2wQL7Id5m3RerdELv8HQvJfTqa1VbkNud +316HCkD7rRlr+/fKYIje2sGP1q7Vf9Q8g+7XFkyDRTNrJ9CG0Bwta/OrffGFqfUo +0q3v84RLHIf8E6M6cqJaESvWJ3En7YEtbWaBkoe0G1h6zD8K+kZPTXhc+CtI4wSE +y132tGqzZfxCnlEmIyDLPRT5ge1lFgBPGmSXZgjPjHvjK8Cd+RTyG/FWaha/LIWF +zXg4mutCagI0GIMXTpRW+LaCtfOW3T3zvn8gdz57GSNrLNRyc0NXfeD412lPFzYE ++cCQYDdF3uYM2HSNrpyibXRdQr4G9dlkbgIQrImwTDsHTUB+JMWKmIJ5jqSngiCN +I/onccnfxkF0oE32kRbcRoxfKWMxWXEM2G/CtjJ9++ZdU6Z+Ffy7dXxd7Pj2Fxzs +x2sZy/N78CsHpdlseVR2bJ0cpm4O6XkMqCNqo98bMDGfsVR7/mrLZqrcZdCinkqa +ByFrgY/bxFn63iLABJzjqls2k+g9vXqhnQt2sQvHnf3PmKgGwvgqo6GDoLclcqUC +4wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQUA1yrc4GHqMywptWU4jaWSf8FmSwwDQYJKoZIhvcNAQEMBQADggIBAHx4 +7PYCLLtbfpIrXTncvtgdokIzTfnvpCo7RGkerNlFo048p9gkUbJUHJNOxO97k4Vg +JuoJSOD1u8fpaNK7ajFxzHmuEajwmf3lH7wvqMxX63bEIaZHU1VNaL8FpO7XJqti +2kM3S+LGteWygxk6x9PbTZ4IevPuzz5i+6zoYMzRx6Fcg0XERczzF2sUyQQCPtIk +pnnpHs6i58FZFZ8d4kuaPp92CC1r2LpXFNqD6v6MVenQTqnMdzGxRBF6XLE+0xRF +FRhiJBPSy03OXIPBNvIQtQ6IbbjhVp+J3pZmOUdkLG5NrmJ7v2B0GbhWrJKsFjLt +rWhV/pi60zTe9Mlhww6G9kuEYO4Ne7UyWHmRVSyBQ7N0H3qqJZ4d16GLuc1CLgSk 
+ZoNNiTW2bKg2SnkheCLQQrzRQDGQob4Ez8pn7fXwgNNgyYMqIgXQBztSvwyeqiv5 +u+YfjyW6hY0XHgL+XVAEV8/+LbzvXMAaq7afJMbfc2hIkCwU9D9SGuTSyxTDYWnP +4vkYxboznxSjBF25cfe1lNj2M8FawTSLfJvdkzrnE6JwYZ+vj+vYxXX4M2bUdGc6 +N3ec592kD3ZDZopD8p/7DEJ4Y9HiD2971KE9dJeFt0g5QdYg/NA6s/rob8SKunE3 +vouXsXgxT7PntgMTzlSdriVZzH81Xwj3QEUxeCp6 +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign Root E46 O=GlobalSign nv-sa +# Subject: CN=GlobalSign Root E46 O=GlobalSign nv-sa +# Label: "GlobalSign Root E46" +# Serial: 1552617690338932563915843282459653771421763 +# MD5 Fingerprint: b5:b8:66:ed:de:08:83:e3:c9:e2:01:34:06:ac:51:6f +# SHA1 Fingerprint: 39:b4:6c:d5:fe:80:06:eb:e2:2f:4a:bb:08:33:a0:af:db:b9:dd:84 +# SHA256 Fingerprint: cb:b9:c4:4d:84:b8:04:3e:10:50:ea:31:a6:9f:51:49:55:d7:bf:d2:e2:c6:b4:93:01:01:9a:d6:1d:9f:50:58 +-----BEGIN CERTIFICATE----- +MIICCzCCAZGgAwIBAgISEdK7ujNu1LzmJGjFDYQdmOhDMAoGCCqGSM49BAMDMEYx +CzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYDVQQD +ExNHbG9iYWxTaWduIFJvb3QgRTQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMyMDAw +MDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2Ex +HDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAAScDrHPt+ieUnd1NPqlRqetMhkytAepJ8qUuwzSChDH2omwlwxwEwkBjtjq +R+q+soArzfwoDdusvKSGN+1wCAB16pMLey5SnCNoIwZD7JIvU4Tb+0cUB+hflGdd +yXqBPCCjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud +DgQWBBQxCpCPtsad0kRLgLWi5h+xEk8blTAKBggqhkjOPQQDAwNoADBlAjEA31SQ +7Zvvi5QCkxeCmb6zniz2C5GMn0oUsfZkvLtoURMMA/cVi4RguYv/Uo7njLwcAjA8 ++RHUjE7AwWHCFUyqqx0LMV87HOIAl0Qx5v5zli/altP+CAezNIm8BZ/3Hobui3A= +-----END CERTIFICATE----- + +# Issuer: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH +# Subject: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH +# Label: "GLOBALTRUST 2020" +# Serial: 109160994242082918454945253 +# MD5 Fingerprint: 8a:c7:6f:cb:6d:e3:cc:a2:f1:7c:83:fa:0e:78:d7:e8 +# SHA1 Fingerprint: d0:67:c1:13:51:01:0c:aa:d0:c7:6a:65:37:31:16:26:4f:53:71:a2 +# SHA256 Fingerprint: 
9a:29:6a:51:82:d1:d4:51:a2:e3:7f:43:9b:74:da:af:a2:67:52:33:29:f9:0f:9a:0d:20:07:c3:34:e2:3c:9a +-----BEGIN CERTIFICATE----- +MIIFgjCCA2qgAwIBAgILWku9WvtPilv6ZeUwDQYJKoZIhvcNAQELBQAwTTELMAkG +A1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9uaXRvcmluZyBHbWJIMRkw +FwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMB4XDTIwMDIxMDAwMDAwMFoXDTQwMDYx +MDAwMDAwMFowTTELMAkGA1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9u +aXRvcmluZyBHbWJIMRkwFwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMIICIjANBgkq +hkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAri5WrRsc7/aVj6B3GyvTY4+ETUWiD59b +RatZe1E0+eyLinjF3WuvvcTfk0Uev5E4C64OFudBc/jbu9G4UeDLgztzOG53ig9Z +YybNpyrOVPu44sB8R85gfD+yc/LAGbaKkoc1DZAoouQVBGM+uq/ufF7MpotQsjj3 +QWPKzv9pj2gOlTblzLmMCcpL3TGQlsjMH/1WljTbjhzqLL6FLmPdqqmV0/0plRPw +yJiT2S0WR5ARg6I6IqIoV6Lr/sCMKKCmfecqQjuCgGOlYx8ZzHyyZqjC0203b+J+ +BlHZRYQfEs4kUmSFC0iAToexIiIwquuuvuAC4EDosEKAA1GqtH6qRNdDYfOiaxaJ +SaSjpCuKAsR49GiKweR6NrFvG5Ybd0mN1MkGco/PU+PcF4UgStyYJ9ORJitHHmkH +r96i5OTUawuzXnzUJIBHKWk7buis/UDr2O1xcSvy6Fgd60GXIsUf1DnQJ4+H4xj0 +4KlGDfV0OoIu0G4skaMxXDtG6nsEEFZegB31pWXogvziB4xiRfUg3kZwhqG8k9Me +dKZssCz3AwyIDMvUclOGvGBG85hqwvG/Q/lwIHfKN0F5VVJjjVsSn8VoxIidrPIw +q7ejMZdnrY8XD2zHc+0klGvIg5rQmjdJBKuxFshsSUktq6HQjJLyQUp5ISXbY9e2 +nKd+Qmn7OmMCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AQYwHQYDVR0OBBYEFNwuH9FhN3nkq9XVsxJxaD1qaJwiMB8GA1UdIwQYMBaAFNwu +H9FhN3nkq9XVsxJxaD1qaJwiMA0GCSqGSIb3DQEBCwUAA4ICAQCR8EICaEDuw2jA +VC/f7GLDw56KoDEoqoOOpFaWEhCGVrqXctJUMHytGdUdaG/7FELYjQ7ztdGl4wJC +XtzoRlgHNQIw4Lx0SsFDKv/bGtCwr2zD/cuz9X9tAy5ZVp0tLTWMstZDFyySCstd +6IwPS3BD0IL/qMy/pJTAvoe9iuOTe8aPmxadJ2W8esVCgmxcB9CpwYhgROmYhRZf ++I/KARDOJcP5YBugxZfD0yyIMaK9MOzQ0MAS8cE54+X1+NZK3TTN+2/BT+MAi1bi +kvcoskJ3ciNnxz8RFbLEAwW+uxF7Cr+obuf/WEPPm2eggAe2HcqtbepBEX4tdJP7 +wry+UUTF72glJ4DjyKDUEuzZpTcdN3y0kcra1LGWge9oXHYQSa9+pTeAsRxSvTOB +TI/53WXZFM2KJVj04sWDpQmQ1GwUY7VA3+vA/MRYfg0UFodUJ25W5HCEuGwyEn6C +MUO+1918oa2u1qsgEu8KwxCMSZY13At1XrFP1U80DhEgB3VDRemjEdqso5nCtnkn +4rnvyOL2NSl6dPrFf4IFYqYK6miyeUcGbvJXqBUzxvd4Sj1Ce2t+/vdG6tHrju+I 
+aFvowdlxfv1k7/9nR4hYJS8+hge9+6jlgqispdNpQ80xiEmEU5LAsTkbOYMBMMTy +qfrQA71yN2BWHzZ8vTmR9W0Nv3vXkg== +-----END CERTIFICATE----- + +# Issuer: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz +# Subject: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz +# Label: "ANF Secure Server Root CA" +# Serial: 996390341000653745 +# MD5 Fingerprint: 26:a6:44:5a:d9:af:4e:2f:b2:1d:b6:65:b0:4e:e8:96 +# SHA1 Fingerprint: 5b:6e:68:d0:cc:15:b6:a0:5f:1e:c1:5f:ae:02:fc:6b:2f:5d:6f:74 +# SHA256 Fingerprint: fb:8f:ec:75:91:69:b9:10:6b:1e:51:16:44:c6:18:c5:13:04:37:3f:6c:06:43:08:8d:8b:ef:fd:1b:99:75:99 +-----BEGIN CERTIFICATE----- +MIIF7zCCA9egAwIBAgIIDdPjvGz5a7EwDQYJKoZIhvcNAQELBQAwgYQxEjAQBgNV +BAUTCUc2MzI4NzUxMDELMAkGA1UEBhMCRVMxJzAlBgNVBAoTHkFORiBBdXRvcmlk +YWQgZGUgQ2VydGlmaWNhY2lvbjEUMBIGA1UECxMLQU5GIENBIFJhaXoxIjAgBgNV +BAMTGUFORiBTZWN1cmUgU2VydmVyIFJvb3QgQ0EwHhcNMTkwOTA0MTAwMDM4WhcN +MzkwODMwMTAwMDM4WjCBhDESMBAGA1UEBRMJRzYzMjg3NTEwMQswCQYDVQQGEwJF +UzEnMCUGA1UEChMeQU5GIEF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uMRQwEgYD +VQQLEwtBTkYgQ0EgUmFpejEiMCAGA1UEAxMZQU5GIFNlY3VyZSBTZXJ2ZXIgUm9v +dCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANvrayvmZFSVgpCj +cqQZAZ2cC4Ffc0m6p6zzBE57lgvsEeBbphzOG9INgxwruJ4dfkUyYA8H6XdYfp9q +yGFOtibBTI3/TO80sh9l2Ll49a2pcbnvT1gdpd50IJeh7WhM3pIXS7yr/2WanvtH +2Vdy8wmhrnZEE26cLUQ5vPnHO6RYPUG9tMJJo8gN0pcvB2VSAKduyK9o7PQUlrZX +H1bDOZ8rbeTzPvY1ZNoMHKGESy9LS+IsJJ1tk0DrtSOOMspvRdOoiXsezx76W0OL +zc2oD2rKDF65nkeP8Nm2CgtYZRczuSPkdxl9y0oukntPLxB3sY0vaJxizOBQ+OyR +p1RMVwnVdmPF6GUe7m1qzwmd+nxPrWAI/VaZDxUse6mAq4xhj0oHdkLePfTdsiQz +W7i1o0TJrH93PB0j7IKppuLIBkwC/qxcmZkLLxCKpvR/1Yd0DVlJRfbwcVw5Kda/ +SiOL9V8BY9KHcyi1Swr1+KuCLH5zJTIdC2MKF4EA/7Z2Xue0sUDKIbvVgFHlSFJn +LNJhiQcND85Cd8BEc5xEUKDbEAotlRyBr+Qc5RQe8TZBAQIvfXOn3kLMTOmJDVb3 +n5HUA8ZsyY/b2BzgQJhdZpmYgG4t/wHFzstGH6wCxkPmrqKEPMVOHj1tyRRM4y5B +u8o5vzY8KhmqQYdOpc5LMnndkEl/AgMBAAGjYzBhMB8GA1UdIwQYMBaAFJxf0Gxj +o1+TypOYCK2Mh6UsXME3MB0GA1UdDgQWBBScX9BsY6Nfk8qTmAitjIelLFzBNzAO 
+BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOC +AgEATh65isagmD9uw2nAalxJUqzLK114OMHVVISfk/CHGT0sZonrDUL8zPB1hT+L +9IBdeeUXZ701guLyPI59WzbLWoAAKfLOKyzxj6ptBZNscsdW699QIyjlRRA96Gej +rw5VD5AJYu9LWaL2U/HANeQvwSS9eS9OICI7/RogsKQOLHDtdD+4E5UGUcjohybK +pFtqFiGS3XNgnhAY3jyB6ugYw3yJ8otQPr0R4hUDqDZ9MwFsSBXXiJCZBMXM5gf0 +vPSQ7RPi6ovDj6MzD8EpTBNO2hVWcXNyglD2mjN8orGoGjR0ZVzO0eurU+AagNjq +OknkJjCb5RyKqKkVMoaZkgoQI1YS4PbOTOK7vtuNknMBZi9iPrJyJ0U27U1W45eZ +/zo1PqVUSlJZS2Db7v54EX9K3BR5YLZrZAPbFYPhor72I5dQ8AkzNqdxliXzuUJ9 +2zg/LFis6ELhDtjTO0wugumDLmsx2d1Hhk9tl5EuT+IocTUW0fJz/iUrB0ckYyfI ++PbZa/wSMVYIwFNCr5zQM378BvAxRAMU8Vjq8moNqRGyg77FGr8H6lnco4g175x2 +MjxNBiLOFeXdntiP2t7SxDnlF4HPOEfrf4htWRvfn0IUrn7PqLBmZdo3r5+qPeoo +tt7VMVgWglvquxl1AnMaykgaIZOQCo6ThKd9OyMYkomgjaw= +-----END CERTIFICATE----- + +# Issuer: CN=Certum EC-384 CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Subject: CN=Certum EC-384 CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Label: "Certum EC-384 CA" +# Serial: 160250656287871593594747141429395092468 +# MD5 Fingerprint: b6:65:b3:96:60:97:12:a1:ec:4e:e1:3d:a3:c6:c9:f1 +# SHA1 Fingerprint: f3:3e:78:3c:ac:df:f4:a2:cc:ac:67:55:69:56:d7:e5:16:3c:e1:ed +# SHA256 Fingerprint: 6b:32:80:85:62:53:18:aa:50:d1:73:c9:8d:8b:da:09:d5:7e:27:41:3d:11:4c:f7:87:a0:f5:d0:6c:03:0c:f6 +-----BEGIN CERTIFICATE----- +MIICZTCCAeugAwIBAgIQeI8nXIESUiClBNAt3bpz9DAKBggqhkjOPQQDAzB0MQsw +CQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEuMScw +JQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAXBgNVBAMT +EENlcnR1bSBFQy0zODQgQ0EwHhcNMTgwMzI2MDcyNDU0WhcNNDMwMzI2MDcyNDU0 +WjB0MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBT +LkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAX +BgNVBAMTEENlcnR1bSBFQy0zODQgQ0EwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATE +KI6rGFtqvm5kN2PkzeyrOvfMobgOgknXhimfoZTy42B4mIF4Bk3y7JoOV2CDn7Tm +Fy8as10CW4kjPMIRBSqniBMY81CE1700LCeJVf/OTOffph8oxPBUw7l8t1Ot68Kj 
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI0GZnQkdjrzife81r1HfS+8 +EF9LMA4GA1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNoADBlAjADVS2m5hjEfO/J +UG7BJw+ch69u1RsIGL2SKcHvlJF40jocVYli5RsJHrpka/F2tNQCMQC0QoSZ/6vn +nvuRlydd3LBbMHHOXjgaatkl5+r3YZJW+OraNsKHZZYuciUvf9/DE8k= +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Label: "Certum Trusted Root CA" +# Serial: 40870380103424195783807378461123655149 +# MD5 Fingerprint: 51:e1:c2:e7:fe:4c:84:af:59:0e:2f:f4:54:6f:ea:29 +# SHA1 Fingerprint: c8:83:44:c0:18:ae:9f:cc:f1:87:b7:8f:22:d1:c5:d7:45:84:ba:e5 +# SHA256 Fingerprint: fe:76:96:57:38:55:77:3e:37:a9:5e:7a:d4:d9:cc:96:c3:01:57:c1:5d:31:76:5b:a9:b1:57:04:e1:ae:78:fd +-----BEGIN CERTIFICATE----- +MIIFwDCCA6igAwIBAgIQHr9ZULjJgDdMBvfrVU+17TANBgkqhkiG9w0BAQ0FADB6 +MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEu +MScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxHzAdBgNV +BAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwHhcNMTgwMzE2MTIxMDEzWhcNNDMw +MzE2MTIxMDEzWjB6MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEg +U3lzdGVtcyBTLkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRo +b3JpdHkxHzAdBgNVBAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQDRLY67tzbqbTeRn06TpwXkKQMlzhyC93yZ +n0EGze2jusDbCSzBfN8pfktlL5On1AFrAygYo9idBcEq2EXxkd7fO9CAAozPOA/q +p1x4EaTByIVcJdPTsuclzxFUl6s1wB52HO8AU5853BSlLCIls3Jy/I2z5T4IHhQq +NwuIPMqw9MjCoa68wb4pZ1Xi/K1ZXP69VyywkI3C7Te2fJmItdUDmj0VDT06qKhF +8JVOJVkdzZhpu9PMMsmN74H+rX2Ju7pgE8pllWeg8xn2A1bUatMn4qGtg/BKEiJ3 +HAVz4hlxQsDsdUaakFjgao4rpUYwBI4Zshfjvqm6f1bxJAPXsiEodg42MEx51UGa +mqi4NboMOvJEGyCI98Ul1z3G4z5D3Yf+xOr1Uz5MZf87Sst4WmsXXw3Hw09Omiqi +7VdNIuJGmj8PkTQkfVXjjJU30xrwCSss0smNtA0Aq2cpKNgB9RkEth2+dv5yXMSF +ytKAQd8FqKPVhJBPC/PgP5sZ0jeJP/J7UhyM9uH3PAeXjA6iWYEMspA90+NZRu0P +qafegGtaqge2Gcu8V/OXIXoMsSt0Puvap2ctTMSYnjYJdmZm/Bo/6khUHL4wvYBQ 
+v3y1zgD2DGHZ5yQD4OMBgQ692IU0iL2yNqh7XAjlRICMb/gv1SHKHRzQ+8S1h9E6 +Tsd2tTVItQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBSM+xx1 +vALTn04uSNn5YFSqxLNP+jAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQENBQAD +ggIBAEii1QALLtA/vBzVtVRJHlpr9OTy4EA34MwUe7nJ+jW1dReTagVphZzNTxl4 +WxmB82M+w85bj/UvXgF2Ez8sALnNllI5SW0ETsXpD4YN4fqzX4IS8TrOZgYkNCvo +zMrnadyHncI013nR03e4qllY/p0m+jiGPp2Kh2RX5Rc64vmNueMzeMGQ2Ljdt4NR +5MTMI9UGfOZR0800McD2RrsLrfw9EAUqO0qRJe6M1ISHgCq8CYyqOhNf6DR5UMEQ +GfnTKB7U0VEwKbOukGfWHwpjscWpxkIxYxeU72nLL/qMFH3EQxiJ2fAyQOaA4kZf +5ePBAFmo+eggvIksDkc0C+pXwlM2/KfUrzHN/gLldfq5Jwn58/U7yn2fqSLLiMmq +0Uc9NneoWWRrJ8/vJ8HjJLWG965+Mk2weWjROeiQWMODvA8s1pfrzgzhIMfatz7D +P78v3DSk+yshzWePS/Tj6tQ/50+6uaWTRRxmHyH6ZF5v4HaUMst19W7l9o/HuKTM +qJZ9ZPskWkoDbGs4xugDQ5r3V7mzKWmTOPQD8rv7gmsHINFSH5pkAnuYZttcTVoP +0ISVoDwUQwbKytu4QTbaakRnh6+v40URFWkIsr4WOZckbxJF0WddCajJFdr60qZf +E2Efv4WstK2tBZQIgx51F9NxO5NQI1mg7TyRVJ12AMXDuDjb +-----END CERTIFICATE----- + +# Issuer: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique +# Subject: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique +# Label: "TunTrust Root CA" +# Serial: 108534058042236574382096126452369648152337120275 +# MD5 Fingerprint: 85:13:b9:90:5b:36:5c:b6:5e:b8:5a:f8:e0:31:57:b4 +# SHA1 Fingerprint: cf:e9:70:84:0f:e0:73:0f:9d:f6:0c:7f:2c:4b:ee:20:46:34:9c:bb +# SHA256 Fingerprint: 2e:44:10:2a:b5:8c:b8:54:19:45:1c:8e:19:d9:ac:f3:66:2c:af:bc:61:4b:6a:53:96:0a:30:f7:d0:e2:eb:41 +-----BEGIN CERTIFICATE----- +MIIFszCCA5ugAwIBAgIUEwLV4kBMkkaGFmddtLu7sms+/BMwDQYJKoZIhvcNAQEL +BQAwYTELMAkGA1UEBhMCVE4xNzA1BgNVBAoMLkFnZW5jZSBOYXRpb25hbGUgZGUg +Q2VydGlmaWNhdGlvbiBFbGVjdHJvbmlxdWUxGTAXBgNVBAMMEFR1blRydXN0IFJv +b3QgQ0EwHhcNMTkwNDI2MDg1NzU2WhcNNDQwNDI2MDg1NzU2WjBhMQswCQYDVQQG +EwJUTjE3MDUGA1UECgwuQWdlbmNlIE5hdGlvbmFsZSBkZSBDZXJ0aWZpY2F0aW9u +IEVsZWN0cm9uaXF1ZTEZMBcGA1UEAwwQVHVuVHJ1c3QgUm9vdCBDQTCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAMPN0/y9BFPdDCA61YguBUtB9YOCfvdZ 
+n56eY+hz2vYGqU8ftPkLHzmMmiDQfgbU7DTZhrx1W4eI8NLZ1KMKsmwb60ksPqxd +2JQDoOw05TDENX37Jk0bbjBU2PWARZw5rZzJJQRNmpA+TkBuimvNKWfGzC3gdOgF +VwpIUPp6Q9p+7FuaDmJ2/uqdHYVy7BG7NegfJ7/Boce7SBbdVtfMTqDhuazb1YMZ +GoXRlJfXyqNlC/M4+QKu3fZnz8k/9YosRxqZbwUN/dAdgjH8KcwAWJeRTIAAHDOF +li/LQcKLEITDCSSJH7UP2dl3RxiSlGBcx5kDPP73lad9UKGAwqmDrViWVSHbhlnU +r8a83YFuB9tgYv7sEG7aaAH0gxupPqJbI9dkxt/con3YS7qC0lH4Zr8GRuR5KiY2 +eY8fTpkdso8MDhz/yV3A/ZAQprE38806JG60hZC/gLkMjNWb1sjxVj8agIl6qeIb +MlEsPvLfe/ZdeikZjuXIvTZxi11Mwh0/rViizz1wTaZQmCXcI/m4WEEIcb9PuISg +jwBUFfyRbVinljvrS5YnzWuioYasDXxU5mZMZl+QviGaAkYt5IPCgLnPSz7ofzwB +7I9ezX/SKEIBlYrilz0QIX32nRzFNKHsLA4KUiwSVXAkPcvCFDVDXSdOvsC9qnyW +5/yeYa1E0wCXAgMBAAGjYzBhMB0GA1UdDgQWBBQGmpsfU33x9aTI04Y+oXNZtPdE +ITAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFAaamx9TffH1pMjThj6hc1m0 +90QhMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAqgVutt0Vyb+z +xiD2BkewhpMl0425yAA/l/VSJ4hxyXT968pk21vvHl26v9Hr7lxpuhbI87mP0zYu +QEkHDVneixCwSQXi/5E/S7fdAo74gShczNxtr18UnH1YeA32gAm56Q6XKRm4t+v4 +FstVEuTGfbvE7Pi1HE4+Z7/FXxttbUcoqgRYYdZ2vyJ/0Adqp2RT8JeNnYA/u8EH +22Wv5psymsNUk8QcCMNE+3tjEUPRahphanltkE8pjkcFwRJpadbGNjHh/PqAulxP +xOu3Mqz4dWEX1xAZufHSCe96Qp1bWgvUxpVOKs7/B9dPfhgGiPEZtdmYu65xxBzn +dFlY7wyJz4sfdZMaBBSSSFCp61cpABbjNhzI+L/wM9VBD8TMPN3pM0MBkRArHtG5 +Xc0yGYuPjCB31yLEQtyEFpslbei0VXF/sHyz03FJuc9SpAQ/3D2gu68zngowYI7b +nV2UqL1g52KAdoGDDIzMMEZJ4gzSqK/rYXHv5yJiqfdcZGyfFoxnNidF9Ql7v/YQ +CvGwjVRDjAS6oz/v4jXH+XTgbzRB0L9zZVcg+ZtnemZoJE6AZb0QmQZZ8mWvuMZH +u/2QeItBcy6vVR/cO5JyboTT0GFMDcx2V+IthSIVNg3rAZ3r2OvEhJn7wAzMMujj +d9qDRIueVSjAi1jTkD5OGwDxFa2DK5o= +-----END CERTIFICATE----- + +# Issuer: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Subject: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Label: "HARICA TLS RSA Root CA 2021" +# Serial: 76817823531813593706434026085292783742 +# MD5 Fingerprint: 65:47:9b:58:86:dd:2c:f0:fc:a2:84:1f:1e:96:c4:91 +# SHA1 Fingerprint: 
02:2d:05:82:fa:88:ce:14:0c:06:79:de:7f:14:10:e9:45:d7:a5:6d +# SHA256 Fingerprint: d9:5d:0e:8e:da:79:52:5b:f9:be:b1:1b:14:d2:10:0d:32:94:98:5f:0c:62:d9:fa:bd:9c:d9:99:ec:cb:7b:1d +-----BEGIN CERTIFICATE----- +MIIFpDCCA4ygAwIBAgIQOcqTHO9D88aOk8f0ZIk4fjANBgkqhkiG9w0BAQsFADBs +MQswCQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBSU0Eg +Um9vdCBDQSAyMDIxMB4XDTIxMDIxOTEwNTUzOFoXDTQ1MDIxMzEwNTUzN1owbDEL +MAkGA1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNl +YXJjaCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgUlNBIFJv +b3QgQ0EgMjAyMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAIvC569l +mwVnlskNJLnQDmT8zuIkGCyEf3dRywQRNrhe7Wlxp57kJQmXZ8FHws+RFjZiPTgE +4VGC/6zStGndLuwRo0Xua2s7TL+MjaQenRG56Tj5eg4MmOIjHdFOY9TnuEFE+2uv +a9of08WRiFukiZLRgeaMOVig1mlDqa2YUlhu2wr7a89o+uOkXjpFc5gH6l8Cct4M +pbOfrqkdtx2z/IpZ525yZa31MJQjB/OCFks1mJxTuy/K5FrZx40d/JiZ+yykgmvw +Kh+OC19xXFyuQnspiYHLA6OZyoieC0AJQTPb5lh6/a6ZcMBaD9YThnEvdmn8kN3b +LW7R8pv1GmuebxWMevBLKKAiOIAkbDakO/IwkfN4E8/BPzWr8R0RI7VDIp4BkrcY +AuUR0YLbFQDMYTfBKnya4dC6s1BG7oKsnTH4+yPiAwBIcKMJJnkVU2DzOFytOOqB +AGMUuTNe3QvboEUHGjMJ+E20pwKmafTCWQWIZYVWrkvL4N48fS0ayOn7H6NhStYq +E613TBoYm5EPWNgGVMWX+Ko/IIqmhaZ39qb8HOLubpQzKoNQhArlT4b4UEV4AIHr +W2jjJo3Me1xR9BQsQL4aYB16cmEdH2MtiKrOokWQCPxrvrNQKlr9qEgYRtaQQJKQ +CoReaDH46+0N0x3GfZkYVVYnZS6NRcUk7M7jAgMBAAGjQjBAMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFApII6ZgpJIKM+qTW8VX6iVNvRLuMA4GA1UdDwEB/wQE +AwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAPpBIqm5iFSVmewzVjIuJndftTgfvnNAU +X15QvWiWkKQUEapobQk1OUAJ2vQJLDSle1mESSmXdMgHHkdt8s4cUCbjnj1AUz/3 +f5Z2EMVGpdAgS1D0NTsY9FVqQRtHBmg8uwkIYtlfVUKqrFOFrJVWNlar5AWMxaja +H6NpvVMPxP/cyuN+8kyIhkdGGvMA9YCRotxDQpSbIPDRzbLrLFPCU3hKTwSUQZqP +JzLB5UkZv/HywouoCjkxKLR9YjYsTewfM7Z+d21+UPCfDtcRj88YxeMn/ibvBZ3P +zzfF0HvaO7AWhAw6k9a+F9sPPg4ZeAnHqQJyIkv3N3a6dcSFA1pj1bF1BcK5vZSt +jBWZp5N99sXzqnTPBIWUmAD04vnKJGW/4GKvyMX6ssmeVkjaef2WdhW+o45WxLM0 +/L5H9MG0qPzVMIho7suuyWPEdr6sOBjhXlzPrjoiUevRi7PzKzMHVIf6tLITe7pT 
+BGIBnfHAT+7hOtSLIBD6Alfm78ELt5BGnBkpjNxvoEppaZS3JGWg/6w/zgH7IS79 +aPib8qXPMThcFarmlwDB31qlpzmq6YR/PFGoOtmUW4y/Twhx5duoXNTSpv4Ao8YW +xw/ogM4cKGR0GQjTQuPOAF1/sdwTsOEFy9EgqoZ0njnnkf3/W9b3raYvAwtt41dU +63ZTGI0RmLo= +-----END CERTIFICATE----- + +# Issuer: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Subject: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Label: "HARICA TLS ECC Root CA 2021" +# Serial: 137515985548005187474074462014555733966 +# MD5 Fingerprint: ae:f7:4c:e5:66:35:d1:b7:9b:8c:22:93:74:d3:4b:b0 +# SHA1 Fingerprint: bc:b0:c1:9d:e9:98:92:70:19:38:57:e9:8d:a7:b4:5d:6e:ee:01:48 +# SHA256 Fingerprint: 3f:99:cc:47:4a:cf:ce:4d:fe:d5:87:94:66:5e:47:8d:15:47:73:9f:2e:78:0f:1b:b4:ca:9b:13:30:97:d4:01 +-----BEGIN CERTIFICATE----- +MIICVDCCAdugAwIBAgIQZ3SdjXfYO2rbIvT/WeK/zjAKBggqhkjOPQQDAzBsMQsw +CQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJlc2Vh +cmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBFQ0MgUm9v +dCBDQSAyMDIxMB4XDTIxMDIxOTExMDExMFoXDTQ1MDIxMzExMDEwOVowbDELMAkG +A1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJj +aCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgRUNDIFJvb3Qg +Q0EgMjAyMTB2MBAGByqGSM49AgEGBSuBBAAiA2IABDgI/rGgltJ6rK9JOtDA4MM7 +KKrxcm1lAEeIhPyaJmuqS7psBAqIXhfyVYf8MLA04jRYVxqEU+kw2anylnTDUR9Y +STHMmE5gEYd103KUkE+bECUqqHgtvpBBWJAVcqeht6NCMEAwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUyRtTgRL+BNUW0aq8mm+3oJUZbsowDgYDVR0PAQH/BAQD +AgGGMAoGCCqGSM49BAMDA2cAMGQCMBHervjcToiwqfAircJRQO9gcS3ujwLEXQNw +SaSS6sUUiHCm0w2wqsosQJz76YJumgIwK0eaB8bRwoF8yguWGEEbo/QwCZ61IygN +nxS2PFOiTAZpffpskcYqSUXm7LcT4Tps +-----END CERTIFICATE----- diff --git a/.venv/lib/python3.9/site-packages/certifi/core.py b/.venv/lib/python3.9/site-packages/certifi/core.py new file mode 100644 index 0000000..5d2b8cd --- /dev/null +++ b/.venv/lib/python3.9/site-packages/certifi/core.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- + +""" +certifi.py +~~~~~~~~~~ + +This module returns the 
installation location of cacert.pem or its contents. +""" +import os + +try: + from importlib.resources import path as get_path, read_text + + _CACERT_CTX = None + _CACERT_PATH = None + + def where(): + # This is slightly terrible, but we want to delay extracting the file + # in cases where we're inside of a zipimport situation until someone + # actually calls where(), but we don't want to re-extract the file + # on every call of where(), so we'll do it once then store it in a + # global variable. + global _CACERT_CTX + global _CACERT_PATH + if _CACERT_PATH is None: + # This is slightly janky, the importlib.resources API wants you to + # manage the cleanup of this file, so it doesn't actually return a + # path, it returns a context manager that will give you the path + # when you enter it and will do any cleanup when you leave it. In + # the common case of not needing a temporary file, it will just + # return the file system location and the __exit__() is a no-op. + # + # We also have to hold onto the actual context manager, because + # it will do the cleanup whenever it gets garbage collected, so + # we will also store that at the global level as well. + _CACERT_CTX = get_path("certifi", "cacert.pem") + _CACERT_PATH = str(_CACERT_CTX.__enter__()) + + return _CACERT_PATH + + +except ImportError: + # This fallback will work for Python versions prior to 3.7 that lack the + # importlib.resources module but relies on the existing `where` function + # so won't address issues with environments like PyOxidizer that don't set + # __file__ on modules. + def read_text(_module, _path, encoding="ascii"): + with open(where(), "r", encoding=encoding) as data: + return data.read() + + # If we don't have importlib.resources, then we will just do the old logic + # of assuming we're on the filesystem and munge the path directly. 
+ def where(): + f = os.path.dirname(__file__) + + return os.path.join(f, "cacert.pem") + + +def contents(): + return read_text("certifi", "cacert.pem", encoding="ascii") diff --git a/.venv/lib/python3.9/site-packages/cffi-1.15.0.dist-info/INSTALLER b/.venv/lib/python3.9/site-packages/cffi-1.15.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi-1.15.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.9/site-packages/cffi-1.15.0.dist-info/LICENSE b/.venv/lib/python3.9/site-packages/cffi-1.15.0.dist-info/LICENSE new file mode 100644 index 0000000..29225ee --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi-1.15.0.dist-info/LICENSE @@ -0,0 +1,26 @@ + +Except when otherwise stated (look for LICENSE files in directories or +information at the beginning of each file) all software and +documentation is licensed as follows: + + The MIT License + + Permission is hereby granted, free of charge, to any person + obtaining a copy of this software and associated documentation + files (the "Software"), to deal in the Software without + restriction, including without limitation the rights to use, + copy, modify, merge, publish, distribute, sublicense, and/or + sell copies of the Software, and to permit persons to whom the + Software is furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included + in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL + THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS IN THE SOFTWARE. + diff --git a/.venv/lib/python3.9/site-packages/cffi-1.15.0.dist-info/METADATA b/.venv/lib/python3.9/site-packages/cffi-1.15.0.dist-info/METADATA new file mode 100644 index 0000000..b708a09 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi-1.15.0.dist-info/METADATA @@ -0,0 +1,37 @@ +Metadata-Version: 2.1 +Name: cffi +Version: 1.15.0 +Summary: Foreign Function Interface for Python calling C code. +Home-page: http://cffi.readthedocs.org +Author: Armin Rigo, Maciej Fijalkowski +Author-email: python-cffi@googlegroups.com +License: MIT +Platform: UNKNOWN +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: License :: OSI Approved :: MIT License +License-File: LICENSE +Requires-Dist: pycparser + + +CFFI +==== + +Foreign Function Interface for Python calling C code. +Please see the `Documentation `_. 
+ +Contact +------- + +`Mailing list `_ + + diff --git a/.venv/lib/python3.9/site-packages/cffi-1.15.0.dist-info/RECORD b/.venv/lib/python3.9/site-packages/cffi-1.15.0.dist-info/RECORD new file mode 100644 index 0000000..332c504 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi-1.15.0.dist-info/RECORD @@ -0,0 +1,44 @@ +_cffi_backend.cpython-39-darwin.so,sha256=99hBWvHRpYJ9S3sR2sU8kloE3epgG8mUWv-t70ulnQY,202488 +cffi-1.15.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +cffi-1.15.0.dist-info/LICENSE,sha256=BLgPWwd7vtaICM_rreteNSPyqMmpZJXFh72W3x6sKjM,1294 +cffi-1.15.0.dist-info/METADATA,sha256=eo1521k3Cf0aEkta5nFxpMpLkd-kufSatsH1IAJqd-Y,1164 +cffi-1.15.0.dist-info/RECORD,, +cffi-1.15.0.dist-info/WHEEL,sha256=JIE30nfOWUuazI4Vcfiuv_cYm-SkZCh6YOqQQjhm90A,109 +cffi-1.15.0.dist-info/entry_points.txt,sha256=Q9f5C9IpjYxo0d2PK9eUcnkgxHc9pHWwjEMaANPKNCI,76 +cffi-1.15.0.dist-info/top_level.txt,sha256=rE7WR3rZfNKxWI9-jn6hsHCAl7MDkB-FmuQbxWjFehQ,19 +cffi/__init__.py,sha256=Aen4-pzAyn3fJtNenOUS8UKh9UIMqvznzJl1aJC8rEQ,513 +cffi/__pycache__/__init__.cpython-39.pyc,, +cffi/__pycache__/api.cpython-39.pyc,, +cffi/__pycache__/backend_ctypes.cpython-39.pyc,, +cffi/__pycache__/cffi_opcode.cpython-39.pyc,, +cffi/__pycache__/commontypes.cpython-39.pyc,, +cffi/__pycache__/cparser.cpython-39.pyc,, +cffi/__pycache__/error.cpython-39.pyc,, +cffi/__pycache__/ffiplatform.cpython-39.pyc,, +cffi/__pycache__/lock.cpython-39.pyc,, +cffi/__pycache__/model.cpython-39.pyc,, +cffi/__pycache__/pkgconfig.cpython-39.pyc,, +cffi/__pycache__/recompiler.cpython-39.pyc,, +cffi/__pycache__/setuptools_ext.cpython-39.pyc,, +cffi/__pycache__/vengine_cpy.cpython-39.pyc,, +cffi/__pycache__/vengine_gen.cpython-39.pyc,, +cffi/__pycache__/verifier.cpython-39.pyc,, +cffi/_cffi_errors.h,sha256=zQXt7uR_m8gUW-fI2hJg0KoSkJFwXv8RGUkEDZ177dQ,3908 +cffi/_cffi_include.h,sha256=tKnA1rdSoPHp23FnDL1mDGwFo-Uj6fXfA6vA6kcoEUc,14800 
+cffi/_embedding.h,sha256=fw4QAZY0CtulrPth_owWtl3I6l9F7ElloEmuKxs97CY,17581 +cffi/api.py,sha256=yxJalIePbr1mz_WxAHokSwyP5CVYde44m-nolHnbJNo,42064 +cffi/backend_ctypes.py,sha256=h5ZIzLc6BFVXnGyc9xPqZWUS7qGy7yFSDqXe68Sa8z4,42454 +cffi/cffi_opcode.py,sha256=v9RdD_ovA8rCtqsC95Ivki5V667rAOhGgs3fb2q9xpM,5724 +cffi/commontypes.py,sha256=QS4uxCDI7JhtTyjh1hlnCA-gynmaszWxJaRRLGkJa1A,2689 +cffi/cparser.py,sha256=rO_1pELRw1gI1DE1m4gi2ik5JMfpxouAACLXpRPlVEA,44231 +cffi/error.py,sha256=v6xTiS4U0kvDcy4h_BDRo5v39ZQuj-IMRYLv5ETddZs,877 +cffi/ffiplatform.py,sha256=HMXqR8ks2wtdsNxGaWpQ_PyqIvtiuos_vf1qKCy-cwg,4046 +cffi/lock.py,sha256=l9TTdwMIMpi6jDkJGnQgE9cvTIR7CAntIJr8EGHt3pY,747 +cffi/model.py,sha256=_GH_UF1Rn9vC4AvmgJm6qj7RUXXG3eqKPc8bPxxyBKE,21768 +cffi/parse_c_type.h,sha256=OdwQfwM9ktq6vlCB43exFQmxDBtj2MBNdK8LYl15tjw,5976 +cffi/pkgconfig.py,sha256=LP1w7vmWvmKwyqLaU1Z243FOWGNQMrgMUZrvgFuOlco,4374 +cffi/recompiler.py,sha256=7OBdKr0dAzRnEbgQvjCAikoFAygjTvitaJHdRGc1k24,64568 +cffi/setuptools_ext.py,sha256=RUR17N5f8gpiQBBlXL34P9FtOu1mhHIaAf3WJlg5S4I,8931 +cffi/vengine_cpy.py,sha256=YglN8YS-UaHEv2k2cxgotNWE87dHX20-68EyKoiKUYA,43320 +cffi/vengine_gen.py,sha256=5dX7s1DU6pTBOMI6oTVn_8Bnmru_lj932B6b4v29Hlg,26684 +cffi/verifier.py,sha256=ESwuXWXtXrKEagCKveLRDjFzLNCyaKdqAgAlKREcyhY,11253 diff --git a/.venv/lib/python3.9/site-packages/cffi-1.15.0.dist-info/WHEEL b/.venv/lib/python3.9/site-packages/cffi-1.15.0.dist-info/WHEEL new file mode 100644 index 0000000..9c3644e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi-1.15.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.0) +Root-Is-Purelib: false +Tag: cp39-cp39-macosx_10_9_x86_64 + diff --git a/.venv/lib/python3.9/site-packages/cffi-1.15.0.dist-info/entry_points.txt b/.venv/lib/python3.9/site-packages/cffi-1.15.0.dist-info/entry_points.txt new file mode 100644 index 0000000..eee7e0f --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi-1.15.0.dist-info/entry_points.txt @@ -0,0 +1,3 @@ 
+[distutils.setup_keywords] +cffi_modules = cffi.setuptools_ext:cffi_modules + diff --git a/.venv/lib/python3.9/site-packages/cffi-1.15.0.dist-info/top_level.txt b/.venv/lib/python3.9/site-packages/cffi-1.15.0.dist-info/top_level.txt new file mode 100644 index 0000000..f645779 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi-1.15.0.dist-info/top_level.txt @@ -0,0 +1,2 @@ +_cffi_backend +cffi diff --git a/.venv/lib/python3.9/site-packages/cffi/__init__.py b/.venv/lib/python3.9/site-packages/cffi/__init__.py new file mode 100644 index 0000000..82a9618 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi/__init__.py @@ -0,0 +1,14 @@ +__all__ = ['FFI', 'VerificationError', 'VerificationMissing', 'CDefError', + 'FFIError'] + +from .api import FFI +from .error import CDefError, FFIError, VerificationError, VerificationMissing +from .error import PkgConfigError + +__version__ = "1.15.0" +__version_info__ = (1, 15, 0) + +# The verifier module file names are based on the CRC32 of a string that +# contains the following version number. It may be older than __version__ +# if nothing is clearly incompatible. +__version_verifier_modules__ = "0.8.6" diff --git a/.venv/lib/python3.9/site-packages/cffi/_cffi_errors.h b/.venv/lib/python3.9/site-packages/cffi/_cffi_errors.h new file mode 100644 index 0000000..158e059 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi/_cffi_errors.h @@ -0,0 +1,149 @@ +#ifndef CFFI_MESSAGEBOX +# ifdef _MSC_VER +# define CFFI_MESSAGEBOX 1 +# else +# define CFFI_MESSAGEBOX 0 +# endif +#endif + + +#if CFFI_MESSAGEBOX +/* Windows only: logic to take the Python-CFFI embedding logic + initialization errors and display them in a background thread + with MessageBox. The idea is that if the whole program closes + as a result of this problem, then likely it is already a console + program and you can read the stderr output in the console too. 
+ If it is not a console program, then it will likely show its own + dialog to complain, or generally not abruptly close, and for this + case the background thread should stay alive. +*/ +static void *volatile _cffi_bootstrap_text; + +static PyObject *_cffi_start_error_capture(void) +{ + PyObject *result = NULL; + PyObject *x, *m, *bi; + + if (InterlockedCompareExchangePointer(&_cffi_bootstrap_text, + (void *)1, NULL) != NULL) + return (PyObject *)1; + + m = PyImport_AddModule("_cffi_error_capture"); + if (m == NULL) + goto error; + + result = PyModule_GetDict(m); + if (result == NULL) + goto error; + +#if PY_MAJOR_VERSION >= 3 + bi = PyImport_ImportModule("builtins"); +#else + bi = PyImport_ImportModule("__builtin__"); +#endif + if (bi == NULL) + goto error; + PyDict_SetItemString(result, "__builtins__", bi); + Py_DECREF(bi); + + x = PyRun_String( + "import sys\n" + "class FileLike:\n" + " def write(self, x):\n" + " try:\n" + " of.write(x)\n" + " except: pass\n" + " self.buf += x\n" + " def flush(self):\n" + " pass\n" + "fl = FileLike()\n" + "fl.buf = ''\n" + "of = sys.stderr\n" + "sys.stderr = fl\n" + "def done():\n" + " sys.stderr = of\n" + " return fl.buf\n", /* make sure the returned value stays alive */ + Py_file_input, + result, result); + Py_XDECREF(x); + + error: + if (PyErr_Occurred()) + { + PyErr_WriteUnraisable(Py_None); + PyErr_Clear(); + } + return result; +} + +#pragma comment(lib, "user32.lib") + +static DWORD WINAPI _cffi_bootstrap_dialog(LPVOID ignored) +{ + Sleep(666); /* may be interrupted if the whole process is closing */ +#if PY_MAJOR_VERSION >= 3 + MessageBoxW(NULL, (wchar_t *)_cffi_bootstrap_text, + L"Python-CFFI error", + MB_OK | MB_ICONERROR); +#else + MessageBoxA(NULL, (char *)_cffi_bootstrap_text, + "Python-CFFI error", + MB_OK | MB_ICONERROR); +#endif + _cffi_bootstrap_text = NULL; + return 0; +} + +static void _cffi_stop_error_capture(PyObject *ecap) +{ + PyObject *s; + void *text; + + if (ecap == (PyObject *)1) + return; + + if (ecap 
== NULL) + goto error; + + s = PyRun_String("done()", Py_eval_input, ecap, ecap); + if (s == NULL) + goto error; + + /* Show a dialog box, but in a background thread, and + never show multiple dialog boxes at once. */ +#if PY_MAJOR_VERSION >= 3 + text = PyUnicode_AsWideCharString(s, NULL); +#else + text = PyString_AsString(s); +#endif + + _cffi_bootstrap_text = text; + + if (text != NULL) + { + HANDLE h; + h = CreateThread(NULL, 0, _cffi_bootstrap_dialog, + NULL, 0, NULL); + if (h != NULL) + CloseHandle(h); + } + /* decref the string, but it should stay alive as 'fl.buf' + in the small module above. It will really be freed only if + we later get another similar error. So it's a leak of at + most one copy of the small module. That's fine for this + situation which is usually a "fatal error" anyway. */ + Py_DECREF(s); + PyErr_Clear(); + return; + + error: + _cffi_bootstrap_text = NULL; + PyErr_Clear(); +} + +#else + +static PyObject *_cffi_start_error_capture(void) { return NULL; } +static void _cffi_stop_error_capture(PyObject *ecap) { } + +#endif diff --git a/.venv/lib/python3.9/site-packages/cffi/_cffi_include.h b/.venv/lib/python3.9/site-packages/cffi/_cffi_include.h new file mode 100644 index 0000000..e4c0a67 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi/_cffi_include.h @@ -0,0 +1,385 @@ +#define _CFFI_ + +/* We try to define Py_LIMITED_API before including Python.h. + + Mess: we can only define it if Py_DEBUG, Py_TRACE_REFS and + Py_REF_DEBUG are not defined. This is a best-effort approximation: + we can learn about Py_DEBUG from pyconfig.h, but it is unclear if + the same works for the other two macros. Py_DEBUG implies them, + but not the other way around. + + The implementation is messy (issue #350): on Windows, with _MSC_VER, + we have to define Py_LIMITED_API even before including pyconfig.h. + In that case, we guess what pyconfig.h will do to the macros above, + and check our guess after the #include. 
+ + Note that on Windows, with CPython 3.x, you need >= 3.5 and virtualenv + version >= 16.0.0. With older versions of either, you don't get a + copy of PYTHON3.DLL in the virtualenv. We can't check the version of + CPython *before* we even include pyconfig.h. ffi.set_source() puts + a ``#define _CFFI_NO_LIMITED_API'' at the start of this file if it is + running on Windows < 3.5, as an attempt at fixing it, but that's + arguably wrong because it may not be the target version of Python. + Still better than nothing I guess. As another workaround, you can + remove the definition of Py_LIMITED_API here. + + See also 'py_limited_api' in cffi/setuptools_ext.py. +*/ +#if !defined(_CFFI_USE_EMBEDDING) && !defined(Py_LIMITED_API) +# ifdef _MSC_VER +# if !defined(_DEBUG) && !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG) && !defined(_CFFI_NO_LIMITED_API) +# define Py_LIMITED_API +# endif +# include + /* sanity-check: Py_LIMITED_API will cause crashes if any of these + are also defined. Normally, the Python file PC/pyconfig.h does not + cause any of these to be defined, with the exception that _DEBUG + causes Py_DEBUG. Double-check that. 
*/ +# ifdef Py_LIMITED_API +# if defined(Py_DEBUG) +# error "pyconfig.h unexpectedly defines Py_DEBUG, but Py_LIMITED_API is set" +# endif +# if defined(Py_TRACE_REFS) +# error "pyconfig.h unexpectedly defines Py_TRACE_REFS, but Py_LIMITED_API is set" +# endif +# if defined(Py_REF_DEBUG) +# error "pyconfig.h unexpectedly defines Py_REF_DEBUG, but Py_LIMITED_API is set" +# endif +# endif +# else +# include +# if !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG) && !defined(_CFFI_NO_LIMITED_API) +# define Py_LIMITED_API +# endif +# endif +#endif + +#include +#ifdef __cplusplus +extern "C" { +#endif +#include +#include "parse_c_type.h" + +/* this block of #ifs should be kept exactly identical between + c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py + and cffi/_cffi_include.h */ +#if defined(_MSC_VER) +# include /* for alloca() */ +# if _MSC_VER < 1600 /* MSVC < 2010 */ + typedef __int8 int8_t; + typedef __int16 int16_t; + typedef __int32 int32_t; + typedef __int64 int64_t; + typedef unsigned __int8 uint8_t; + typedef unsigned __int16 uint16_t; + typedef unsigned __int32 uint32_t; + typedef unsigned __int64 uint64_t; + typedef __int8 int_least8_t; + typedef __int16 int_least16_t; + typedef __int32 int_least32_t; + typedef __int64 int_least64_t; + typedef unsigned __int8 uint_least8_t; + typedef unsigned __int16 uint_least16_t; + typedef unsigned __int32 uint_least32_t; + typedef unsigned __int64 uint_least64_t; + typedef __int8 int_fast8_t; + typedef __int16 int_fast16_t; + typedef __int32 int_fast32_t; + typedef __int64 int_fast64_t; + typedef unsigned __int8 uint_fast8_t; + typedef unsigned __int16 uint_fast16_t; + typedef unsigned __int32 uint_fast32_t; + typedef unsigned __int64 uint_fast64_t; + typedef __int64 intmax_t; + typedef unsigned __int64 uintmax_t; +# else +# include +# endif +# if _MSC_VER < 1800 /* MSVC < 2013 */ +# ifndef __cplusplus + typedef unsigned char _Bool; +# endif +# endif +#else +# include +# if (defined 
(__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux) +# include +# endif +#endif + +#ifdef __GNUC__ +# define _CFFI_UNUSED_FN __attribute__((unused)) +#else +# define _CFFI_UNUSED_FN /* nothing */ +#endif + +#ifdef __cplusplus +# ifndef _Bool + typedef bool _Bool; /* semi-hackish: C++ has no _Bool; bool is builtin */ +# endif +#endif + +/********** CPython-specific section **********/ +#ifndef PYPY_VERSION + + +#if PY_MAJOR_VERSION >= 3 +# define PyInt_FromLong PyLong_FromLong +#endif + +#define _cffi_from_c_double PyFloat_FromDouble +#define _cffi_from_c_float PyFloat_FromDouble +#define _cffi_from_c_long PyInt_FromLong +#define _cffi_from_c_ulong PyLong_FromUnsignedLong +#define _cffi_from_c_longlong PyLong_FromLongLong +#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong +#define _cffi_from_c__Bool PyBool_FromLong + +#define _cffi_to_c_double PyFloat_AsDouble +#define _cffi_to_c_float PyFloat_AsDouble + +#define _cffi_from_c_int(x, type) \ + (((type)-1) > 0 ? /* unsigned */ \ + (sizeof(type) < sizeof(long) ? \ + PyInt_FromLong((long)x) : \ + sizeof(type) == sizeof(long) ? \ + PyLong_FromUnsignedLong((unsigned long)x) : \ + PyLong_FromUnsignedLongLong((unsigned long long)x)) : \ + (sizeof(type) <= sizeof(long) ? \ + PyInt_FromLong((long)x) : \ + PyLong_FromLongLong((long long)x))) + +#define _cffi_to_c_int(o, type) \ + ((type)( \ + sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \ + : (type)_cffi_to_c_i8(o)) : \ + sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \ + : (type)_cffi_to_c_i16(o)) : \ + sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o) \ + : (type)_cffi_to_c_i32(o)) : \ + sizeof(type) == 8 ? (((type)-1) > 0 ? 
(type)_cffi_to_c_u64(o) \ + : (type)_cffi_to_c_i64(o)) : \ + (Py_FatalError("unsupported size for type " #type), (type)0))) + +#define _cffi_to_c_i8 \ + ((int(*)(PyObject *))_cffi_exports[1]) +#define _cffi_to_c_u8 \ + ((int(*)(PyObject *))_cffi_exports[2]) +#define _cffi_to_c_i16 \ + ((int(*)(PyObject *))_cffi_exports[3]) +#define _cffi_to_c_u16 \ + ((int(*)(PyObject *))_cffi_exports[4]) +#define _cffi_to_c_i32 \ + ((int(*)(PyObject *))_cffi_exports[5]) +#define _cffi_to_c_u32 \ + ((unsigned int(*)(PyObject *))_cffi_exports[6]) +#define _cffi_to_c_i64 \ + ((long long(*)(PyObject *))_cffi_exports[7]) +#define _cffi_to_c_u64 \ + ((unsigned long long(*)(PyObject *))_cffi_exports[8]) +#define _cffi_to_c_char \ + ((int(*)(PyObject *))_cffi_exports[9]) +#define _cffi_from_c_pointer \ + ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[10]) +#define _cffi_to_c_pointer \ + ((char *(*)(PyObject *, struct _cffi_ctypedescr *))_cffi_exports[11]) +#define _cffi_get_struct_layout \ + not used any more +#define _cffi_restore_errno \ + ((void(*)(void))_cffi_exports[13]) +#define _cffi_save_errno \ + ((void(*)(void))_cffi_exports[14]) +#define _cffi_from_c_char \ + ((PyObject *(*)(char))_cffi_exports[15]) +#define _cffi_from_c_deref \ + ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[16]) +#define _cffi_to_c \ + ((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[17]) +#define _cffi_from_c_struct \ + ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[18]) +#define _cffi_to_c_wchar_t \ + ((_cffi_wchar_t(*)(PyObject *))_cffi_exports[19]) +#define _cffi_from_c_wchar_t \ + ((PyObject *(*)(_cffi_wchar_t))_cffi_exports[20]) +#define _cffi_to_c_long_double \ + ((long double(*)(PyObject *))_cffi_exports[21]) +#define _cffi_to_c__Bool \ + ((_Bool(*)(PyObject *))_cffi_exports[22]) +#define _cffi_prepare_pointer_call_argument \ + ((Py_ssize_t(*)(struct _cffi_ctypedescr *, \ + PyObject *, char **))_cffi_exports[23]) +#define 
_cffi_convert_array_from_object \ + ((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[24]) +#define _CFFI_CPIDX 25 +#define _cffi_call_python \ + ((void(*)(struct _cffi_externpy_s *, char *))_cffi_exports[_CFFI_CPIDX]) +#define _cffi_to_c_wchar3216_t \ + ((int(*)(PyObject *))_cffi_exports[26]) +#define _cffi_from_c_wchar3216_t \ + ((PyObject *(*)(int))_cffi_exports[27]) +#define _CFFI_NUM_EXPORTS 28 + +struct _cffi_ctypedescr; + +static void *_cffi_exports[_CFFI_NUM_EXPORTS]; + +#define _cffi_type(index) ( \ + assert((((uintptr_t)_cffi_types[index]) & 1) == 0), \ + (struct _cffi_ctypedescr *)_cffi_types[index]) + +static PyObject *_cffi_init(const char *module_name, Py_ssize_t version, + const struct _cffi_type_context_s *ctx) +{ + PyObject *module, *o_arg, *new_module; + void *raw[] = { + (void *)module_name, + (void *)version, + (void *)_cffi_exports, + (void *)ctx, + }; + + module = PyImport_ImportModule("_cffi_backend"); + if (module == NULL) + goto failure; + + o_arg = PyLong_FromVoidPtr((void *)raw); + if (o_arg == NULL) + goto failure; + + new_module = PyObject_CallMethod( + module, (char *)"_init_cffi_1_0_external_module", (char *)"O", o_arg); + + Py_DECREF(o_arg); + Py_DECREF(module); + return new_module; + + failure: + Py_XDECREF(module); + return NULL; +} + + +#ifdef HAVE_WCHAR_H +typedef wchar_t _cffi_wchar_t; +#else +typedef uint16_t _cffi_wchar_t; /* same random pick as _cffi_backend.c */ +#endif + +_CFFI_UNUSED_FN static uint16_t _cffi_to_c_char16_t(PyObject *o) +{ + if (sizeof(_cffi_wchar_t) == 2) + return (uint16_t)_cffi_to_c_wchar_t(o); + else + return (uint16_t)_cffi_to_c_wchar3216_t(o); +} + +_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char16_t(uint16_t x) +{ + if (sizeof(_cffi_wchar_t) == 2) + return _cffi_from_c_wchar_t((_cffi_wchar_t)x); + else + return _cffi_from_c_wchar3216_t((int)x); +} + +_CFFI_UNUSED_FN static int _cffi_to_c_char32_t(PyObject *o) +{ + if (sizeof(_cffi_wchar_t) == 4) + return 
(int)_cffi_to_c_wchar_t(o); + else + return (int)_cffi_to_c_wchar3216_t(o); +} + +_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char32_t(unsigned int x) +{ + if (sizeof(_cffi_wchar_t) == 4) + return _cffi_from_c_wchar_t((_cffi_wchar_t)x); + else + return _cffi_from_c_wchar3216_t((int)x); +} + +union _cffi_union_alignment_u { + unsigned char m_char; + unsigned short m_short; + unsigned int m_int; + unsigned long m_long; + unsigned long long m_longlong; + float m_float; + double m_double; + long double m_longdouble; +}; + +struct _cffi_freeme_s { + struct _cffi_freeme_s *next; + union _cffi_union_alignment_u alignment; +}; + +_CFFI_UNUSED_FN static int +_cffi_convert_array_argument(struct _cffi_ctypedescr *ctptr, PyObject *arg, + char **output_data, Py_ssize_t datasize, + struct _cffi_freeme_s **freeme) +{ + char *p; + if (datasize < 0) + return -1; + + p = *output_data; + if (p == NULL) { + struct _cffi_freeme_s *fp = (struct _cffi_freeme_s *)PyObject_Malloc( + offsetof(struct _cffi_freeme_s, alignment) + (size_t)datasize); + if (fp == NULL) + return -1; + fp->next = *freeme; + *freeme = fp; + p = *output_data = (char *)&fp->alignment; + } + memset((void *)p, 0, (size_t)datasize); + return _cffi_convert_array_from_object(p, ctptr, arg); +} + +_CFFI_UNUSED_FN static void +_cffi_free_array_arguments(struct _cffi_freeme_s *freeme) +{ + do { + void *p = (void *)freeme; + freeme = freeme->next; + PyObject_Free(p); + } while (freeme != NULL); +} + +/********** end CPython-specific section **********/ +#else +_CFFI_UNUSED_FN +static void (*_cffi_call_python_org)(struct _cffi_externpy_s *, char *); +# define _cffi_call_python _cffi_call_python_org +#endif + + +#define _cffi_array_len(array) (sizeof(array) / sizeof((array)[0])) + +#define _cffi_prim_int(size, sign) \ + ((size) == 1 ? ((sign) ? _CFFI_PRIM_INT8 : _CFFI_PRIM_UINT8) : \ + (size) == 2 ? ((sign) ? _CFFI_PRIM_INT16 : _CFFI_PRIM_UINT16) : \ + (size) == 4 ? ((sign) ? 
_CFFI_PRIM_INT32 : _CFFI_PRIM_UINT32) : \ + (size) == 8 ? ((sign) ? _CFFI_PRIM_INT64 : _CFFI_PRIM_UINT64) : \ + _CFFI__UNKNOWN_PRIM) + +#define _cffi_prim_float(size) \ + ((size) == sizeof(float) ? _CFFI_PRIM_FLOAT : \ + (size) == sizeof(double) ? _CFFI_PRIM_DOUBLE : \ + (size) == sizeof(long double) ? _CFFI__UNKNOWN_LONG_DOUBLE : \ + _CFFI__UNKNOWN_FLOAT_PRIM) + +#define _cffi_check_int(got, got_nonpos, expected) \ + ((got_nonpos) == (expected <= 0) && \ + (got) == (unsigned long long)expected) + +#ifdef MS_WIN32 +# define _cffi_stdcall __stdcall +#else +# define _cffi_stdcall /* nothing */ +#endif + +#ifdef __cplusplus +} +#endif diff --git a/.venv/lib/python3.9/site-packages/cffi/_embedding.h b/.venv/lib/python3.9/site-packages/cffi/_embedding.h new file mode 100644 index 0000000..e863d85 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi/_embedding.h @@ -0,0 +1,527 @@ + +/***** Support code for embedding *****/ + +#ifdef __cplusplus +extern "C" { +#endif + + +#if defined(_WIN32) +# define CFFI_DLLEXPORT __declspec(dllexport) +#elif defined(__GNUC__) +# define CFFI_DLLEXPORT __attribute__((visibility("default"))) +#else +# define CFFI_DLLEXPORT /* nothing */ +#endif + + +/* There are two global variables of type _cffi_call_python_fnptr: + + * _cffi_call_python, which we declare just below, is the one called + by ``extern "Python"`` implementations. + + * _cffi_call_python_org, which on CPython is actually part of the + _cffi_exports[] array, is the function pointer copied from + _cffi_backend. + + After initialization is complete, both are equal. However, the + first one remains equal to &_cffi_start_and_call_python until the + very end of initialization, when we are (or should be) sure that + concurrent threads also see a completely initialized world, and + only then is it changed. 
+*/ +#undef _cffi_call_python +typedef void (*_cffi_call_python_fnptr)(struct _cffi_externpy_s *, char *); +static void _cffi_start_and_call_python(struct _cffi_externpy_s *, char *); +static _cffi_call_python_fnptr _cffi_call_python = &_cffi_start_and_call_python; + + +#ifndef _MSC_VER + /* --- Assuming a GCC not infinitely old --- */ +# define cffi_compare_and_swap(l,o,n) __sync_bool_compare_and_swap(l,o,n) +# define cffi_write_barrier() __sync_synchronize() +# if !defined(__amd64__) && !defined(__x86_64__) && \ + !defined(__i386__) && !defined(__i386) +# define cffi_read_barrier() __sync_synchronize() +# else +# define cffi_read_barrier() (void)0 +# endif +#else + /* --- Windows threads version --- */ +# include +# define cffi_compare_and_swap(l,o,n) \ + (InterlockedCompareExchangePointer(l,n,o) == (o)) +# define cffi_write_barrier() InterlockedCompareExchange(&_cffi_dummy,0,0) +# define cffi_read_barrier() (void)0 +static volatile LONG _cffi_dummy; +#endif + +#ifdef WITH_THREAD +# ifndef _MSC_VER +# include + static pthread_mutex_t _cffi_embed_startup_lock; +# else + static CRITICAL_SECTION _cffi_embed_startup_lock; +# endif + static char _cffi_embed_startup_lock_ready = 0; +#endif + +static void _cffi_acquire_reentrant_mutex(void) +{ + static void *volatile lock = NULL; + + while (!cffi_compare_and_swap(&lock, NULL, (void *)1)) { + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: pthread_mutex_init() should be very fast, and + this is only run at start-up anyway. 
*/ + } + +#ifdef WITH_THREAD + if (!_cffi_embed_startup_lock_ready) { +# ifndef _MSC_VER + pthread_mutexattr_t attr; + pthread_mutexattr_init(&attr); + pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE); + pthread_mutex_init(&_cffi_embed_startup_lock, &attr); +# else + InitializeCriticalSection(&_cffi_embed_startup_lock); +# endif + _cffi_embed_startup_lock_ready = 1; + } +#endif + + while (!cffi_compare_and_swap(&lock, (void *)1, NULL)) + ; + +#ifndef _MSC_VER + pthread_mutex_lock(&_cffi_embed_startup_lock); +#else + EnterCriticalSection(&_cffi_embed_startup_lock); +#endif +} + +static void _cffi_release_reentrant_mutex(void) +{ +#ifndef _MSC_VER + pthread_mutex_unlock(&_cffi_embed_startup_lock); +#else + LeaveCriticalSection(&_cffi_embed_startup_lock); +#endif +} + + +/********** CPython-specific section **********/ +#ifndef PYPY_VERSION + +#include "_cffi_errors.h" + + +#define _cffi_call_python_org _cffi_exports[_CFFI_CPIDX] + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(void); /* forward */ + +static void _cffi_py_initialize(void) +{ + /* XXX use initsigs=0, which "skips initialization registration of + signal handlers, which might be useful when Python is + embedded" according to the Python docs. But review and think + if it should be a user-controllable setting. + + XXX we should also give a way to write errors to a buffer + instead of to stderr. + + XXX if importing 'site' fails, CPython (any version) calls + exit(). Should we try to work around this behavior here? + */ + Py_InitializeEx(0); +} + +static int _cffi_initialize_python(void) +{ + /* This initializes Python, imports _cffi_backend, and then the + present .dll/.so is set up as a CPython C extension module. + */ + int result; + PyGILState_STATE state; + PyObject *pycode=NULL, *global_dict=NULL, *x; + PyObject *builtins; + + state = PyGILState_Ensure(); + + /* Call the initxxx() function from the present module. 
It will + create and initialize us as a CPython extension module, instead + of letting the startup Python code do it---it might reimport + the same .dll/.so and get maybe confused on some platforms. + It might also have troubles locating the .dll/.so again for all + I know. + */ + (void)_CFFI_PYTHON_STARTUP_FUNC(); + if (PyErr_Occurred()) + goto error; + + /* Now run the Python code provided to ffi.embedding_init_code(). + */ + pycode = Py_CompileString(_CFFI_PYTHON_STARTUP_CODE, + "", + Py_file_input); + if (pycode == NULL) + goto error; + global_dict = PyDict_New(); + if (global_dict == NULL) + goto error; + builtins = PyEval_GetBuiltins(); + if (builtins == NULL) + goto error; + if (PyDict_SetItemString(global_dict, "__builtins__", builtins) < 0) + goto error; + x = PyEval_EvalCode( +#if PY_MAJOR_VERSION < 3 + (PyCodeObject *) +#endif + pycode, global_dict, global_dict); + if (x == NULL) + goto error; + Py_DECREF(x); + + /* Done! Now if we've been called from + _cffi_start_and_call_python() in an ``extern "Python"``, we can + only hope that the Python code did correctly set up the + corresponding @ffi.def_extern() function. Otherwise, the + general logic of ``extern "Python"`` functions (inside the + _cffi_backend module) will find that the reference is still + missing and print an error. + */ + result = 0; + done: + Py_XDECREF(pycode); + Py_XDECREF(global_dict); + PyGILState_Release(state); + return result; + + error:; + { + /* Print as much information as potentially useful. 
+ Debugging load-time failures with embedding is not fun + */ + PyObject *ecap; + PyObject *exception, *v, *tb, *f, *modules, *mod; + PyErr_Fetch(&exception, &v, &tb); + ecap = _cffi_start_error_capture(); + f = PySys_GetObject((char *)"stderr"); + if (f != NULL && f != Py_None) { + PyFile_WriteString( + "Failed to initialize the Python-CFFI embedding logic:\n\n", f); + } + + if (exception != NULL) { + PyErr_NormalizeException(&exception, &v, &tb); + PyErr_Display(exception, v, tb); + } + Py_XDECREF(exception); + Py_XDECREF(v); + Py_XDECREF(tb); + + if (f != NULL && f != Py_None) { + PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME + "\ncompiled with cffi version: 1.15.0" + "\n_cffi_backend module: ", f); + modules = PyImport_GetModuleDict(); + mod = PyDict_GetItemString(modules, "_cffi_backend"); + if (mod == NULL) { + PyFile_WriteString("not loaded", f); + } + else { + v = PyObject_GetAttrString(mod, "__file__"); + PyFile_WriteObject(v, f, 0); + Py_XDECREF(v); + } + PyFile_WriteString("\nsys.path: ", f); + PyFile_WriteObject(PySys_GetObject((char *)"path"), f, 0); + PyFile_WriteString("\n\n", f); + } + _cffi_stop_error_capture(ecap); + } + result = -1; + goto done; +} + +#if PY_VERSION_HEX < 0x03080000 +PyAPI_DATA(char *) _PyParser_TokenNames[]; /* from CPython */ +#endif + +static int _cffi_carefully_make_gil(void) +{ + /* This does the basic initialization of Python. It can be called + completely concurrently from unrelated threads. It assumes + that we don't hold the GIL before (if it exists), and we don't + hold it afterwards. + + (What it really does used to be completely different in Python 2 + and Python 3, with the Python 2 solution avoiding the spin-lock + around the Py_InitializeEx() call. However, after recent changes + to CPython 2.7 (issue #358) it no longer works. So we use the + Python 3 solution everywhere.) + + This initializes Python by calling Py_InitializeEx(). + Important: this must not be called concurrently at all. 
+ So we use a global variable as a simple spin lock. This global + variable must be from 'libpythonX.Y.so', not from this + cffi-based extension module, because it must be shared from + different cffi-based extension modules. + + In Python < 3.8, we choose + _PyParser_TokenNames[0] as a completely arbitrary pointer value + that is never written to. The default is to point to the + string "ENDMARKER". We change it temporarily to point to the + next character in that string. (Yes, I know it's REALLY + obscure.) + + In Python >= 3.8, this string array is no longer writable, so + instead we pick PyCapsuleType.tp_version_tag. We can't change + Python < 3.8 because someone might use a mixture of cffi + embedded modules, some of which were compiled before this file + changed. + */ + +#ifdef WITH_THREAD +# if PY_VERSION_HEX < 0x03080000 + char *volatile *lock = (char *volatile *)_PyParser_TokenNames; + char *old_value, *locked_value; + + while (1) { /* spin loop */ + old_value = *lock; + locked_value = old_value + 1; + if (old_value[0] == 'E') { + assert(old_value[1] == 'N'); + if (cffi_compare_and_swap(lock, old_value, locked_value)) + break; + } + else { + assert(old_value[0] == 'N'); + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: PyEval_InitThreads() should be very fast, and + this is only run at start-up anyway. 
*/ + } + } +# else + int volatile *lock = (int volatile *)&PyCapsule_Type.tp_version_tag; + int old_value, locked_value; + assert(!(PyCapsule_Type.tp_flags & Py_TPFLAGS_HAVE_VERSION_TAG)); + + while (1) { /* spin loop */ + old_value = *lock; + locked_value = -42; + if (old_value == 0) { + if (cffi_compare_and_swap(lock, old_value, locked_value)) + break; + } + else { + assert(old_value == locked_value); + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: PyEval_InitThreads() should be very fast, and + this is only run at start-up anyway. */ + } + } +# endif +#endif + + /* call Py_InitializeEx() */ + if (!Py_IsInitialized()) { + _cffi_py_initialize(); +#if PY_VERSION_HEX < 0x03070000 + PyEval_InitThreads(); +#endif + PyEval_SaveThread(); /* release the GIL */ + /* the returned tstate must be the one that has been stored into the + autoTLSkey by _PyGILState_Init() called from Py_Initialize(). */ + } + else { +#if PY_VERSION_HEX < 0x03070000 + /* PyEval_InitThreads() is always a no-op from CPython 3.7 */ + PyGILState_STATE state = PyGILState_Ensure(); + PyEval_InitThreads(); + PyGILState_Release(state); +#endif + } + +#ifdef WITH_THREAD + /* release the lock */ + while (!cffi_compare_and_swap(lock, locked_value, old_value)) + ; +#endif + + return 0; +} + +/********** end CPython-specific section **********/ + + +#else + + +/********** PyPy-specific section **********/ + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(const void *[]); /* forward */ + +static struct _cffi_pypy_init_s { + const char *name; + void *func; /* function pointer */ + const char *code; +} _cffi_pypy_init = { + _CFFI_MODULE_NAME, + _CFFI_PYTHON_STARTUP_FUNC, + _CFFI_PYTHON_STARTUP_CODE, +}; + +extern int pypy_carefully_make_gil(const char *); +extern int pypy_init_embedded_cffi_module(int, struct _cffi_pypy_init_s *); + +static int _cffi_carefully_make_gil(void) +{ + return pypy_carefully_make_gil(_CFFI_MODULE_NAME); +} + +static int 
_cffi_initialize_python(void) +{ + return pypy_init_embedded_cffi_module(0xB011, &_cffi_pypy_init); +} + +/********** end PyPy-specific section **********/ + + +#endif + + +#ifdef __GNUC__ +__attribute__((noinline)) +#endif +static _cffi_call_python_fnptr _cffi_start_python(void) +{ + /* Delicate logic to initialize Python. This function can be + called multiple times concurrently, e.g. when the process calls + its first ``extern "Python"`` functions in multiple threads at + once. It can also be called recursively, in which case we must + ignore it. We also have to consider what occurs if several + different cffi-based extensions reach this code in parallel + threads---it is a different copy of the code, then, and we + can't have any shared global variable unless it comes from + 'libpythonX.Y.so'. + + Idea: + + * _cffi_carefully_make_gil(): "carefully" call + PyEval_InitThreads() (possibly with Py_InitializeEx() first). + + * then we use a (local) custom lock to make sure that a call to this + cffi-based extension will wait if another call to the *same* + extension is running the initialization in another thread. + It is reentrant, so that a recursive call will not block, but + only one from a different thread. + + * then we grab the GIL and (Python 2) we call Py_InitializeEx(). + At this point, concurrent calls to Py_InitializeEx() are not + possible: we have the GIL. + + * do the rest of the specific initialization, which may + temporarily release the GIL but not the custom lock. + Only release the custom lock when we are done. + */ + static char called = 0; + + if (_cffi_carefully_make_gil() != 0) + return NULL; + + _cffi_acquire_reentrant_mutex(); + + /* Here the GIL exists, but we don't have it. We're only protected + from concurrency by the reentrant mutex. */ + + /* This file only initializes the embedded module once, the first + time this is called, even if there are subinterpreters. 
*/ + if (!called) { + called = 1; /* invoke _cffi_initialize_python() only once, + but don't set '_cffi_call_python' right now, + otherwise concurrent threads won't call + this function at all (we need them to wait) */ + if (_cffi_initialize_python() == 0) { + /* now initialization is finished. Switch to the fast-path. */ + + /* We would like nobody to see the new value of + '_cffi_call_python' without also seeing the rest of the + data initialized. However, this is not possible. But + the new value of '_cffi_call_python' is the function + 'cffi_call_python()' from _cffi_backend. So: */ + cffi_write_barrier(); + /* ^^^ we put a write barrier here, and a corresponding + read barrier at the start of cffi_call_python(). This + ensures that after that read barrier, we see everything + done here before the write barrier. + */ + + assert(_cffi_call_python_org != NULL); + _cffi_call_python = (_cffi_call_python_fnptr)_cffi_call_python_org; + } + else { + /* initialization failed. Reset this to NULL, even if it was + already set to some other value. Future calls to + _cffi_start_python() are still forced to occur, and will + always return NULL from now on. */ + _cffi_call_python_org = NULL; + } + } + + _cffi_release_reentrant_mutex(); + + return (_cffi_call_python_fnptr)_cffi_call_python_org; +} + +static +void _cffi_start_and_call_python(struct _cffi_externpy_s *externpy, char *args) +{ + _cffi_call_python_fnptr fnptr; + int current_err = errno; +#ifdef _MSC_VER + int current_lasterr = GetLastError(); +#endif + fnptr = _cffi_start_python(); + if (fnptr == NULL) { + fprintf(stderr, "function %s() called, but initialization code " + "failed. Returning 0.\n", externpy->name); + memset(args, 0, externpy->size_of_result); + } +#ifdef _MSC_VER + SetLastError(current_lasterr); +#endif + errno = current_err; + + if (fnptr != NULL) + fnptr(externpy, args); +} + + +/* The cffi_start_python() function makes sure Python is initialized + and our cffi module is set up. 
It can be called manually from the + user C code. The same effect is obtained automatically from any + dll-exported ``extern "Python"`` function. This function returns + -1 if initialization failed, 0 if all is OK. */ +_CFFI_UNUSED_FN +static int cffi_start_python(void) +{ + if (_cffi_call_python == &_cffi_start_and_call_python) { + if (_cffi_start_python() == NULL) + return -1; + } + cffi_read_barrier(); + return 0; +} + +#undef cffi_compare_and_swap +#undef cffi_write_barrier +#undef cffi_read_barrier + +#ifdef __cplusplus +} +#endif diff --git a/.venv/lib/python3.9/site-packages/cffi/api.py b/.venv/lib/python3.9/site-packages/cffi/api.py new file mode 100644 index 0000000..999a8ae --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi/api.py @@ -0,0 +1,965 @@ +import sys, types +from .lock import allocate_lock +from .error import CDefError +from . import model + +try: + callable +except NameError: + # Python 3.1 + from collections import Callable + callable = lambda x: isinstance(x, Callable) + +try: + basestring +except NameError: + # Python 3.x + basestring = str + +_unspecified = object() + + + +class FFI(object): + r''' + The main top-level class that you instantiate once, or once per module. + + Example usage: + + ffi = FFI() + ffi.cdef(""" + int printf(const char *, ...); + """) + + C = ffi.dlopen(None) # standard library + -or- + C = ffi.verify() # use a C compiler: verify the decl above is right + + C.printf("hello, %s!\n", ffi.new("char[]", "world")) + ''' + + def __init__(self, backend=None): + """Create an FFI instance. The 'backend' argument is used to + select a non-default backend, mostly for tests. + """ + if backend is None: + # You need PyPy (>= 2.0 beta), or a CPython (>= 2.6) with + # _cffi_backend.so compiled. + import _cffi_backend as backend + from . import __version__ + if backend.__version__ != __version__: + # bad version! Try to be as explicit as possible. 
+ if hasattr(backend, '__file__'): + # CPython + raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. When we import the top-level '_cffi_backend' extension module, we get version %s, located in %r. The two versions should be equal; check your installation." % ( + __version__, __file__, + backend.__version__, backend.__file__)) + else: + # PyPy + raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. This interpreter comes with a built-in '_cffi_backend' module, which is version %s. The two versions should be equal; check your installation." % ( + __version__, __file__, backend.__version__)) + # (If you insist you can also try to pass the option + # 'backend=backend_ctypes.CTypesBackend()', but don't + # rely on it! It's probably not going to work well.) + + from . import cparser + self._backend = backend + self._lock = allocate_lock() + self._parser = cparser.Parser() + self._cached_btypes = {} + self._parsed_types = types.ModuleType('parsed_types').__dict__ + self._new_types = types.ModuleType('new_types').__dict__ + self._function_caches = [] + self._libraries = [] + self._cdefsources = [] + self._included_ffis = [] + self._windows_unicode = None + self._init_once_cache = {} + self._cdef_version = None + self._embedding = None + self._typecache = model.get_typecache(backend) + if hasattr(backend, 'set_ffi'): + backend.set_ffi(self) + for name in list(backend.__dict__): + if name.startswith('RTLD_'): + setattr(self, name, getattr(backend, name)) + # + with self._lock: + self.BVoidP = self._get_cached_btype(model.voidp_type) + self.BCharA = self._get_cached_btype(model.char_array_type) + if isinstance(backend, types.ModuleType): + # _cffi_backend: attach these constants to the class + if not hasattr(FFI, 'NULL'): + FFI.NULL = self.cast(self.BVoidP, 0) + FFI.CData, FFI.CType = backend._get_types() + else: + # ctypes backend: attach these constants to the instance + self.NULL = 
self.cast(self.BVoidP, 0) + self.CData, self.CType = backend._get_types() + self.buffer = backend.buffer + + def cdef(self, csource, override=False, packed=False, pack=None): + """Parse the given C source. This registers all declared functions, + types, and global variables. The functions and global variables can + then be accessed via either 'ffi.dlopen()' or 'ffi.verify()'. + The types can be used in 'ffi.new()' and other functions. + If 'packed' is specified as True, all structs declared inside this + cdef are packed, i.e. laid out without any field alignment at all. + Alternatively, 'pack' can be a small integer, and requests for + alignment greater than that are ignored (pack=1 is equivalent to + packed=True). + """ + self._cdef(csource, override=override, packed=packed, pack=pack) + + def embedding_api(self, csource, packed=False, pack=None): + self._cdef(csource, packed=packed, pack=pack, dllexport=True) + if self._embedding is None: + self._embedding = '' + + def _cdef(self, csource, override=False, **options): + if not isinstance(csource, str): # unicode, on Python 2 + if not isinstance(csource, basestring): + raise TypeError("cdef() argument must be a string") + csource = csource.encode('ascii') + with self._lock: + self._cdef_version = object() + self._parser.parse(csource, override=override, **options) + self._cdefsources.append(csource) + if override: + for cache in self._function_caches: + cache.clear() + finishlist = self._parser._recomplete + if finishlist: + self._parser._recomplete = [] + for tp in finishlist: + tp.finish_backend_type(self, finishlist) + + def dlopen(self, name, flags=0): + """Load and return a dynamic library identified by 'name'. + The standard C library can be loaded by passing None. + Note that functions and types declared by 'ffi.cdef()' are not + linked to a particular library, just like C headers; in the + library we only look for the actual (untyped) symbols. 
+ """ + if not (isinstance(name, basestring) or + name is None or + isinstance(name, self.CData)): + raise TypeError("dlopen(name): name must be a file name, None, " + "or an already-opened 'void *' handle") + with self._lock: + lib, function_cache = _make_ffi_library(self, name, flags) + self._function_caches.append(function_cache) + self._libraries.append(lib) + return lib + + def dlclose(self, lib): + """Close a library obtained with ffi.dlopen(). After this call, + access to functions or variables from the library will fail + (possibly with a segmentation fault). + """ + type(lib).__cffi_close__(lib) + + def _typeof_locked(self, cdecl): + # call me with the lock! + key = cdecl + if key in self._parsed_types: + return self._parsed_types[key] + # + if not isinstance(cdecl, str): # unicode, on Python 2 + cdecl = cdecl.encode('ascii') + # + type = self._parser.parse_type(cdecl) + really_a_function_type = type.is_raw_function + if really_a_function_type: + type = type.as_function_pointer() + btype = self._get_cached_btype(type) + result = btype, really_a_function_type + self._parsed_types[key] = result + return result + + def _typeof(self, cdecl, consider_function_as_funcptr=False): + # string -> ctype object + try: + result = self._parsed_types[cdecl] + except KeyError: + with self._lock: + result = self._typeof_locked(cdecl) + # + btype, really_a_function_type = result + if really_a_function_type and not consider_function_as_funcptr: + raise CDefError("the type %r is a function type, not a " + "pointer-to-function type" % (cdecl,)) + return btype + + def typeof(self, cdecl): + """Parse the C type given as a string and return the + corresponding object. + It can also be used on 'cdata' instance to get its C type. 
+ """ + if isinstance(cdecl, basestring): + return self._typeof(cdecl) + if isinstance(cdecl, self.CData): + return self._backend.typeof(cdecl) + if isinstance(cdecl, types.BuiltinFunctionType): + res = _builtin_function_type(cdecl) + if res is not None: + return res + if (isinstance(cdecl, types.FunctionType) + and hasattr(cdecl, '_cffi_base_type')): + with self._lock: + return self._get_cached_btype(cdecl._cffi_base_type) + raise TypeError(type(cdecl)) + + def sizeof(self, cdecl): + """Return the size in bytes of the argument. It can be a + string naming a C type, or a 'cdata' instance. + """ + if isinstance(cdecl, basestring): + BType = self._typeof(cdecl) + return self._backend.sizeof(BType) + else: + return self._backend.sizeof(cdecl) + + def alignof(self, cdecl): + """Return the natural alignment size in bytes of the C type + given as a string. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.alignof(cdecl) + + def offsetof(self, cdecl, *fields_or_indexes): + """Return the offset of the named field inside the given + structure or array, which must be given as a C type name. + You can give several field names in case of nested structures. + You can also give numeric values which correspond to array + items, in case of an array type. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._typeoffsetof(cdecl, *fields_or_indexes)[1] + + def new(self, cdecl, init=None): + """Allocate an instance according to the specified C type and + return a pointer to it. The specified C type must be either a + pointer or an array: ``new('X *')`` allocates an X and returns + a pointer to it, whereas ``new('X[n]')`` allocates an array of + n X'es and returns an array referencing it (which works + mostly like a pointer, like in C). You can also use + ``new('X[]', n)`` to allocate an array of a non-constant + length n. 
+ + The memory is initialized following the rules of declaring a + global variable in C: by default it is zero-initialized, but + an explicit initializer can be given which can be used to + fill all or part of the memory. + + When the returned object goes out of scope, the memory + is freed. In other words the returned object has + ownership of the value of type 'cdecl' that it points to. This + means that the raw data can be used as long as this object is + kept alive, but must not be used for a longer time. Be careful + about that when copying the pointer to the memory somewhere + else, e.g. into another structure. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.newp(cdecl, init) + + def new_allocator(self, alloc=None, free=None, + should_clear_after_alloc=True): + """Return a new allocator, i.e. a function that behaves like ffi.new() + but uses the provided low-level 'alloc' and 'free' functions. + + 'alloc' is called with the size as argument. If it returns NULL, a + MemoryError is raised. 'free' is called with the result of 'alloc' + as argument. Both can be either Python function or directly C + functions. If 'free' is None, then no free function is called. + If both 'alloc' and 'free' are None, the default is used. + + If 'should_clear_after_alloc' is set to False, then the memory + returned by 'alloc' is assumed to be already cleared (or you are + fine with garbage); otherwise CFFI will clear it. + """ + compiled_ffi = self._backend.FFI() + allocator = compiled_ffi.new_allocator(alloc, free, + should_clear_after_alloc) + def allocate(cdecl, init=None): + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return allocator(cdecl, init) + return allocate + + def cast(self, cdecl, source): + """Similar to a C cast: returns an instance of the named C + type initialized with the given 'source'. The source is + casted between integers or pointers of any type. 
+ """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.cast(cdecl, source) + + def string(self, cdata, maxlen=-1): + """Return a Python string (or unicode string) from the 'cdata'. + If 'cdata' is a pointer or array of characters or bytes, returns + the null-terminated string. The returned string extends until + the first null character, or at most 'maxlen' characters. If + 'cdata' is an array then 'maxlen' defaults to its length. + + If 'cdata' is a pointer or array of wchar_t, returns a unicode + string following the same rules. + + If 'cdata' is a single character or byte or a wchar_t, returns + it as a string or unicode string. + + If 'cdata' is an enum, returns the value of the enumerator as a + string, or 'NUMBER' if the value is out of range. + """ + return self._backend.string(cdata, maxlen) + + def unpack(self, cdata, length): + """Unpack an array of C data of the given length, + returning a Python string/unicode/list. + + If 'cdata' is a pointer to 'char', returns a byte string. + It does not stop at the first null. This is equivalent to: + ffi.buffer(cdata, length)[:] + + If 'cdata' is a pointer to 'wchar_t', returns a unicode string. + 'length' is measured in wchar_t's; it is not the size in bytes. + + If 'cdata' is a pointer to anything else, returns a list of + 'length' items. This is a faster equivalent to: + [cdata[i] for i in range(length)] + """ + return self._backend.unpack(cdata, length) + + #def buffer(self, cdata, size=-1): + # """Return a read-write buffer object that references the raw C data + # pointed to by the given 'cdata'. The 'cdata' must be a pointer or + # an array. Can be passed to functions expecting a buffer, or directly + # manipulated with: + # + # buf[:] get a copy of it in a regular string, or + # buf[idx] as a single character + # buf[:] = ... + # buf[idx] = ... 
change the content + # """ + # note that 'buffer' is a type, set on this instance by __init__ + + def from_buffer(self, cdecl, python_buffer=_unspecified, + require_writable=False): + """Return a cdata of the given type pointing to the data of the + given Python object, which must support the buffer interface. + Note that this is not meant to be used on the built-in types + str or unicode (you can build 'char[]' arrays explicitly) + but only on objects containing large quantities of raw data + in some other format, like 'array.array' or numpy arrays. + + The first argument is optional and default to 'char[]'. + """ + if python_buffer is _unspecified: + cdecl, python_buffer = self.BCharA, cdecl + elif isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.from_buffer(cdecl, python_buffer, + require_writable) + + def memmove(self, dest, src, n): + """ffi.memmove(dest, src, n) copies n bytes of memory from src to dest. + + Like the C function memmove(), the memory areas may overlap; + apart from that it behaves like the C function memcpy(). + + 'src' can be any cdata ptr or array, or any Python buffer object. + 'dest' can be any cdata ptr or array, or a writable Python buffer + object. The size to copy, 'n', is always measured in bytes. + + Unlike other methods, this one supports all Python buffer including + byte strings and bytearrays---but it still does not support + non-contiguous buffers. + """ + return self._backend.memmove(dest, src, n) + + def callback(self, cdecl, python_callable=None, error=None, onerror=None): + """Return a callback object or a decorator making such a + callback object. 'cdecl' must name a C function pointer type. + The callback invokes the specified 'python_callable' (which may + be provided either directly or via a decorator). Important: the + callback object must be manually kept alive for as long as the + callback may be invoked from the C level. 
+ """ + def callback_decorator_wrap(python_callable): + if not callable(python_callable): + raise TypeError("the 'python_callable' argument " + "is not callable") + return self._backend.callback(cdecl, python_callable, + error, onerror) + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl, consider_function_as_funcptr=True) + if python_callable is None: + return callback_decorator_wrap # decorator mode + else: + return callback_decorator_wrap(python_callable) # direct mode + + def getctype(self, cdecl, replace_with=''): + """Return a string giving the C type 'cdecl', which may be itself + a string or a object. If 'replace_with' is given, it gives + extra text to append (or insert for more complicated C types), like + a variable name, or '*' to get actually the C type 'pointer-to-cdecl'. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + replace_with = replace_with.strip() + if (replace_with.startswith('*') + and '&[' in self._backend.getcname(cdecl, '&')): + replace_with = '(%s)' % replace_with + elif replace_with and not replace_with[0] in '[(': + replace_with = ' ' + replace_with + return self._backend.getcname(cdecl, replace_with) + + def gc(self, cdata, destructor, size=0): + """Return a new cdata object that points to the same + data. Later, when this new cdata object is garbage-collected, + 'destructor(old_cdata_object)' will be called. + + The optional 'size' gives an estimate of the size, used to + trigger the garbage collection more eagerly. So far only used + on PyPy. It tells the GC that the returned object keeps alive + roughly 'size' bytes of external memory. + """ + return self._backend.gcp(cdata, destructor, size) + + def _get_cached_btype(self, type): + assert self._lock.acquire(False) is False + # call me with the lock! 
+ try: + BType = self._cached_btypes[type] + except KeyError: + finishlist = [] + BType = type.get_cached_btype(self, finishlist) + for type in finishlist: + type.finish_backend_type(self, finishlist) + return BType + + def verify(self, source='', tmpdir=None, **kwargs): + """Verify that the current ffi signatures compile on this + machine, and return a dynamic library object. The dynamic + library can be used to call functions and access global + variables declared in this 'ffi'. The library is compiled + by the C compiler: it gives you C-level API compatibility + (including calling macros). This is unlike 'ffi.dlopen()', + which requires binary compatibility in the signatures. + """ + from .verifier import Verifier, _caller_dir_pycache + # + # If set_unicode(True) was called, insert the UNICODE and + # _UNICODE macro declarations + if self._windows_unicode: + self._apply_windows_unicode(kwargs) + # + # Set the tmpdir here, and not in Verifier.__init__: it picks + # up the caller's directory, which we want to be the caller of + # ffi.verify(), as opposed to the caller of Veritier(). + tmpdir = tmpdir or _caller_dir_pycache() + # + # Make a Verifier() and use it to load the library. + self.verifier = Verifier(self, source, tmpdir, **kwargs) + lib = self.verifier.load_library() + # + # Save the loaded library for keep-alive purposes, even + # if the caller doesn't keep it alive itself (it should). + self._libraries.append(lib) + return lib + + def _get_errno(self): + return self._backend.get_errno() + def _set_errno(self, errno): + self._backend.set_errno(errno) + errno = property(_get_errno, _set_errno, None, + "the value of 'errno' from/to the C calls") + + def getwinerror(self, code=-1): + return self._backend.getwinerror(code) + + def _pointer_to(self, ctype): + with self._lock: + return model.pointer_cache(self, ctype) + + def addressof(self, cdata, *fields_or_indexes): + """Return the address of a . 
+ If 'fields_or_indexes' are given, returns the address of that + field or array item in the structure or array, recursively in + case of nested structures. + """ + try: + ctype = self._backend.typeof(cdata) + except TypeError: + if '__addressof__' in type(cdata).__dict__: + return type(cdata).__addressof__(cdata, *fields_or_indexes) + raise + if fields_or_indexes: + ctype, offset = self._typeoffsetof(ctype, *fields_or_indexes) + else: + if ctype.kind == "pointer": + raise TypeError("addressof(pointer)") + offset = 0 + ctypeptr = self._pointer_to(ctype) + return self._backend.rawaddressof(ctypeptr, cdata, offset) + + def _typeoffsetof(self, ctype, field_or_index, *fields_or_indexes): + ctype, offset = self._backend.typeoffsetof(ctype, field_or_index) + for field1 in fields_or_indexes: + ctype, offset1 = self._backend.typeoffsetof(ctype, field1, 1) + offset += offset1 + return ctype, offset + + def include(self, ffi_to_include): + """Includes the typedefs, structs, unions and enums defined + in another FFI instance. Usage is similar to a #include in C, + where a part of the program might include types defined in + another part for its own usage. Note that the include() + method has no effect on functions, constants and global + variables, which must anyway be accessed directly from the + lib object returned by the original FFI instance. 
+ """ + if not isinstance(ffi_to_include, FFI): + raise TypeError("ffi.include() expects an argument that is also of" + " type cffi.FFI, not %r" % ( + type(ffi_to_include).__name__,)) + if ffi_to_include is self: + raise ValueError("self.include(self)") + with ffi_to_include._lock: + with self._lock: + self._parser.include(ffi_to_include._parser) + self._cdefsources.append('[') + self._cdefsources.extend(ffi_to_include._cdefsources) + self._cdefsources.append(']') + self._included_ffis.append(ffi_to_include) + + def new_handle(self, x): + return self._backend.newp_handle(self.BVoidP, x) + + def from_handle(self, x): + return self._backend.from_handle(x) + + def release(self, x): + self._backend.release(x) + + def set_unicode(self, enabled_flag): + """Windows: if 'enabled_flag' is True, enable the UNICODE and + _UNICODE defines in C, and declare the types like TCHAR and LPTCSTR + to be (pointers to) wchar_t. If 'enabled_flag' is False, + declare these types to be (pointers to) plain 8-bit characters. + This is mostly for backward compatibility; you usually want True. 
+ """ + if self._windows_unicode is not None: + raise ValueError("set_unicode() can only be called once") + enabled_flag = bool(enabled_flag) + if enabled_flag: + self.cdef("typedef wchar_t TBYTE;" + "typedef wchar_t TCHAR;" + "typedef const wchar_t *LPCTSTR;" + "typedef const wchar_t *PCTSTR;" + "typedef wchar_t *LPTSTR;" + "typedef wchar_t *PTSTR;" + "typedef TBYTE *PTBYTE;" + "typedef TCHAR *PTCHAR;") + else: + self.cdef("typedef char TBYTE;" + "typedef char TCHAR;" + "typedef const char *LPCTSTR;" + "typedef const char *PCTSTR;" + "typedef char *LPTSTR;" + "typedef char *PTSTR;" + "typedef TBYTE *PTBYTE;" + "typedef TCHAR *PTCHAR;") + self._windows_unicode = enabled_flag + + def _apply_windows_unicode(self, kwds): + defmacros = kwds.get('define_macros', ()) + if not isinstance(defmacros, (list, tuple)): + raise TypeError("'define_macros' must be a list or tuple") + defmacros = list(defmacros) + [('UNICODE', '1'), + ('_UNICODE', '1')] + kwds['define_macros'] = defmacros + + def _apply_embedding_fix(self, kwds): + # must include an argument like "-lpython2.7" for the compiler + def ensure(key, value): + lst = kwds.setdefault(key, []) + if value not in lst: + lst.append(value) + # + if '__pypy__' in sys.builtin_module_names: + import os + if sys.platform == "win32": + # we need 'libpypy-c.lib'. Current distributions of + # pypy (>= 4.1) contain it as 'libs/python27.lib'. + pythonlib = "python{0[0]}{0[1]}".format(sys.version_info) + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'libs')) + else: + # we need 'libpypy-c.{so,dylib}', which should be by + # default located in 'sys.prefix/bin' for installed + # systems. + if sys.version_info < (3,): + pythonlib = "pypy-c" + else: + pythonlib = "pypy3-c" + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'bin')) + # On uninstalled pypy's, the libpypy-c is typically found in + # .../pypy/goal/. 
+ if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'pypy', 'goal')) + else: + if sys.platform == "win32": + template = "python%d%d" + if hasattr(sys, 'gettotalrefcount'): + template += '_d' + else: + try: + import sysconfig + except ImportError: # 2.6 + from distutils import sysconfig + template = "python%d.%d" + if sysconfig.get_config_var('DEBUG_EXT'): + template += sysconfig.get_config_var('DEBUG_EXT') + pythonlib = (template % + (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) + if hasattr(sys, 'abiflags'): + pythonlib += sys.abiflags + ensure('libraries', pythonlib) + if sys.platform == "win32": + ensure('extra_link_args', '/MANIFEST') + + def set_source(self, module_name, source, source_extension='.c', **kwds): + import os + if hasattr(self, '_assigned_source'): + raise ValueError("set_source() cannot be called several times " + "per ffi object") + if not isinstance(module_name, basestring): + raise TypeError("'module_name' must be a string") + if os.sep in module_name or (os.altsep and os.altsep in module_name): + raise ValueError("'module_name' must not contain '/': use a dotted " + "name to make a 'package.module' location") + self._assigned_source = (str(module_name), source, + source_extension, kwds) + + def set_source_pkgconfig(self, module_name, pkgconfig_libs, source, + source_extension='.c', **kwds): + from . 
import pkgconfig + if not isinstance(pkgconfig_libs, list): + raise TypeError("the pkgconfig_libs argument must be a list " + "of package names") + kwds2 = pkgconfig.flags_from_pkgconfig(pkgconfig_libs) + pkgconfig.merge_flags(kwds, kwds2) + self.set_source(module_name, source, source_extension, **kwds) + + def distutils_extension(self, tmpdir='build', verbose=True): + from distutils.dir_util import mkpath + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + if hasattr(self, 'verifier'): # fallback, 'tmpdir' ignored + return self.verifier.get_extension() + raise ValueError("set_source() must be called before" + " distutils_extension()") + module_name, source, source_extension, kwds = self._assigned_source + if source is None: + raise TypeError("distutils_extension() is only for C extension " + "modules, not for dlopen()-style pure Python " + "modules") + mkpath(tmpdir) + ext, updated = recompile(self, module_name, + source, tmpdir=tmpdir, extradir=tmpdir, + source_extension=source_extension, + call_c_compiler=False, **kwds) + if verbose: + if updated: + sys.stderr.write("regenerated: %r\n" % (ext.sources[0],)) + else: + sys.stderr.write("not modified: %r\n" % (ext.sources[0],)) + return ext + + def emit_c_code(self, filename): + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + raise ValueError("set_source() must be called before emit_c_code()") + module_name, source, source_extension, kwds = self._assigned_source + if source is None: + raise TypeError("emit_c_code() is only for C extension modules, " + "not for dlopen()-style pure Python modules") + recompile(self, module_name, source, + c_file=filename, call_c_compiler=False, **kwds) + + def emit_python_code(self, filename): + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + raise ValueError("set_source() must be called before emit_c_code()") + module_name, source, source_extension, kwds = self._assigned_source + 
if source is not None: + raise TypeError("emit_python_code() is only for dlopen()-style " + "pure Python modules, not for C extension modules") + recompile(self, module_name, source, + c_file=filename, call_c_compiler=False, **kwds) + + def compile(self, tmpdir='.', verbose=0, target=None, debug=None): + """The 'target' argument gives the final file name of the + compiled DLL. Use '*' to force distutils' choice, suitable for + regular CPython C API modules. Use a file name ending in '.*' + to ask for the system's default extension for dynamic libraries + (.so/.dll/.dylib). + + The default is '*' when building a non-embedded C API extension, + and (module_name + '.*') when building an embedded library. + """ + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + raise ValueError("set_source() must be called before compile()") + module_name, source, source_extension, kwds = self._assigned_source + return recompile(self, module_name, source, tmpdir=tmpdir, + target=target, source_extension=source_extension, + compiler_verbose=verbose, debug=debug, **kwds) + + def init_once(self, func, tag): + # Read _init_once_cache[tag], which is either (False, lock) if + # we're calling the function now in some thread, or (True, result). + # Don't call setdefault() in most cases, to avoid allocating and + # immediately freeing a lock; but still use setdefaut() to avoid + # races. + try: + x = self._init_once_cache[tag] + except KeyError: + x = self._init_once_cache.setdefault(tag, (False, allocate_lock())) + # Common case: we got (True, result), so we return the result. + if x[0]: + return x[1] + # Else, it's a lock. Acquire it to serialize the following tests. + with x[1]: + # Read again from _init_once_cache the current status. + x = self._init_once_cache[tag] + if x[0]: + return x[1] + # Call the function and store the result back. 
+ result = func() + self._init_once_cache[tag] = (True, result) + return result + + def embedding_init_code(self, pysource): + if self._embedding: + raise ValueError("embedding_init_code() can only be called once") + # fix 'pysource' before it gets dumped into the C file: + # - remove empty lines at the beginning, so it starts at "line 1" + # - dedent, if all non-empty lines are indented + # - check for SyntaxErrors + import re + match = re.match(r'\s*\n', pysource) + if match: + pysource = pysource[match.end():] + lines = pysource.splitlines() or [''] + prefix = re.match(r'\s*', lines[0]).group() + for i in range(1, len(lines)): + line = lines[i] + if line.rstrip(): + while not line.startswith(prefix): + prefix = prefix[:-1] + i = len(prefix) + lines = [line[i:]+'\n' for line in lines] + pysource = ''.join(lines) + # + compile(pysource, "cffi_init", "exec") + # + self._embedding = pysource + + def def_extern(self, *args, **kwds): + raise ValueError("ffi.def_extern() is only available on API-mode FFI " + "objects") + + def list_types(self): + """Returns the user type names known to this FFI instance. + This returns a tuple containing three lists of names: + (typedef_names, names_of_structs, names_of_unions) + """ + typedefs = [] + structs = [] + unions = [] + for key in self._parser._declarations: + if key.startswith('typedef '): + typedefs.append(key[8:]) + elif key.startswith('struct '): + structs.append(key[7:]) + elif key.startswith('union '): + unions.append(key[6:]) + typedefs.sort() + structs.sort() + unions.sort() + return (typedefs, structs, unions) + + +def _load_backend_lib(backend, name, flags): + import os + if not isinstance(name, basestring): + if sys.platform != "win32" or name is not None: + return backend.load_library(name, flags) + name = "c" # Windows: load_library(None) fails, but this works + # on Python 2 (backward compatibility hack only) + first_error = None + if '.' 
in name or '/' in name or os.sep in name: + try: + return backend.load_library(name, flags) + except OSError as e: + first_error = e + import ctypes.util + path = ctypes.util.find_library(name) + if path is None: + if name == "c" and sys.platform == "win32" and sys.version_info >= (3,): + raise OSError("dlopen(None) cannot work on Windows for Python 3 " + "(see http://bugs.python.org/issue23606)") + msg = ("ctypes.util.find_library() did not manage " + "to locate a library called %r" % (name,)) + if first_error is not None: + msg = "%s. Additionally, %s" % (first_error, msg) + raise OSError(msg) + return backend.load_library(path, flags) + +def _make_ffi_library(ffi, libname, flags): + backend = ffi._backend + backendlib = _load_backend_lib(backend, libname, flags) + # + def accessor_function(name): + key = 'function ' + name + tp, _ = ffi._parser._declarations[key] + BType = ffi._get_cached_btype(tp) + value = backendlib.load_function(BType, name) + library.__dict__[name] = value + # + def accessor_variable(name): + key = 'variable ' + name + tp, _ = ffi._parser._declarations[key] + BType = ffi._get_cached_btype(tp) + read_variable = backendlib.read_variable + write_variable = backendlib.write_variable + setattr(FFILibrary, name, property( + lambda self: read_variable(BType, name), + lambda self, value: write_variable(BType, name, value))) + # + def addressof_var(name): + try: + return addr_variables[name] + except KeyError: + with ffi._lock: + if name not in addr_variables: + key = 'variable ' + name + tp, _ = ffi._parser._declarations[key] + BType = ffi._get_cached_btype(tp) + if BType.kind != 'array': + BType = model.pointer_cache(ffi, BType) + p = backendlib.load_function(BType, name) + addr_variables[name] = p + return addr_variables[name] + # + def accessor_constant(name): + raise NotImplementedError("non-integer constant '%s' cannot be " + "accessed from a dlopen() library" % (name,)) + # + def accessor_int_constant(name): + library.__dict__[name] = 
ffi._parser._int_constants[name] + # + accessors = {} + accessors_version = [False] + addr_variables = {} + # + def update_accessors(): + if accessors_version[0] is ffi._cdef_version: + return + # + for key, (tp, _) in ffi._parser._declarations.items(): + if not isinstance(tp, model.EnumType): + tag, name = key.split(' ', 1) + if tag == 'function': + accessors[name] = accessor_function + elif tag == 'variable': + accessors[name] = accessor_variable + elif tag == 'constant': + accessors[name] = accessor_constant + else: + for i, enumname in enumerate(tp.enumerators): + def accessor_enum(name, tp=tp, i=i): + tp.check_not_partial() + library.__dict__[name] = tp.enumvalues[i] + accessors[enumname] = accessor_enum + for name in ffi._parser._int_constants: + accessors.setdefault(name, accessor_int_constant) + accessors_version[0] = ffi._cdef_version + # + def make_accessor(name): + with ffi._lock: + if name in library.__dict__ or name in FFILibrary.__dict__: + return # added by another thread while waiting for the lock + if name not in accessors: + update_accessors() + if name not in accessors: + raise AttributeError(name) + accessors[name](name) + # + class FFILibrary(object): + def __getattr__(self, name): + make_accessor(name) + return getattr(self, name) + def __setattr__(self, name, value): + try: + property = getattr(self.__class__, name) + except AttributeError: + make_accessor(name) + setattr(self, name, value) + else: + property.__set__(self, value) + def __dir__(self): + with ffi._lock: + update_accessors() + return accessors.keys() + def __addressof__(self, name): + if name in library.__dict__: + return library.__dict__[name] + if name in FFILibrary.__dict__: + return addressof_var(name) + make_accessor(name) + if name in library.__dict__: + return library.__dict__[name] + if name in FFILibrary.__dict__: + return addressof_var(name) + raise AttributeError("cffi library has no function or " + "global variable named '%s'" % (name,)) + def __cffi_close__(self): + 
backendlib.close_lib() + self.__dict__.clear() + # + if isinstance(libname, basestring): + try: + if not isinstance(libname, str): # unicode, on Python 2 + libname = libname.encode('utf-8') + FFILibrary.__name__ = 'FFILibrary_%s' % libname + except UnicodeError: + pass + library = FFILibrary() + return library, library.__dict__ + +def _builtin_function_type(func): + # a hack to make at least ffi.typeof(builtin_function) work, + # if the builtin function was obtained by 'vengine_cpy'. + import sys + try: + module = sys.modules[func.__module__] + ffi = module._cffi_original_ffi + types_of_builtin_funcs = module._cffi_types_of_builtin_funcs + tp = types_of_builtin_funcs[func] + except (KeyError, AttributeError, TypeError): + return None + else: + with ffi._lock: + return ffi._get_cached_btype(tp) diff --git a/.venv/lib/python3.9/site-packages/cffi/backend_ctypes.py b/.venv/lib/python3.9/site-packages/cffi/backend_ctypes.py new file mode 100644 index 0000000..e7956a7 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi/backend_ctypes.py @@ -0,0 +1,1121 @@ +import ctypes, ctypes.util, operator, sys +from . 
import model + +if sys.version_info < (3,): + bytechr = chr +else: + unicode = str + long = int + xrange = range + bytechr = lambda num: bytes([num]) + +class CTypesType(type): + pass + +class CTypesData(object): + __metaclass__ = CTypesType + __slots__ = ['__weakref__'] + __name__ = '' + + def __init__(self, *args): + raise TypeError("cannot instantiate %r" % (self.__class__,)) + + @classmethod + def _newp(cls, init): + raise TypeError("expected a pointer or array ctype, got '%s'" + % (cls._get_c_name(),)) + + @staticmethod + def _to_ctypes(value): + raise TypeError + + @classmethod + def _arg_to_ctypes(cls, *value): + try: + ctype = cls._ctype + except AttributeError: + raise TypeError("cannot create an instance of %r" % (cls,)) + if value: + res = cls._to_ctypes(*value) + if not isinstance(res, ctype): + res = cls._ctype(res) + else: + res = cls._ctype() + return res + + @classmethod + def _create_ctype_obj(cls, init): + if init is None: + return cls._arg_to_ctypes() + else: + return cls._arg_to_ctypes(init) + + @staticmethod + def _from_ctypes(ctypes_value): + raise TypeError + + @classmethod + def _get_c_name(cls, replace_with=''): + return cls._reftypename.replace(' &', replace_with) + + @classmethod + def _fix_class(cls): + cls.__name__ = 'CData<%s>' % (cls._get_c_name(),) + cls.__qualname__ = 'CData<%s>' % (cls._get_c_name(),) + cls.__module__ = 'ffi' + + def _get_own_repr(self): + raise NotImplementedError + + def _addr_repr(self, address): + if address == 0: + return 'NULL' + else: + if address < 0: + address += 1 << (8*ctypes.sizeof(ctypes.c_void_p)) + return '0x%x' % address + + def __repr__(self, c_name=None): + own = self._get_own_repr() + return '' % (c_name or self._get_c_name(), own) + + def _convert_to_address(self, BClass): + if BClass is None: + raise TypeError("cannot convert %r to an address" % ( + self._get_c_name(),)) + else: + raise TypeError("cannot convert %r to %r" % ( + self._get_c_name(), BClass._get_c_name())) + + @classmethod + def 
_get_size(cls): + return ctypes.sizeof(cls._ctype) + + def _get_size_of_instance(self): + return ctypes.sizeof(self._ctype) + + @classmethod + def _cast_from(cls, source): + raise TypeError("cannot cast to %r" % (cls._get_c_name(),)) + + def _cast_to_integer(self): + return self._convert_to_address(None) + + @classmethod + def _alignment(cls): + return ctypes.alignment(cls._ctype) + + def __iter__(self): + raise TypeError("cdata %r does not support iteration" % ( + self._get_c_name()),) + + def _make_cmp(name): + cmpfunc = getattr(operator, name) + def cmp(self, other): + v_is_ptr = not isinstance(self, CTypesGenericPrimitive) + w_is_ptr = (isinstance(other, CTypesData) and + not isinstance(other, CTypesGenericPrimitive)) + if v_is_ptr and w_is_ptr: + return cmpfunc(self._convert_to_address(None), + other._convert_to_address(None)) + elif v_is_ptr or w_is_ptr: + return NotImplemented + else: + if isinstance(self, CTypesGenericPrimitive): + self = self._value + if isinstance(other, CTypesGenericPrimitive): + other = other._value + return cmpfunc(self, other) + cmp.func_name = name + return cmp + + __eq__ = _make_cmp('__eq__') + __ne__ = _make_cmp('__ne__') + __lt__ = _make_cmp('__lt__') + __le__ = _make_cmp('__le__') + __gt__ = _make_cmp('__gt__') + __ge__ = _make_cmp('__ge__') + + def __hash__(self): + return hash(self._convert_to_address(None)) + + def _to_string(self, maxlen): + raise TypeError("string(): %r" % (self,)) + + +class CTypesGenericPrimitive(CTypesData): + __slots__ = [] + + def __hash__(self): + return hash(self._value) + + def _get_own_repr(self): + return repr(self._from_ctypes(self._value)) + + +class CTypesGenericArray(CTypesData): + __slots__ = [] + + @classmethod + def _newp(cls, init): + return cls(init) + + def __iter__(self): + for i in xrange(len(self)): + yield self[i] + + def _get_own_repr(self): + return self._addr_repr(ctypes.addressof(self._blob)) + + +class CTypesGenericPtr(CTypesData): + __slots__ = ['_address', '_as_ctype_ptr'] + 
_automatic_casts = False + kind = "pointer" + + @classmethod + def _newp(cls, init): + return cls(init) + + @classmethod + def _cast_from(cls, source): + if source is None: + address = 0 + elif isinstance(source, CTypesData): + address = source._cast_to_integer() + elif isinstance(source, (int, long)): + address = source + else: + raise TypeError("bad type for cast to %r: %r" % + (cls, type(source).__name__)) + return cls._new_pointer_at(address) + + @classmethod + def _new_pointer_at(cls, address): + self = cls.__new__(cls) + self._address = address + self._as_ctype_ptr = ctypes.cast(address, cls._ctype) + return self + + def _get_own_repr(self): + try: + return self._addr_repr(self._address) + except AttributeError: + return '???' + + def _cast_to_integer(self): + return self._address + + def __nonzero__(self): + return bool(self._address) + __bool__ = __nonzero__ + + @classmethod + def _to_ctypes(cls, value): + if not isinstance(value, CTypesData): + raise TypeError("unexpected %s object" % type(value).__name__) + address = value._convert_to_address(cls) + return ctypes.cast(address, cls._ctype) + + @classmethod + def _from_ctypes(cls, ctypes_ptr): + address = ctypes.cast(ctypes_ptr, ctypes.c_void_p).value or 0 + return cls._new_pointer_at(address) + + @classmethod + def _initialize(cls, ctypes_ptr, value): + if value: + ctypes_ptr.contents = cls._to_ctypes(value).contents + + def _convert_to_address(self, BClass): + if (BClass in (self.__class__, None) or BClass._automatic_casts + or self._automatic_casts): + return self._address + else: + return CTypesData._convert_to_address(self, BClass) + + +class CTypesBaseStructOrUnion(CTypesData): + __slots__ = ['_blob'] + + @classmethod + def _create_ctype_obj(cls, init): + # may be overridden + raise TypeError("cannot instantiate opaque type %s" % (cls,)) + + def _get_own_repr(self): + return self._addr_repr(ctypes.addressof(self._blob)) + + @classmethod + def _offsetof(cls, fieldname): + return getattr(cls._ctype, 
fieldname).offset + + def _convert_to_address(self, BClass): + if getattr(BClass, '_BItem', None) is self.__class__: + return ctypes.addressof(self._blob) + else: + return CTypesData._convert_to_address(self, BClass) + + @classmethod + def _from_ctypes(cls, ctypes_struct_or_union): + self = cls.__new__(cls) + self._blob = ctypes_struct_or_union + return self + + @classmethod + def _to_ctypes(cls, value): + return value._blob + + def __repr__(self, c_name=None): + return CTypesData.__repr__(self, c_name or self._get_c_name(' &')) + + +class CTypesBackend(object): + + PRIMITIVE_TYPES = { + 'char': ctypes.c_char, + 'short': ctypes.c_short, + 'int': ctypes.c_int, + 'long': ctypes.c_long, + 'long long': ctypes.c_longlong, + 'signed char': ctypes.c_byte, + 'unsigned char': ctypes.c_ubyte, + 'unsigned short': ctypes.c_ushort, + 'unsigned int': ctypes.c_uint, + 'unsigned long': ctypes.c_ulong, + 'unsigned long long': ctypes.c_ulonglong, + 'float': ctypes.c_float, + 'double': ctypes.c_double, + '_Bool': ctypes.c_bool, + } + + for _name in ['unsigned long long', 'unsigned long', + 'unsigned int', 'unsigned short', 'unsigned char']: + _size = ctypes.sizeof(PRIMITIVE_TYPES[_name]) + PRIMITIVE_TYPES['uint%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_void_p): + PRIMITIVE_TYPES['uintptr_t'] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_size_t): + PRIMITIVE_TYPES['size_t'] = PRIMITIVE_TYPES[_name] + + for _name in ['long long', 'long', 'int', 'short', 'signed char']: + _size = ctypes.sizeof(PRIMITIVE_TYPES[_name]) + PRIMITIVE_TYPES['int%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_void_p): + PRIMITIVE_TYPES['intptr_t'] = PRIMITIVE_TYPES[_name] + PRIMITIVE_TYPES['ptrdiff_t'] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_size_t): + PRIMITIVE_TYPES['ssize_t'] = PRIMITIVE_TYPES[_name] + + + def __init__(self): + self.RTLD_LAZY = 0 # not supported anyway by ctypes + self.RTLD_NOW = 0 + 
self.RTLD_GLOBAL = ctypes.RTLD_GLOBAL + self.RTLD_LOCAL = ctypes.RTLD_LOCAL + + def set_ffi(self, ffi): + self.ffi = ffi + + def _get_types(self): + return CTypesData, CTypesType + + def load_library(self, path, flags=0): + cdll = ctypes.CDLL(path, flags) + return CTypesLibrary(self, cdll) + + def new_void_type(self): + class CTypesVoid(CTypesData): + __slots__ = [] + _reftypename = 'void &' + @staticmethod + def _from_ctypes(novalue): + return None + @staticmethod + def _to_ctypes(novalue): + if novalue is not None: + raise TypeError("None expected, got %s object" % + (type(novalue).__name__,)) + return None + CTypesVoid._fix_class() + return CTypesVoid + + def new_primitive_type(self, name): + if name == 'wchar_t': + raise NotImplementedError(name) + ctype = self.PRIMITIVE_TYPES[name] + if name == 'char': + kind = 'char' + elif name in ('float', 'double'): + kind = 'float' + else: + if name in ('signed char', 'unsigned char'): + kind = 'byte' + elif name == '_Bool': + kind = 'bool' + else: + kind = 'int' + is_signed = (ctype(-1).value == -1) + # + def _cast_source_to_int(source): + if isinstance(source, (int, long, float)): + source = int(source) + elif isinstance(source, CTypesData): + source = source._cast_to_integer() + elif isinstance(source, bytes): + source = ord(source) + elif source is None: + source = 0 + else: + raise TypeError("bad type for cast to %r: %r" % + (CTypesPrimitive, type(source).__name__)) + return source + # + kind1 = kind + class CTypesPrimitive(CTypesGenericPrimitive): + __slots__ = ['_value'] + _ctype = ctype + _reftypename = '%s &' % name + kind = kind1 + + def __init__(self, value): + self._value = value + + @staticmethod + def _create_ctype_obj(init): + if init is None: + return ctype() + return ctype(CTypesPrimitive._to_ctypes(init)) + + if kind == 'int' or kind == 'byte': + @classmethod + def _cast_from(cls, source): + source = _cast_source_to_int(source) + source = ctype(source).value # cast within range + return cls(source) + def 
__int__(self): + return self._value + + if kind == 'bool': + @classmethod + def _cast_from(cls, source): + if not isinstance(source, (int, long, float)): + source = _cast_source_to_int(source) + return cls(bool(source)) + def __int__(self): + return int(self._value) + + if kind == 'char': + @classmethod + def _cast_from(cls, source): + source = _cast_source_to_int(source) + source = bytechr(source & 0xFF) + return cls(source) + def __int__(self): + return ord(self._value) + + if kind == 'float': + @classmethod + def _cast_from(cls, source): + if isinstance(source, float): + pass + elif isinstance(source, CTypesGenericPrimitive): + if hasattr(source, '__float__'): + source = float(source) + else: + source = int(source) + else: + source = _cast_source_to_int(source) + source = ctype(source).value # fix precision + return cls(source) + def __int__(self): + return int(self._value) + def __float__(self): + return self._value + + _cast_to_integer = __int__ + + if kind == 'int' or kind == 'byte' or kind == 'bool': + @staticmethod + def _to_ctypes(x): + if not isinstance(x, (int, long)): + if isinstance(x, CTypesData): + x = int(x) + else: + raise TypeError("integer expected, got %s" % + type(x).__name__) + if ctype(x).value != x: + if not is_signed and x < 0: + raise OverflowError("%s: negative integer" % name) + else: + raise OverflowError("%s: integer out of bounds" + % name) + return x + + if kind == 'char': + @staticmethod + def _to_ctypes(x): + if isinstance(x, bytes) and len(x) == 1: + return x + if isinstance(x, CTypesPrimitive): # > + return x._value + raise TypeError("character expected, got %s" % + type(x).__name__) + def __nonzero__(self): + return ord(self._value) != 0 + else: + def __nonzero__(self): + return self._value != 0 + __bool__ = __nonzero__ + + if kind == 'float': + @staticmethod + def _to_ctypes(x): + if not isinstance(x, (int, long, float, CTypesData)): + raise TypeError("float expected, got %s" % + type(x).__name__) + return ctype(x).value + + 
@staticmethod + def _from_ctypes(value): + return getattr(value, 'value', value) + + @staticmethod + def _initialize(blob, init): + blob.value = CTypesPrimitive._to_ctypes(init) + + if kind == 'char': + def _to_string(self, maxlen): + return self._value + if kind == 'byte': + def _to_string(self, maxlen): + return chr(self._value & 0xff) + # + CTypesPrimitive._fix_class() + return CTypesPrimitive + + def new_pointer_type(self, BItem): + getbtype = self.ffi._get_cached_btype + if BItem is getbtype(model.PrimitiveType('char')): + kind = 'charp' + elif BItem in (getbtype(model.PrimitiveType('signed char')), + getbtype(model.PrimitiveType('unsigned char'))): + kind = 'bytep' + elif BItem is getbtype(model.void_type): + kind = 'voidp' + else: + kind = 'generic' + # + class CTypesPtr(CTypesGenericPtr): + __slots__ = ['_own'] + if kind == 'charp': + __slots__ += ['__as_strbuf'] + _BItem = BItem + if hasattr(BItem, '_ctype'): + _ctype = ctypes.POINTER(BItem._ctype) + _bitem_size = ctypes.sizeof(BItem._ctype) + else: + _ctype = ctypes.c_void_p + if issubclass(BItem, CTypesGenericArray): + _reftypename = BItem._get_c_name('(* &)') + else: + _reftypename = BItem._get_c_name(' * &') + + def __init__(self, init): + ctypeobj = BItem._create_ctype_obj(init) + if kind == 'charp': + self.__as_strbuf = ctypes.create_string_buffer( + ctypeobj.value + b'\x00') + self._as_ctype_ptr = ctypes.cast( + self.__as_strbuf, self._ctype) + else: + self._as_ctype_ptr = ctypes.pointer(ctypeobj) + self._address = ctypes.cast(self._as_ctype_ptr, + ctypes.c_void_p).value + self._own = True + + def __add__(self, other): + if isinstance(other, (int, long)): + return self._new_pointer_at(self._address + + other * self._bitem_size) + else: + return NotImplemented + + def __sub__(self, other): + if isinstance(other, (int, long)): + return self._new_pointer_at(self._address - + other * self._bitem_size) + elif type(self) is type(other): + return (self._address - other._address) // self._bitem_size + else: 
+ return NotImplemented + + def __getitem__(self, index): + if getattr(self, '_own', False) and index != 0: + raise IndexError + return BItem._from_ctypes(self._as_ctype_ptr[index]) + + def __setitem__(self, index, value): + self._as_ctype_ptr[index] = BItem._to_ctypes(value) + + if kind == 'charp' or kind == 'voidp': + @classmethod + def _arg_to_ctypes(cls, *value): + if value and isinstance(value[0], bytes): + return ctypes.c_char_p(value[0]) + else: + return super(CTypesPtr, cls)._arg_to_ctypes(*value) + + if kind == 'charp' or kind == 'bytep': + def _to_string(self, maxlen): + if maxlen < 0: + maxlen = sys.maxsize + p = ctypes.cast(self._as_ctype_ptr, + ctypes.POINTER(ctypes.c_char)) + n = 0 + while n < maxlen and p[n] != b'\x00': + n += 1 + return b''.join([p[i] for i in range(n)]) + + def _get_own_repr(self): + if getattr(self, '_own', False): + return 'owning %d bytes' % ( + ctypes.sizeof(self._as_ctype_ptr.contents),) + return super(CTypesPtr, self)._get_own_repr() + # + if (BItem is self.ffi._get_cached_btype(model.void_type) or + BItem is self.ffi._get_cached_btype(model.PrimitiveType('char'))): + CTypesPtr._automatic_casts = True + # + CTypesPtr._fix_class() + return CTypesPtr + + def new_array_type(self, CTypesPtr, length): + if length is None: + brackets = ' &[]' + else: + brackets = ' &[%d]' % length + BItem = CTypesPtr._BItem + getbtype = self.ffi._get_cached_btype + if BItem is getbtype(model.PrimitiveType('char')): + kind = 'char' + elif BItem in (getbtype(model.PrimitiveType('signed char')), + getbtype(model.PrimitiveType('unsigned char'))): + kind = 'byte' + else: + kind = 'generic' + # + class CTypesArray(CTypesGenericArray): + __slots__ = ['_blob', '_own'] + if length is not None: + _ctype = BItem._ctype * length + else: + __slots__.append('_ctype') + _reftypename = BItem._get_c_name(brackets) + _declared_length = length + _CTPtr = CTypesPtr + + def __init__(self, init): + if length is None: + if isinstance(init, (int, long)): + len1 = init + 
init = None + elif kind == 'char' and isinstance(init, bytes): + len1 = len(init) + 1 # extra null + else: + init = tuple(init) + len1 = len(init) + self._ctype = BItem._ctype * len1 + self._blob = self._ctype() + self._own = True + if init is not None: + self._initialize(self._blob, init) + + @staticmethod + def _initialize(blob, init): + if isinstance(init, bytes): + init = [init[i:i+1] for i in range(len(init))] + else: + if isinstance(init, CTypesGenericArray): + if (len(init) != len(blob) or + not isinstance(init, CTypesArray)): + raise TypeError("length/type mismatch: %s" % (init,)) + init = tuple(init) + if len(init) > len(blob): + raise IndexError("too many initializers") + addr = ctypes.cast(blob, ctypes.c_void_p).value + PTR = ctypes.POINTER(BItem._ctype) + itemsize = ctypes.sizeof(BItem._ctype) + for i, value in enumerate(init): + p = ctypes.cast(addr + i * itemsize, PTR) + BItem._initialize(p.contents, value) + + def __len__(self): + return len(self._blob) + + def __getitem__(self, index): + if not (0 <= index < len(self._blob)): + raise IndexError + return BItem._from_ctypes(self._blob[index]) + + def __setitem__(self, index, value): + if not (0 <= index < len(self._blob)): + raise IndexError + self._blob[index] = BItem._to_ctypes(value) + + if kind == 'char' or kind == 'byte': + def _to_string(self, maxlen): + if maxlen < 0: + maxlen = len(self._blob) + p = ctypes.cast(self._blob, + ctypes.POINTER(ctypes.c_char)) + n = 0 + while n < maxlen and p[n] != b'\x00': + n += 1 + return b''.join([p[i] for i in range(n)]) + + def _get_own_repr(self): + if getattr(self, '_own', False): + return 'owning %d bytes' % (ctypes.sizeof(self._blob),) + return super(CTypesArray, self)._get_own_repr() + + def _convert_to_address(self, BClass): + if BClass in (CTypesPtr, None) or BClass._automatic_casts: + return ctypes.addressof(self._blob) + else: + return CTypesData._convert_to_address(self, BClass) + + @staticmethod + def _from_ctypes(ctypes_array): + self = 
CTypesArray.__new__(CTypesArray) + self._blob = ctypes_array + return self + + @staticmethod + def _arg_to_ctypes(value): + return CTypesPtr._arg_to_ctypes(value) + + def __add__(self, other): + if isinstance(other, (int, long)): + return CTypesPtr._new_pointer_at( + ctypes.addressof(self._blob) + + other * ctypes.sizeof(BItem._ctype)) + else: + return NotImplemented + + @classmethod + def _cast_from(cls, source): + raise NotImplementedError("casting to %r" % ( + cls._get_c_name(),)) + # + CTypesArray._fix_class() + return CTypesArray + + def _new_struct_or_union(self, kind, name, base_ctypes_class): + # + class struct_or_union(base_ctypes_class): + pass + struct_or_union.__name__ = '%s_%s' % (kind, name) + kind1 = kind + # + class CTypesStructOrUnion(CTypesBaseStructOrUnion): + __slots__ = ['_blob'] + _ctype = struct_or_union + _reftypename = '%s &' % (name,) + _kind = kind = kind1 + # + CTypesStructOrUnion._fix_class() + return CTypesStructOrUnion + + def new_struct_type(self, name): + return self._new_struct_or_union('struct', name, ctypes.Structure) + + def new_union_type(self, name): + return self._new_struct_or_union('union', name, ctypes.Union) + + def complete_struct_or_union(self, CTypesStructOrUnion, fields, tp, + totalsize=-1, totalalignment=-1, sflags=0, + pack=0): + if totalsize >= 0 or totalalignment >= 0: + raise NotImplementedError("the ctypes backend of CFFI does not support " + "structures completed by verify(); please " + "compile and install the _cffi_backend module.") + struct_or_union = CTypesStructOrUnion._ctype + fnames = [fname for (fname, BField, bitsize) in fields] + btypes = [BField for (fname, BField, bitsize) in fields] + bitfields = [bitsize for (fname, BField, bitsize) in fields] + # + bfield_types = {} + cfields = [] + for (fname, BField, bitsize) in fields: + if bitsize < 0: + cfields.append((fname, BField._ctype)) + bfield_types[fname] = BField + else: + cfields.append((fname, BField._ctype, bitsize)) + bfield_types[fname] = 
Ellipsis + if sflags & 8: + struct_or_union._pack_ = 1 + elif pack: + struct_or_union._pack_ = pack + struct_or_union._fields_ = cfields + CTypesStructOrUnion._bfield_types = bfield_types + # + @staticmethod + def _create_ctype_obj(init): + result = struct_or_union() + if init is not None: + initialize(result, init) + return result + CTypesStructOrUnion._create_ctype_obj = _create_ctype_obj + # + def initialize(blob, init): + if is_union: + if len(init) > 1: + raise ValueError("union initializer: %d items given, but " + "only one supported (use a dict if needed)" + % (len(init),)) + if not isinstance(init, dict): + if isinstance(init, (bytes, unicode)): + raise TypeError("union initializer: got a str") + init = tuple(init) + if len(init) > len(fnames): + raise ValueError("too many values for %s initializer" % + CTypesStructOrUnion._get_c_name()) + init = dict(zip(fnames, init)) + addr = ctypes.addressof(blob) + for fname, value in init.items(): + BField, bitsize = name2fieldtype[fname] + assert bitsize < 0, \ + "not implemented: initializer with bit fields" + offset = CTypesStructOrUnion._offsetof(fname) + PTR = ctypes.POINTER(BField._ctype) + p = ctypes.cast(addr + offset, PTR) + BField._initialize(p.contents, value) + is_union = CTypesStructOrUnion._kind == 'union' + name2fieldtype = dict(zip(fnames, zip(btypes, bitfields))) + # + for fname, BField, bitsize in fields: + if fname == '': + raise NotImplementedError("nested anonymous structs/unions") + if hasattr(CTypesStructOrUnion, fname): + raise ValueError("the field name %r conflicts in " + "the ctypes backend" % fname) + if bitsize < 0: + def getter(self, fname=fname, BField=BField, + offset=CTypesStructOrUnion._offsetof(fname), + PTR=ctypes.POINTER(BField._ctype)): + addr = ctypes.addressof(self._blob) + p = ctypes.cast(addr + offset, PTR) + return BField._from_ctypes(p.contents) + def setter(self, value, fname=fname, BField=BField): + setattr(self._blob, fname, BField._to_ctypes(value)) + # + if 
issubclass(BField, CTypesGenericArray): + setter = None + if BField._declared_length == 0: + def getter(self, fname=fname, BFieldPtr=BField._CTPtr, + offset=CTypesStructOrUnion._offsetof(fname), + PTR=ctypes.POINTER(BField._ctype)): + addr = ctypes.addressof(self._blob) + p = ctypes.cast(addr + offset, PTR) + return BFieldPtr._from_ctypes(p) + # + else: + def getter(self, fname=fname, BField=BField): + return BField._from_ctypes(getattr(self._blob, fname)) + def setter(self, value, fname=fname, BField=BField): + # xxx obscure workaround + value = BField._to_ctypes(value) + oldvalue = getattr(self._blob, fname) + setattr(self._blob, fname, value) + if value != getattr(self._blob, fname): + setattr(self._blob, fname, oldvalue) + raise OverflowError("value too large for bitfield") + setattr(CTypesStructOrUnion, fname, property(getter, setter)) + # + CTypesPtr = self.ffi._get_cached_btype(model.PointerType(tp)) + for fname in fnames: + if hasattr(CTypesPtr, fname): + raise ValueError("the field name %r conflicts in " + "the ctypes backend" % fname) + def getter(self, fname=fname): + return getattr(self[0], fname) + def setter(self, value, fname=fname): + setattr(self[0], fname, value) + setattr(CTypesPtr, fname, property(getter, setter)) + + def new_function_type(self, BArgs, BResult, has_varargs): + nameargs = [BArg._get_c_name() for BArg in BArgs] + if has_varargs: + nameargs.append('...') + nameargs = ', '.join(nameargs) + # + class CTypesFunctionPtr(CTypesGenericPtr): + __slots__ = ['_own_callback', '_name'] + _ctype = ctypes.CFUNCTYPE(getattr(BResult, '_ctype', None), + *[BArg._ctype for BArg in BArgs], + use_errno=True) + _reftypename = BResult._get_c_name('(* &)(%s)' % (nameargs,)) + + def __init__(self, init, error=None): + # create a callback to the Python callable init() + import traceback + assert not has_varargs, "varargs not supported for callbacks" + if getattr(BResult, '_ctype', None) is not None: + error = BResult._from_ctypes( + 
BResult._create_ctype_obj(error)) + else: + error = None + def callback(*args): + args2 = [] + for arg, BArg in zip(args, BArgs): + args2.append(BArg._from_ctypes(arg)) + try: + res2 = init(*args2) + res2 = BResult._to_ctypes(res2) + except: + traceback.print_exc() + res2 = error + if issubclass(BResult, CTypesGenericPtr): + if res2: + res2 = ctypes.cast(res2, ctypes.c_void_p).value + # .value: http://bugs.python.org/issue1574593 + else: + res2 = None + #print repr(res2) + return res2 + if issubclass(BResult, CTypesGenericPtr): + # The only pointers callbacks can return are void*s: + # http://bugs.python.org/issue5710 + callback_ctype = ctypes.CFUNCTYPE( + ctypes.c_void_p, + *[BArg._ctype for BArg in BArgs], + use_errno=True) + else: + callback_ctype = CTypesFunctionPtr._ctype + self._as_ctype_ptr = callback_ctype(callback) + self._address = ctypes.cast(self._as_ctype_ptr, + ctypes.c_void_p).value + self._own_callback = init + + @staticmethod + def _initialize(ctypes_ptr, value): + if value: + raise NotImplementedError("ctypes backend: not supported: " + "initializers for function pointers") + + def __repr__(self): + c_name = getattr(self, '_name', None) + if c_name: + i = self._reftypename.index('(* &)') + if self._reftypename[i-1] not in ' )*': + c_name = ' ' + c_name + c_name = self._reftypename.replace('(* &)', c_name) + return CTypesData.__repr__(self, c_name) + + def _get_own_repr(self): + if getattr(self, '_own_callback', None) is not None: + return 'calling %r' % (self._own_callback,) + return super(CTypesFunctionPtr, self)._get_own_repr() + + def __call__(self, *args): + if has_varargs: + assert len(args) >= len(BArgs) + extraargs = args[len(BArgs):] + args = args[:len(BArgs)] + else: + assert len(args) == len(BArgs) + ctypes_args = [] + for arg, BArg in zip(args, BArgs): + ctypes_args.append(BArg._arg_to_ctypes(arg)) + if has_varargs: + for i, arg in enumerate(extraargs): + if arg is None: + ctypes_args.append(ctypes.c_void_p(0)) # NULL + continue + if 
not isinstance(arg, CTypesData): + raise TypeError( + "argument %d passed in the variadic part " + "needs to be a cdata object (got %s)" % + (1 + len(BArgs) + i, type(arg).__name__)) + ctypes_args.append(arg._arg_to_ctypes(arg)) + result = self._as_ctype_ptr(*ctypes_args) + return BResult._from_ctypes(result) + # + CTypesFunctionPtr._fix_class() + return CTypesFunctionPtr + + def new_enum_type(self, name, enumerators, enumvalues, CTypesInt): + assert isinstance(name, str) + reverse_mapping = dict(zip(reversed(enumvalues), + reversed(enumerators))) + # + class CTypesEnum(CTypesInt): + __slots__ = [] + _reftypename = '%s &' % name + + def _get_own_repr(self): + value = self._value + try: + return '%d: %s' % (value, reverse_mapping[value]) + except KeyError: + return str(value) + + def _to_string(self, maxlen): + value = self._value + try: + return reverse_mapping[value] + except KeyError: + return str(value) + # + CTypesEnum._fix_class() + return CTypesEnum + + def get_errno(self): + return ctypes.get_errno() + + def set_errno(self, value): + ctypes.set_errno(value) + + def string(self, b, maxlen=-1): + return b._to_string(maxlen) + + def buffer(self, bptr, size=-1): + raise NotImplementedError("buffer() with ctypes backend") + + def sizeof(self, cdata_or_BType): + if isinstance(cdata_or_BType, CTypesData): + return cdata_or_BType._get_size_of_instance() + else: + assert issubclass(cdata_or_BType, CTypesData) + return cdata_or_BType._get_size() + + def alignof(self, BType): + assert issubclass(BType, CTypesData) + return BType._alignment() + + def newp(self, BType, source): + if not issubclass(BType, CTypesData): + raise TypeError + return BType._newp(source) + + def cast(self, BType, source): + return BType._cast_from(source) + + def callback(self, BType, source, error, onerror): + assert onerror is None # XXX not implemented + return BType(source, error) + + _weakref_cache_ref = None + + def gcp(self, cdata, destructor, size=0): + if self._weakref_cache_ref is 
None: + import weakref + class MyRef(weakref.ref): + def __eq__(self, other): + myref = self() + return self is other or ( + myref is not None and myref is other()) + def __ne__(self, other): + return not (self == other) + def __hash__(self): + try: + return self._hash + except AttributeError: + self._hash = hash(self()) + return self._hash + self._weakref_cache_ref = {}, MyRef + weak_cache, MyRef = self._weakref_cache_ref + + if destructor is None: + try: + del weak_cache[MyRef(cdata)] + except KeyError: + raise TypeError("Can remove destructor only on a object " + "previously returned by ffi.gc()") + return None + + def remove(k): + cdata, destructor = weak_cache.pop(k, (None, None)) + if destructor is not None: + destructor(cdata) + + new_cdata = self.cast(self.typeof(cdata), cdata) + assert new_cdata is not cdata + weak_cache[MyRef(new_cdata, remove)] = (cdata, destructor) + return new_cdata + + typeof = type + + def getcname(self, BType, replace_with): + return BType._get_c_name(replace_with) + + def typeoffsetof(self, BType, fieldname, num=0): + if isinstance(fieldname, str): + if num == 0 and issubclass(BType, CTypesGenericPtr): + BType = BType._BItem + if not issubclass(BType, CTypesBaseStructOrUnion): + raise TypeError("expected a struct or union ctype") + BField = BType._bfield_types[fieldname] + if BField is Ellipsis: + raise TypeError("not supported for bitfields") + return (BField, BType._offsetof(fieldname)) + elif isinstance(fieldname, (int, long)): + if issubclass(BType, CTypesGenericArray): + BType = BType._CTPtr + if not issubclass(BType, CTypesGenericPtr): + raise TypeError("expected an array or ptr ctype") + BItem = BType._BItem + offset = BItem._get_size() * fieldname + if offset > sys.maxsize: + raise OverflowError + return (BItem, offset) + else: + raise TypeError(type(fieldname)) + + def rawaddressof(self, BTypePtr, cdata, offset=None): + if isinstance(cdata, CTypesBaseStructOrUnion): + ptr = ctypes.pointer(type(cdata)._to_ctypes(cdata)) + 
elif isinstance(cdata, CTypesGenericPtr): + if offset is None or not issubclass(type(cdata)._BItem, + CTypesBaseStructOrUnion): + raise TypeError("unexpected cdata type") + ptr = type(cdata)._to_ctypes(cdata) + elif isinstance(cdata, CTypesGenericArray): + ptr = type(cdata)._to_ctypes(cdata) + else: + raise TypeError("expected a ") + if offset: + ptr = ctypes.cast( + ctypes.c_void_p( + ctypes.cast(ptr, ctypes.c_void_p).value + offset), + type(ptr)) + return BTypePtr._from_ctypes(ptr) + + +class CTypesLibrary(object): + + def __init__(self, backend, cdll): + self.backend = backend + self.cdll = cdll + + def load_function(self, BType, name): + c_func = getattr(self.cdll, name) + funcobj = BType._from_ctypes(c_func) + funcobj._name = name + return funcobj + + def read_variable(self, BType, name): + try: + ctypes_obj = BType._ctype.in_dll(self.cdll, name) + except AttributeError as e: + raise NotImplementedError(e) + return BType._from_ctypes(ctypes_obj) + + def write_variable(self, BType, name, value): + new_ctypes_obj = BType._to_ctypes(value) + ctypes_obj = BType._ctype.in_dll(self.cdll, name) + ctypes.memmove(ctypes.addressof(ctypes_obj), + ctypes.addressof(new_ctypes_obj), + ctypes.sizeof(BType._ctype)) diff --git a/.venv/lib/python3.9/site-packages/cffi/cffi_opcode.py b/.venv/lib/python3.9/site-packages/cffi/cffi_opcode.py new file mode 100644 index 0000000..a0df98d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi/cffi_opcode.py @@ -0,0 +1,187 @@ +from .error import VerificationError + +class CffiOp(object): + def __init__(self, op, arg): + self.op = op + self.arg = arg + + def as_c_expr(self): + if self.op is None: + assert isinstance(self.arg, str) + return '(_cffi_opcode_t)(%s)' % (self.arg,) + classname = CLASS_NAME[self.op] + return '_CFFI_OP(_CFFI_OP_%s, %s)' % (classname, self.arg) + + def as_python_bytes(self): + if self.op is None and self.arg.isdigit(): + value = int(self.arg) # non-negative: '-' not in self.arg + if value >= 2**31: + raise 
OverflowError("cannot emit %r: limited to 2**31-1" + % (self.arg,)) + return format_four_bytes(value) + if isinstance(self.arg, str): + raise VerificationError("cannot emit to Python: %r" % (self.arg,)) + return format_four_bytes((self.arg << 8) | self.op) + + def __str__(self): + classname = CLASS_NAME.get(self.op, self.op) + return '(%s %s)' % (classname, self.arg) + +def format_four_bytes(num): + return '\\x%02X\\x%02X\\x%02X\\x%02X' % ( + (num >> 24) & 0xFF, + (num >> 16) & 0xFF, + (num >> 8) & 0xFF, + (num ) & 0xFF) + +OP_PRIMITIVE = 1 +OP_POINTER = 3 +OP_ARRAY = 5 +OP_OPEN_ARRAY = 7 +OP_STRUCT_UNION = 9 +OP_ENUM = 11 +OP_FUNCTION = 13 +OP_FUNCTION_END = 15 +OP_NOOP = 17 +OP_BITFIELD = 19 +OP_TYPENAME = 21 +OP_CPYTHON_BLTN_V = 23 # varargs +OP_CPYTHON_BLTN_N = 25 # noargs +OP_CPYTHON_BLTN_O = 27 # O (i.e. a single arg) +OP_CONSTANT = 29 +OP_CONSTANT_INT = 31 +OP_GLOBAL_VAR = 33 +OP_DLOPEN_FUNC = 35 +OP_DLOPEN_CONST = 37 +OP_GLOBAL_VAR_F = 39 +OP_EXTERN_PYTHON = 41 + +PRIM_VOID = 0 +PRIM_BOOL = 1 +PRIM_CHAR = 2 +PRIM_SCHAR = 3 +PRIM_UCHAR = 4 +PRIM_SHORT = 5 +PRIM_USHORT = 6 +PRIM_INT = 7 +PRIM_UINT = 8 +PRIM_LONG = 9 +PRIM_ULONG = 10 +PRIM_LONGLONG = 11 +PRIM_ULONGLONG = 12 +PRIM_FLOAT = 13 +PRIM_DOUBLE = 14 +PRIM_LONGDOUBLE = 15 + +PRIM_WCHAR = 16 +PRIM_INT8 = 17 +PRIM_UINT8 = 18 +PRIM_INT16 = 19 +PRIM_UINT16 = 20 +PRIM_INT32 = 21 +PRIM_UINT32 = 22 +PRIM_INT64 = 23 +PRIM_UINT64 = 24 +PRIM_INTPTR = 25 +PRIM_UINTPTR = 26 +PRIM_PTRDIFF = 27 +PRIM_SIZE = 28 +PRIM_SSIZE = 29 +PRIM_INT_LEAST8 = 30 +PRIM_UINT_LEAST8 = 31 +PRIM_INT_LEAST16 = 32 +PRIM_UINT_LEAST16 = 33 +PRIM_INT_LEAST32 = 34 +PRIM_UINT_LEAST32 = 35 +PRIM_INT_LEAST64 = 36 +PRIM_UINT_LEAST64 = 37 +PRIM_INT_FAST8 = 38 +PRIM_UINT_FAST8 = 39 +PRIM_INT_FAST16 = 40 +PRIM_UINT_FAST16 = 41 +PRIM_INT_FAST32 = 42 +PRIM_UINT_FAST32 = 43 +PRIM_INT_FAST64 = 44 +PRIM_UINT_FAST64 = 45 +PRIM_INTMAX = 46 +PRIM_UINTMAX = 47 +PRIM_FLOATCOMPLEX = 48 +PRIM_DOUBLECOMPLEX = 49 +PRIM_CHAR16 = 50 +PRIM_CHAR32 = 51 + +_NUM_PRIM 
= 52 +_UNKNOWN_PRIM = -1 +_UNKNOWN_FLOAT_PRIM = -2 +_UNKNOWN_LONG_DOUBLE = -3 + +_IO_FILE_STRUCT = -1 + +PRIMITIVE_TO_INDEX = { + 'char': PRIM_CHAR, + 'short': PRIM_SHORT, + 'int': PRIM_INT, + 'long': PRIM_LONG, + 'long long': PRIM_LONGLONG, + 'signed char': PRIM_SCHAR, + 'unsigned char': PRIM_UCHAR, + 'unsigned short': PRIM_USHORT, + 'unsigned int': PRIM_UINT, + 'unsigned long': PRIM_ULONG, + 'unsigned long long': PRIM_ULONGLONG, + 'float': PRIM_FLOAT, + 'double': PRIM_DOUBLE, + 'long double': PRIM_LONGDOUBLE, + 'float _Complex': PRIM_FLOATCOMPLEX, + 'double _Complex': PRIM_DOUBLECOMPLEX, + '_Bool': PRIM_BOOL, + 'wchar_t': PRIM_WCHAR, + 'char16_t': PRIM_CHAR16, + 'char32_t': PRIM_CHAR32, + 'int8_t': PRIM_INT8, + 'uint8_t': PRIM_UINT8, + 'int16_t': PRIM_INT16, + 'uint16_t': PRIM_UINT16, + 'int32_t': PRIM_INT32, + 'uint32_t': PRIM_UINT32, + 'int64_t': PRIM_INT64, + 'uint64_t': PRIM_UINT64, + 'intptr_t': PRIM_INTPTR, + 'uintptr_t': PRIM_UINTPTR, + 'ptrdiff_t': PRIM_PTRDIFF, + 'size_t': PRIM_SIZE, + 'ssize_t': PRIM_SSIZE, + 'int_least8_t': PRIM_INT_LEAST8, + 'uint_least8_t': PRIM_UINT_LEAST8, + 'int_least16_t': PRIM_INT_LEAST16, + 'uint_least16_t': PRIM_UINT_LEAST16, + 'int_least32_t': PRIM_INT_LEAST32, + 'uint_least32_t': PRIM_UINT_LEAST32, + 'int_least64_t': PRIM_INT_LEAST64, + 'uint_least64_t': PRIM_UINT_LEAST64, + 'int_fast8_t': PRIM_INT_FAST8, + 'uint_fast8_t': PRIM_UINT_FAST8, + 'int_fast16_t': PRIM_INT_FAST16, + 'uint_fast16_t': PRIM_UINT_FAST16, + 'int_fast32_t': PRIM_INT_FAST32, + 'uint_fast32_t': PRIM_UINT_FAST32, + 'int_fast64_t': PRIM_INT_FAST64, + 'uint_fast64_t': PRIM_UINT_FAST64, + 'intmax_t': PRIM_INTMAX, + 'uintmax_t': PRIM_UINTMAX, + } + +F_UNION = 0x01 +F_CHECK_FIELDS = 0x02 +F_PACKED = 0x04 +F_EXTERNAL = 0x08 +F_OPAQUE = 0x10 + +G_FLAGS = dict([('_CFFI_' + _key, globals()[_key]) + for _key in ['F_UNION', 'F_CHECK_FIELDS', 'F_PACKED', + 'F_EXTERNAL', 'F_OPAQUE']]) + +CLASS_NAME = {} +for _name, _value in list(globals().items()): + if 
_name.startswith('OP_') and isinstance(_value, int): + CLASS_NAME[_value] = _name[3:] diff --git a/.venv/lib/python3.9/site-packages/cffi/commontypes.py b/.venv/lib/python3.9/site-packages/cffi/commontypes.py new file mode 100644 index 0000000..8ec97c7 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi/commontypes.py @@ -0,0 +1,80 @@ +import sys +from . import model +from .error import FFIError + + +COMMON_TYPES = {} + +try: + # fetch "bool" and all simple Windows types + from _cffi_backend import _get_common_types + _get_common_types(COMMON_TYPES) +except ImportError: + pass + +COMMON_TYPES['FILE'] = model.unknown_type('FILE', '_IO_FILE') +COMMON_TYPES['bool'] = '_Bool' # in case we got ImportError above + +for _type in model.PrimitiveType.ALL_PRIMITIVE_TYPES: + if _type.endswith('_t'): + COMMON_TYPES[_type] = _type +del _type + +_CACHE = {} + +def resolve_common_type(parser, commontype): + try: + return _CACHE[commontype] + except KeyError: + cdecl = COMMON_TYPES.get(commontype, commontype) + if not isinstance(cdecl, str): + result, quals = cdecl, 0 # cdecl is already a BaseType + elif cdecl in model.PrimitiveType.ALL_PRIMITIVE_TYPES: + result, quals = model.PrimitiveType(cdecl), 0 + elif cdecl == 'set-unicode-needed': + raise FFIError("The Windows type %r is only available after " + "you call ffi.set_unicode()" % (commontype,)) + else: + if commontype == cdecl: + raise FFIError( + "Unsupported type: %r. Please look at " + "http://cffi.readthedocs.io/en/latest/cdef.html#ffi-cdef-limitations " + "and file an issue if you think this type should really " + "be supported." 
% (commontype,)) + result, quals = parser.parse_type_and_quals(cdecl) # recursive + + assert isinstance(result, model.BaseTypeByIdentity) + _CACHE[commontype] = result, quals + return result, quals + + +# ____________________________________________________________ +# extra types for Windows (most of them are in commontypes.c) + + +def win_common_types(): + return { + "UNICODE_STRING": model.StructType( + "_UNICODE_STRING", + ["Length", + "MaximumLength", + "Buffer"], + [model.PrimitiveType("unsigned short"), + model.PrimitiveType("unsigned short"), + model.PointerType(model.PrimitiveType("wchar_t"))], + [-1, -1, -1]), + "PUNICODE_STRING": "UNICODE_STRING *", + "PCUNICODE_STRING": "const UNICODE_STRING *", + + "TBYTE": "set-unicode-needed", + "TCHAR": "set-unicode-needed", + "LPCTSTR": "set-unicode-needed", + "PCTSTR": "set-unicode-needed", + "LPTSTR": "set-unicode-needed", + "PTSTR": "set-unicode-needed", + "PTBYTE": "set-unicode-needed", + "PTCHAR": "set-unicode-needed", + } + +if sys.platform == 'win32': + COMMON_TYPES.update(win_common_types()) diff --git a/.venv/lib/python3.9/site-packages/cffi/cparser.py b/.venv/lib/python3.9/site-packages/cffi/cparser.py new file mode 100644 index 0000000..74830e9 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi/cparser.py @@ -0,0 +1,1006 @@ +from . import model +from .commontypes import COMMON_TYPES, resolve_common_type +from .error import FFIError, CDefError +try: + from . import _pycparser as pycparser +except ImportError: + import pycparser +import weakref, re, sys + +try: + if sys.version_info < (3,): + import thread as _thread + else: + import _thread + lock = _thread.allocate_lock() +except ImportError: + lock = None + +def _workaround_for_static_import_finders(): + # Issue #392: packaging tools like cx_Freeze can not find these + # because pycparser uses exec dynamic import. This is an obscure + # workaround. This function is never called. 
+ import pycparser.yacctab + import pycparser.lextab + +CDEF_SOURCE_STRING = "" +_r_comment = re.compile(r"/\*.*?\*/|//([^\n\\]|\\.)*?$", + re.DOTALL | re.MULTILINE) +_r_define = re.compile(r"^\s*#\s*define\s+([A-Za-z_][A-Za-z_0-9]*)" + r"\b((?:[^\n\\]|\\.)*?)$", + re.DOTALL | re.MULTILINE) +_r_line_directive = re.compile(r"^[ \t]*#[ \t]*(?:line|\d+)\b.*$", re.MULTILINE) +_r_partial_enum = re.compile(r"=\s*\.\.\.\s*[,}]|\.\.\.\s*\}") +_r_enum_dotdotdot = re.compile(r"__dotdotdot\d+__$") +_r_partial_array = re.compile(r"\[\s*\.\.\.\s*\]") +_r_words = re.compile(r"\w+|\S") +_parser_cache = None +_r_int_literal = re.compile(r"-?0?x?[0-9a-f]+[lu]*$", re.IGNORECASE) +_r_stdcall1 = re.compile(r"\b(__stdcall|WINAPI)\b") +_r_stdcall2 = re.compile(r"[(]\s*(__stdcall|WINAPI)\b") +_r_cdecl = re.compile(r"\b__cdecl\b") +_r_extern_python = re.compile(r'\bextern\s*"' + r'(Python|Python\s*\+\s*C|C\s*\+\s*Python)"\s*.') +_r_star_const_space = re.compile( # matches "* const " + r"[*]\s*((const|volatile|restrict)\b\s*)+") +_r_int_dotdotdot = re.compile(r"(\b(int|long|short|signed|unsigned|char)\s*)+" + r"\.\.\.") +_r_float_dotdotdot = re.compile(r"\b(double|float)\s*\.\.\.") + +def _get_parser(): + global _parser_cache + if _parser_cache is None: + _parser_cache = pycparser.CParser() + return _parser_cache + +def _workaround_for_old_pycparser(csource): + # Workaround for a pycparser issue (fixed between pycparser 2.10 and + # 2.14): "char*const***" gives us a wrong syntax tree, the same as + # for "char***(*const)". This means we can't tell the difference + # afterwards. But "char(*const(***))" gives us the right syntax + # tree. The issue only occurs if there are several stars in + # sequence with no parenthesis inbetween, just possibly qualifiers. + # Attempt to fix it by adding some parentheses in the source: each + # time we see "* const" or "* const *", we add an opening + # parenthesis before each star---the hard part is figuring out where + # to close them. 
+ parts = [] + while True: + match = _r_star_const_space.search(csource) + if not match: + break + #print repr(''.join(parts)+csource), '=>', + parts.append(csource[:match.start()]) + parts.append('('); closing = ')' + parts.append(match.group()) # e.g. "* const " + endpos = match.end() + if csource.startswith('*', endpos): + parts.append('('); closing += ')' + level = 0 + i = endpos + while i < len(csource): + c = csource[i] + if c == '(': + level += 1 + elif c == ')': + if level == 0: + break + level -= 1 + elif c in ',;=': + if level == 0: + break + i += 1 + csource = csource[endpos:i] + closing + csource[i:] + #print repr(''.join(parts)+csource) + parts.append(csource) + return ''.join(parts) + +def _preprocess_extern_python(csource): + # input: `extern "Python" int foo(int);` or + # `extern "Python" { int foo(int); }` + # output: + # void __cffi_extern_python_start; + # int foo(int); + # void __cffi_extern_python_stop; + # + # input: `extern "Python+C" int foo(int);` + # output: + # void __cffi_extern_python_plus_c_start; + # int foo(int); + # void __cffi_extern_python_stop; + parts = [] + while True: + match = _r_extern_python.search(csource) + if not match: + break + endpos = match.end() - 1 + #print + #print ''.join(parts)+csource + #print '=>' + parts.append(csource[:match.start()]) + if 'C' in match.group(1): + parts.append('void __cffi_extern_python_plus_c_start; ') + else: + parts.append('void __cffi_extern_python_start; ') + if csource[endpos] == '{': + # grouping variant + closing = csource.find('}', endpos) + if closing < 0: + raise CDefError("'extern \"Python\" {': no '}' found") + if csource.find('{', endpos + 1, closing) >= 0: + raise NotImplementedError("cannot use { } inside a block " + "'extern \"Python\" { ... 
}'") + parts.append(csource[endpos+1:closing]) + csource = csource[closing+1:] + else: + # non-grouping variant + semicolon = csource.find(';', endpos) + if semicolon < 0: + raise CDefError("'extern \"Python\": no ';' found") + parts.append(csource[endpos:semicolon+1]) + csource = csource[semicolon+1:] + parts.append(' void __cffi_extern_python_stop;') + #print ''.join(parts)+csource + #print + parts.append(csource) + return ''.join(parts) + +def _warn_for_string_literal(csource): + if '"' not in csource: + return + for line in csource.splitlines(): + if '"' in line and not line.lstrip().startswith('#'): + import warnings + warnings.warn("String literal found in cdef() or type source. " + "String literals are ignored here, but you should " + "remove them anyway because some character sequences " + "confuse pre-parsing.") + break + +def _warn_for_non_extern_non_static_global_variable(decl): + if not decl.storage: + import warnings + warnings.warn("Global variable '%s' in cdef(): for consistency " + "with C it should have a storage class specifier " + "(usually 'extern')" % (decl.name,)) + +def _remove_line_directives(csource): + # _r_line_directive matches whole lines, without the final \n, if they + # start with '#line' with some spacing allowed, or '#NUMBER'. This + # function stores them away and replaces them with exactly the string + # '#line@N', where N is the index in the list 'line_directives'. 
+ line_directives = [] + def replace(m): + i = len(line_directives) + line_directives.append(m.group()) + return '#line@%d' % i + csource = _r_line_directive.sub(replace, csource) + return csource, line_directives + +def _put_back_line_directives(csource, line_directives): + def replace(m): + s = m.group() + if not s.startswith('#line@'): + raise AssertionError("unexpected #line directive " + "(should have been processed and removed") + return line_directives[int(s[6:])] + return _r_line_directive.sub(replace, csource) + +def _preprocess(csource): + # First, remove the lines of the form '#line N "filename"' because + # the "filename" part could confuse the rest + csource, line_directives = _remove_line_directives(csource) + # Remove comments. NOTE: this only work because the cdef() section + # should not contain any string literals (except in line directives)! + def replace_keeping_newlines(m): + return ' ' + m.group().count('\n') * '\n' + csource = _r_comment.sub(replace_keeping_newlines, csource) + # Remove the "#define FOO x" lines + macros = {} + for match in _r_define.finditer(csource): + macroname, macrovalue = match.groups() + macrovalue = macrovalue.replace('\\\n', '').strip() + macros[macroname] = macrovalue + csource = _r_define.sub('', csource) + # + if pycparser.__version__ < '2.14': + csource = _workaround_for_old_pycparser(csource) + # + # BIG HACK: replace WINAPI or __stdcall with "volatile const". + # It doesn't make sense for the return type of a function to be + # "volatile volatile const", so we abuse it to detect __stdcall... + # Hack number 2 is that "int(volatile *fptr)();" is not valid C + # syntax, so we place the "volatile" before the opening parenthesis. 
+ csource = _r_stdcall2.sub(' volatile volatile const(', csource) + csource = _r_stdcall1.sub(' volatile volatile const ', csource) + csource = _r_cdecl.sub(' ', csource) + # + # Replace `extern "Python"` with start/end markers + csource = _preprocess_extern_python(csource) + # + # Now there should not be any string literal left; warn if we get one + _warn_for_string_literal(csource) + # + # Replace "[...]" with "[__dotdotdotarray__]" + csource = _r_partial_array.sub('[__dotdotdotarray__]', csource) + # + # Replace "...}" with "__dotdotdotNUM__}". This construction should + # occur only at the end of enums; at the end of structs we have "...;}" + # and at the end of vararg functions "...);". Also replace "=...[,}]" + # with ",__dotdotdotNUM__[,}]": this occurs in the enums too, when + # giving an unknown value. + matches = list(_r_partial_enum.finditer(csource)) + for number, match in enumerate(reversed(matches)): + p = match.start() + if csource[p] == '=': + p2 = csource.find('...', p, match.end()) + assert p2 > p + csource = '%s,__dotdotdot%d__ %s' % (csource[:p], number, + csource[p2+3:]) + else: + assert csource[p:p+3] == '...' + csource = '%s __dotdotdot%d__ %s' % (csource[:p], number, + csource[p+3:]) + # Replace "int ..." or "unsigned long int..." with "__dotdotdotint__" + csource = _r_int_dotdotdot.sub(' __dotdotdotint__ ', csource) + # Replace "float ..." or "double..." with "__dotdotdotfloat__" + csource = _r_float_dotdotdot.sub(' __dotdotdotfloat__ ', csource) + # Replace all remaining "..." with the same name, "__dotdotdot__", + # which is declared with a typedef for the purpose of C parsing. + csource = csource.replace('...', ' __dotdotdot__ ') + # Finally, put back the line directives + csource = _put_back_line_directives(csource, line_directives) + return csource, macros + +def _common_type_names(csource): + # Look in the source for what looks like usages of types from the + # list of common types. 
A "usage" is approximated here as the + # appearance of the word, minus a "definition" of the type, which + # is the last word in a "typedef" statement. Approximative only + # but should be fine for all the common types. + look_for_words = set(COMMON_TYPES) + look_for_words.add(';') + look_for_words.add(',') + look_for_words.add('(') + look_for_words.add(')') + look_for_words.add('typedef') + words_used = set() + is_typedef = False + paren = 0 + previous_word = '' + for word in _r_words.findall(csource): + if word in look_for_words: + if word == ';': + if is_typedef: + words_used.discard(previous_word) + look_for_words.discard(previous_word) + is_typedef = False + elif word == 'typedef': + is_typedef = True + paren = 0 + elif word == '(': + paren += 1 + elif word == ')': + paren -= 1 + elif word == ',': + if is_typedef and paren == 0: + words_used.discard(previous_word) + look_for_words.discard(previous_word) + else: # word in COMMON_TYPES + words_used.add(word) + previous_word = word + return words_used + + +class Parser(object): + + def __init__(self): + self._declarations = {} + self._included_declarations = set() + self._anonymous_counter = 0 + self._structnode2type = weakref.WeakKeyDictionary() + self._options = {} + self._int_constants = {} + self._recomplete = [] + self._uses_new_feature = None + + def _parse(self, csource): + csource, macros = _preprocess(csource) + # XXX: for more efficiency we would need to poke into the + # internals of CParser... 
the following registers the + # typedefs, because their presence or absence influences the + # parsing itself (but what they are typedef'ed to plays no role) + ctn = _common_type_names(csource) + typenames = [] + for name in sorted(self._declarations): + if name.startswith('typedef '): + name = name[8:] + typenames.append(name) + ctn.discard(name) + typenames += sorted(ctn) + # + csourcelines = [] + csourcelines.append('# 1 ""') + for typename in typenames: + csourcelines.append('typedef int %s;' % typename) + csourcelines.append('typedef int __dotdotdotint__, __dotdotdotfloat__,' + ' __dotdotdot__;') + # this forces pycparser to consider the following in the file + # called from line 1 + csourcelines.append('# 1 "%s"' % (CDEF_SOURCE_STRING,)) + csourcelines.append(csource) + fullcsource = '\n'.join(csourcelines) + if lock is not None: + lock.acquire() # pycparser is not thread-safe... + try: + ast = _get_parser().parse(fullcsource) + except pycparser.c_parser.ParseError as e: + self.convert_pycparser_error(e, csource) + finally: + if lock is not None: + lock.release() + # csource will be used to find buggy source text + return ast, macros, csource + + def _convert_pycparser_error(self, e, csource): + # xxx look for ":NUM:" at the start of str(e) + # and interpret that as a line number. This will not work if + # the user gives explicit ``# NUM "FILE"`` directives. 
+ line = None + msg = str(e) + match = re.match(r"%s:(\d+):" % (CDEF_SOURCE_STRING,), msg) + if match: + linenum = int(match.group(1), 10) + csourcelines = csource.splitlines() + if 1 <= linenum <= len(csourcelines): + line = csourcelines[linenum-1] + return line + + def convert_pycparser_error(self, e, csource): + line = self._convert_pycparser_error(e, csource) + + msg = str(e) + if line: + msg = 'cannot parse "%s"\n%s' % (line.strip(), msg) + else: + msg = 'parse error\n%s' % (msg,) + raise CDefError(msg) + + def parse(self, csource, override=False, packed=False, pack=None, + dllexport=False): + if packed: + if packed != True: + raise ValueError("'packed' should be False or True; use " + "'pack' to give another value") + if pack: + raise ValueError("cannot give both 'pack' and 'packed'") + pack = 1 + elif pack: + if pack & (pack - 1): + raise ValueError("'pack' must be a power of two, not %r" % + (pack,)) + else: + pack = 0 + prev_options = self._options + try: + self._options = {'override': override, + 'packed': pack, + 'dllexport': dllexport} + self._internal_parse(csource) + finally: + self._options = prev_options + + def _internal_parse(self, csource): + ast, macros, csource = self._parse(csource) + # add the macros + self._process_macros(macros) + # find the first "__dotdotdot__" and use that as a separator + # between the repeated typedefs and the real csource + iterator = iter(ast.ext) + for decl in iterator: + if decl.name == '__dotdotdot__': + break + else: + assert 0 + current_decl = None + # + try: + self._inside_extern_python = '__cffi_extern_python_stop' + for decl in iterator: + current_decl = decl + if isinstance(decl, pycparser.c_ast.Decl): + self._parse_decl(decl) + elif isinstance(decl, pycparser.c_ast.Typedef): + if not decl.name: + raise CDefError("typedef does not declare any name", + decl) + quals = 0 + if (isinstance(decl.type.type, pycparser.c_ast.IdentifierType) and + decl.type.type.names[-1].startswith('__dotdotdot')): + realtype = 
self._get_unknown_type(decl) + elif (isinstance(decl.type, pycparser.c_ast.PtrDecl) and + isinstance(decl.type.type, pycparser.c_ast.TypeDecl) and + isinstance(decl.type.type.type, + pycparser.c_ast.IdentifierType) and + decl.type.type.type.names[-1].startswith('__dotdotdot')): + realtype = self._get_unknown_ptr_type(decl) + else: + realtype, quals = self._get_type_and_quals( + decl.type, name=decl.name, partial_length_ok=True, + typedef_example="*(%s *)0" % (decl.name,)) + self._declare('typedef ' + decl.name, realtype, quals=quals) + elif decl.__class__.__name__ == 'Pragma': + pass # skip pragma, only in pycparser 2.15 + else: + raise CDefError("unexpected <%s>: this construct is valid " + "C but not valid in cdef()" % + decl.__class__.__name__, decl) + except CDefError as e: + if len(e.args) == 1: + e.args = e.args + (current_decl,) + raise + except FFIError as e: + msg = self._convert_pycparser_error(e, csource) + if msg: + e.args = (e.args[0] + "\n *** Err: %s" % msg,) + raise + + def _add_constants(self, key, val): + if key in self._int_constants: + if self._int_constants[key] == val: + return # ignore identical double declarations + raise FFIError( + "multiple declarations of constant: %s" % (key,)) + self._int_constants[key] = val + + def _add_integer_constant(self, name, int_str): + int_str = int_str.lower().rstrip("ul") + neg = int_str.startswith('-') + if neg: + int_str = int_str[1:] + # "010" is not valid oct in py3 + if (int_str.startswith("0") and int_str != '0' + and not int_str.startswith("0x")): + int_str = "0o" + int_str[1:] + pyvalue = int(int_str, 0) + if neg: + pyvalue = -pyvalue + self._add_constants(name, pyvalue) + self._declare('macro ' + name, pyvalue) + + def _process_macros(self, macros): + for key, value in macros.items(): + value = value.strip() + if _r_int_literal.match(value): + self._add_integer_constant(key, value) + elif value == '...': + self._declare('macro ' + key, value) + else: + raise CDefError( + 'only supports one of the 
following syntax:\n' + ' #define %s ... (literally dot-dot-dot)\n' + ' #define %s NUMBER (with NUMBER an integer' + ' constant, decimal/hex/octal)\n' + 'got:\n' + ' #define %s %s' + % (key, key, key, value)) + + def _declare_function(self, tp, quals, decl): + tp = self._get_type_pointer(tp, quals) + if self._options.get('dllexport'): + tag = 'dllexport_python ' + elif self._inside_extern_python == '__cffi_extern_python_start': + tag = 'extern_python ' + elif self._inside_extern_python == '__cffi_extern_python_plus_c_start': + tag = 'extern_python_plus_c ' + else: + tag = 'function ' + self._declare(tag + decl.name, tp) + + def _parse_decl(self, decl): + node = decl.type + if isinstance(node, pycparser.c_ast.FuncDecl): + tp, quals = self._get_type_and_quals(node, name=decl.name) + assert isinstance(tp, model.RawFunctionType) + self._declare_function(tp, quals, decl) + else: + if isinstance(node, pycparser.c_ast.Struct): + self._get_struct_union_enum_type('struct', node) + elif isinstance(node, pycparser.c_ast.Union): + self._get_struct_union_enum_type('union', node) + elif isinstance(node, pycparser.c_ast.Enum): + self._get_struct_union_enum_type('enum', node) + elif not decl.name: + raise CDefError("construct does not declare any variable", + decl) + # + if decl.name: + tp, quals = self._get_type_and_quals(node, + partial_length_ok=True) + if tp.is_raw_function: + self._declare_function(tp, quals, decl) + elif (tp.is_integer_type() and + hasattr(decl, 'init') and + hasattr(decl.init, 'value') and + _r_int_literal.match(decl.init.value)): + self._add_integer_constant(decl.name, decl.init.value) + elif (tp.is_integer_type() and + isinstance(decl.init, pycparser.c_ast.UnaryOp) and + decl.init.op == '-' and + hasattr(decl.init.expr, 'value') and + _r_int_literal.match(decl.init.expr.value)): + self._add_integer_constant(decl.name, + '-' + decl.init.expr.value) + elif (tp is model.void_type and + decl.name.startswith('__cffi_extern_python_')): + # hack: `extern 
"Python"` in the C source is replaced + # with "void __cffi_extern_python_start;" and + # "void __cffi_extern_python_stop;" + self._inside_extern_python = decl.name + else: + if self._inside_extern_python !='__cffi_extern_python_stop': + raise CDefError( + "cannot declare constants or " + "variables with 'extern \"Python\"'") + if (quals & model.Q_CONST) and not tp.is_array_type: + self._declare('constant ' + decl.name, tp, quals=quals) + else: + _warn_for_non_extern_non_static_global_variable(decl) + self._declare('variable ' + decl.name, tp, quals=quals) + + def parse_type(self, cdecl): + return self.parse_type_and_quals(cdecl)[0] + + def parse_type_and_quals(self, cdecl): + ast, macros = self._parse('void __dummy(\n%s\n);' % cdecl)[:2] + assert not macros + exprnode = ast.ext[-1].type.args.params[0] + if isinstance(exprnode, pycparser.c_ast.ID): + raise CDefError("unknown identifier '%s'" % (exprnode.name,)) + return self._get_type_and_quals(exprnode.type) + + def _declare(self, name, obj, included=False, quals=0): + if name in self._declarations: + prevobj, prevquals = self._declarations[name] + if prevobj is obj and prevquals == quals: + return + if not self._options.get('override'): + raise FFIError( + "multiple declarations of %s (for interactive usage, " + "try cdef(xx, override=True))" % (name,)) + assert '__dotdotdot__' not in name.split() + self._declarations[name] = (obj, quals) + if included: + self._included_declarations.add(obj) + + def _extract_quals(self, type): + quals = 0 + if isinstance(type, (pycparser.c_ast.TypeDecl, + pycparser.c_ast.PtrDecl)): + if 'const' in type.quals: + quals |= model.Q_CONST + if 'volatile' in type.quals: + quals |= model.Q_VOLATILE + if 'restrict' in type.quals: + quals |= model.Q_RESTRICT + return quals + + def _get_type_pointer(self, type, quals, declname=None): + if isinstance(type, model.RawFunctionType): + return type.as_function_pointer() + if (isinstance(type, model.StructOrUnionOrEnum) and + 
type.name.startswith('$') and type.name[1:].isdigit() and + type.forcename is None and declname is not None): + return model.NamedPointerType(type, declname, quals) + return model.PointerType(type, quals) + + def _get_type_and_quals(self, typenode, name=None, partial_length_ok=False, + typedef_example=None): + # first, dereference typedefs, if we have it already parsed, we're good + if (isinstance(typenode, pycparser.c_ast.TypeDecl) and + isinstance(typenode.type, pycparser.c_ast.IdentifierType) and + len(typenode.type.names) == 1 and + ('typedef ' + typenode.type.names[0]) in self._declarations): + tp, quals = self._declarations['typedef ' + typenode.type.names[0]] + quals |= self._extract_quals(typenode) + return tp, quals + # + if isinstance(typenode, pycparser.c_ast.ArrayDecl): + # array type + if typenode.dim is None: + length = None + else: + length = self._parse_constant( + typenode.dim, partial_length_ok=partial_length_ok) + # a hack: in 'typedef int foo_t[...][...];', don't use '...' as + # the length but use directly the C expression that would be + # generated by recompiler.py. This lets the typedef be used in + # many more places within recompiler.py + if typedef_example is not None: + if length == '...': + length = '_cffi_array_len(%s)' % (typedef_example,) + typedef_example = "*" + typedef_example + # + tp, quals = self._get_type_and_quals(typenode.type, + partial_length_ok=partial_length_ok, + typedef_example=typedef_example) + return model.ArrayType(tp, length), quals + # + if isinstance(typenode, pycparser.c_ast.PtrDecl): + # pointer type + itemtype, itemquals = self._get_type_and_quals(typenode.type) + tp = self._get_type_pointer(itemtype, itemquals, declname=name) + quals = self._extract_quals(typenode) + return tp, quals + # + if isinstance(typenode, pycparser.c_ast.TypeDecl): + quals = self._extract_quals(typenode) + type = typenode.type + if isinstance(type, pycparser.c_ast.IdentifierType): + # assume a primitive type. 
get it from .names, but reduce + # synonyms to a single chosen combination + names = list(type.names) + if names != ['signed', 'char']: # keep this unmodified + prefixes = {} + while names: + name = names[0] + if name in ('short', 'long', 'signed', 'unsigned'): + prefixes[name] = prefixes.get(name, 0) + 1 + del names[0] + else: + break + # ignore the 'signed' prefix below, and reorder the others + newnames = [] + for prefix in ('unsigned', 'short', 'long'): + for i in range(prefixes.get(prefix, 0)): + newnames.append(prefix) + if not names: + names = ['int'] # implicitly + if names == ['int']: # but kill it if 'short' or 'long' + if 'short' in prefixes or 'long' in prefixes: + names = [] + names = newnames + names + ident = ' '.join(names) + if ident == 'void': + return model.void_type, quals + if ident == '__dotdotdot__': + raise FFIError(':%d: bad usage of "..."' % + typenode.coord.line) + tp0, quals0 = resolve_common_type(self, ident) + return tp0, (quals | quals0) + # + if isinstance(type, pycparser.c_ast.Struct): + # 'struct foobar' + tp = self._get_struct_union_enum_type('struct', type, name) + return tp, quals + # + if isinstance(type, pycparser.c_ast.Union): + # 'union foobar' + tp = self._get_struct_union_enum_type('union', type, name) + return tp, quals + # + if isinstance(type, pycparser.c_ast.Enum): + # 'enum foobar' + tp = self._get_struct_union_enum_type('enum', type, name) + return tp, quals + # + if isinstance(typenode, pycparser.c_ast.FuncDecl): + # a function type + return self._parse_function_type(typenode, name), 0 + # + # nested anonymous structs or unions end up here + if isinstance(typenode, pycparser.c_ast.Struct): + return self._get_struct_union_enum_type('struct', typenode, name, + nested=True), 0 + if isinstance(typenode, pycparser.c_ast.Union): + return self._get_struct_union_enum_type('union', typenode, name, + nested=True), 0 + # + raise FFIError(":%d: bad or unsupported type declaration" % + typenode.coord.line) + + def 
_parse_function_type(self, typenode, funcname=None): + params = list(getattr(typenode.args, 'params', [])) + for i, arg in enumerate(params): + if not hasattr(arg, 'type'): + raise CDefError("%s arg %d: unknown type '%s'" + " (if you meant to use the old C syntax of giving" + " untyped arguments, it is not supported)" + % (funcname or 'in expression', i + 1, + getattr(arg, 'name', '?'))) + ellipsis = ( + len(params) > 0 and + isinstance(params[-1].type, pycparser.c_ast.TypeDecl) and + isinstance(params[-1].type.type, + pycparser.c_ast.IdentifierType) and + params[-1].type.type.names == ['__dotdotdot__']) + if ellipsis: + params.pop() + if not params: + raise CDefError( + "%s: a function with only '(...)' as argument" + " is not correct C" % (funcname or 'in expression')) + args = [self._as_func_arg(*self._get_type_and_quals(argdeclnode.type)) + for argdeclnode in params] + if not ellipsis and args == [model.void_type]: + args = [] + result, quals = self._get_type_and_quals(typenode.type) + # the 'quals' on the result type are ignored. HACK: we absure them + # to detect __stdcall functions: we textually replace "__stdcall" + # with "volatile volatile const" above. + abi = None + if hasattr(typenode.type, 'quals'): # else, probable syntax error anyway + if typenode.type.quals[-3:] == ['volatile', 'volatile', 'const']: + abi = '__stdcall' + return model.RawFunctionType(tuple(args), result, ellipsis, abi) + + def _as_func_arg(self, type, quals): + if isinstance(type, model.ArrayType): + return model.PointerType(type.item, quals) + elif isinstance(type, model.RawFunctionType): + return type.as_function_pointer() + else: + return type + + def _get_struct_union_enum_type(self, kind, type, name=None, nested=False): + # First, a level of caching on the exact 'type' node of the AST. 
+ # This is obscure, but needed because pycparser "unrolls" declarations + # such as "typedef struct { } foo_t, *foo_p" and we end up with + # an AST that is not a tree, but a DAG, with the "type" node of the + # two branches foo_t and foo_p of the trees being the same node. + # It's a bit silly but detecting "DAG-ness" in the AST tree seems + # to be the only way to distinguish this case from two independent + # structs. See test_struct_with_two_usages. + try: + return self._structnode2type[type] + except KeyError: + pass + # + # Note that this must handle parsing "struct foo" any number of + # times and always return the same StructType object. Additionally, + # one of these times (not necessarily the first), the fields of + # the struct can be specified with "struct foo { ...fields... }". + # If no name is given, then we have to create a new anonymous struct + # with no caching; in this case, the fields are either specified + # right now or never. + # + force_name = name + name = type.name + # + # get the type or create it if needed + if name is None: + # 'force_name' is used to guess a more readable name for + # anonymous structs, for the common case "typedef struct { } foo". 
+ if force_name is not None: + explicit_name = '$%s' % force_name + else: + self._anonymous_counter += 1 + explicit_name = '$%d' % self._anonymous_counter + tp = None + else: + explicit_name = name + key = '%s %s' % (kind, name) + tp, _ = self._declarations.get(key, (None, None)) + # + if tp is None: + if kind == 'struct': + tp = model.StructType(explicit_name, None, None, None) + elif kind == 'union': + tp = model.UnionType(explicit_name, None, None, None) + elif kind == 'enum': + if explicit_name == '__dotdotdot__': + raise CDefError("Enums cannot be declared with ...") + tp = self._build_enum_type(explicit_name, type.values) + else: + raise AssertionError("kind = %r" % (kind,)) + if name is not None: + self._declare(key, tp) + else: + if kind == 'enum' and type.values is not None: + raise NotImplementedError( + "enum %s: the '{}' declaration should appear on the first " + "time the enum is mentioned, not later" % explicit_name) + if not tp.forcename: + tp.force_the_name(force_name) + if tp.forcename and '$' in tp.name: + self._declare('anonymous %s' % tp.forcename, tp) + # + self._structnode2type[type] = tp + # + # enums: done here + if kind == 'enum': + return tp + # + # is there a 'type.decls'? If yes, then this is the place in the + # C sources that declare the fields. If no, then just return the + # existing type, possibly still incomplete. + if type.decls is None: + return tp + # + if tp.fldnames is not None: + raise CDefError("duplicate declaration of struct %s" % name) + fldnames = [] + fldtypes = [] + fldbitsize = [] + fldquals = [] + for decl in type.decls: + if (isinstance(decl.type, pycparser.c_ast.IdentifierType) and + ''.join(decl.type.names) == '__dotdotdot__'): + # XXX pycparser is inconsistent: 'names' should be a list + # of strings, but is sometimes just one string. Use + # str.join() as a way to cope with both. 
+ self._make_partial(tp, nested) + continue + if decl.bitsize is None: + bitsize = -1 + else: + bitsize = self._parse_constant(decl.bitsize) + self._partial_length = False + type, fqual = self._get_type_and_quals(decl.type, + partial_length_ok=True) + if self._partial_length: + self._make_partial(tp, nested) + if isinstance(type, model.StructType) and type.partial: + self._make_partial(tp, nested) + fldnames.append(decl.name or '') + fldtypes.append(type) + fldbitsize.append(bitsize) + fldquals.append(fqual) + tp.fldnames = tuple(fldnames) + tp.fldtypes = tuple(fldtypes) + tp.fldbitsize = tuple(fldbitsize) + tp.fldquals = tuple(fldquals) + if fldbitsize != [-1] * len(fldbitsize): + if isinstance(tp, model.StructType) and tp.partial: + raise NotImplementedError("%s: using both bitfields and '...;'" + % (tp,)) + tp.packed = self._options.get('packed') + if tp.completed: # must be re-completed: it is not opaque any more + tp.completed = 0 + self._recomplete.append(tp) + return tp + + def _make_partial(self, tp, nested): + if not isinstance(tp, model.StructOrUnion): + raise CDefError("%s cannot be partial" % (tp,)) + if not tp.has_c_name() and not nested: + raise NotImplementedError("%s is partial but has no C name" %(tp,)) + tp.partial = True + + def _parse_constant(self, exprnode, partial_length_ok=False): + # for now, limited to expressions that are an immediate number + # or positive/negative number + if isinstance(exprnode, pycparser.c_ast.Constant): + s = exprnode.value + if '0' <= s[0] <= '9': + s = s.rstrip('uUlL') + try: + if s.startswith('0'): + return int(s, 8) + else: + return int(s, 10) + except ValueError: + if len(s) > 1: + if s.lower()[0:2] == '0x': + return int(s, 16) + elif s.lower()[0:2] == '0b': + return int(s, 2) + raise CDefError("invalid constant %r" % (s,)) + elif s[0] == "'" and s[-1] == "'" and ( + len(s) == 3 or (len(s) == 4 and s[1] == "\\")): + return ord(s[-2]) + else: + raise CDefError("invalid constant %r" % (s,)) + # + if 
(isinstance(exprnode, pycparser.c_ast.UnaryOp) and + exprnode.op == '+'): + return self._parse_constant(exprnode.expr) + # + if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and + exprnode.op == '-'): + return -self._parse_constant(exprnode.expr) + # load previously defined int constant + if (isinstance(exprnode, pycparser.c_ast.ID) and + exprnode.name in self._int_constants): + return self._int_constants[exprnode.name] + # + if (isinstance(exprnode, pycparser.c_ast.ID) and + exprnode.name == '__dotdotdotarray__'): + if partial_length_ok: + self._partial_length = True + return '...' + raise FFIError(":%d: unsupported '[...]' here, cannot derive " + "the actual array length in this context" + % exprnode.coord.line) + # + if isinstance(exprnode, pycparser.c_ast.BinaryOp): + left = self._parse_constant(exprnode.left) + right = self._parse_constant(exprnode.right) + if exprnode.op == '+': + return left + right + elif exprnode.op == '-': + return left - right + elif exprnode.op == '*': + return left * right + elif exprnode.op == '/': + return self._c_div(left, right) + elif exprnode.op == '%': + return left - self._c_div(left, right) * right + elif exprnode.op == '<<': + return left << right + elif exprnode.op == '>>': + return left >> right + elif exprnode.op == '&': + return left & right + elif exprnode.op == '|': + return left | right + elif exprnode.op == '^': + return left ^ right + # + raise FFIError(":%d: unsupported expression: expected a " + "simple numeric constant" % exprnode.coord.line) + + def _c_div(self, a, b): + result = a // b + if ((a < 0) ^ (b < 0)) and (a % b) != 0: + result += 1 + return result + + def _build_enum_type(self, explicit_name, decls): + if decls is not None: + partial = False + enumerators = [] + enumvalues = [] + nextenumvalue = 0 + for enum in decls.enumerators: + if _r_enum_dotdotdot.match(enum.name): + partial = True + continue + if enum.value is not None: + nextenumvalue = self._parse_constant(enum.value) + 
enumerators.append(enum.name) + enumvalues.append(nextenumvalue) + self._add_constants(enum.name, nextenumvalue) + nextenumvalue += 1 + enumerators = tuple(enumerators) + enumvalues = tuple(enumvalues) + tp = model.EnumType(explicit_name, enumerators, enumvalues) + tp.partial = partial + else: # opaque enum + tp = model.EnumType(explicit_name, (), ()) + return tp + + def include(self, other): + for name, (tp, quals) in other._declarations.items(): + if name.startswith('anonymous $enum_$'): + continue # fix for test_anonymous_enum_include + kind = name.split(' ', 1)[0] + if kind in ('struct', 'union', 'enum', 'anonymous', 'typedef'): + self._declare(name, tp, included=True, quals=quals) + for k, v in other._int_constants.items(): + self._add_constants(k, v) + + def _get_unknown_type(self, decl): + typenames = decl.type.type.names + if typenames == ['__dotdotdot__']: + return model.unknown_type(decl.name) + + if typenames == ['__dotdotdotint__']: + if self._uses_new_feature is None: + self._uses_new_feature = "'typedef int... %s'" % decl.name + return model.UnknownIntegerType(decl.name) + + if typenames == ['__dotdotdotfloat__']: + # note: not for 'long double' so far + if self._uses_new_feature is None: + self._uses_new_feature = "'typedef float... %s'" % decl.name + return model.UnknownFloatType(decl.name) + + raise FFIError(':%d: unsupported usage of "..." in typedef' + % decl.coord.line) + + def _get_unknown_ptr_type(self, decl): + if decl.type.type.type.names == ['__dotdotdot__']: + return model.unknown_ptr_type(decl.name) + raise FFIError(':%d: unsupported usage of "..." 
class FFIError(Exception):
    """Generic error raised by cffi's parsing/typing machinery."""
    __module__ = 'cffi'


class CDefError(Exception):
    """Error in a cdef() source string.

    When ``args[1]`` is present and carries pycparser coordinates, the
    message is prefixed with "<file>:<line>: ".
    """
    __module__ = 'cffi'

    def __str__(self):
        try:
            decl = self.args[1]
            prefix = '%s:%d: ' % (decl.coord.file, decl.coord.line)
        except (AttributeError, TypeError, IndexError):
            # No declaration attached, or no usable coordinates on it:
            # fall back to the bare message.
            prefix = ''
        return '%s%s' % (prefix, self.args[0])


class VerificationError(Exception):
    """ An error raised when verification fails
    """
    __module__ = 'cffi'


class VerificationMissing(Exception):
    """ An error raised when incomplete structures are passed into
    cdef, but no verification has been done
    """
    __module__ = 'cffi'


class PkgConfigError(Exception):
    """ An error raised for missing modules in pkg-config
    """
    __module__ = 'cffi'
def maybe_relative_path(path):
    """Return *path* rewritten relative to the current directory when
    possible.

    Walks upward from *path* one component at a time; if some ancestor
    is the current working directory, the components below it are
    joined back into a relative path.  Otherwise (or if *path* is
    already relative) *path* is returned unchanged.
    """
    if not os.path.isabs(path):
        return path              # already relative
    components = []
    current = path
    while True:
        parent, tail = os.path.split(current)
        if parent == current or not parent:
            return path          # reached the root: cannot make it relative
        components.append(tail)
        try:
            if samefile(parent, os.curdir):
                # Found the cwd: rebuild the remainder as a relative path.
                return os.path.join(*reversed(components))
        except OSError:
            # Either side may not exist; just keep walking upward.
            pass
        current = parent
as cStringIO + +def _flatten(x, f): + if isinstance(x, str): + f.write('%ds%s' % (len(x), x)) + elif isinstance(x, dict): + keys = sorted(x.keys()) + f.write('%dd' % len(keys)) + for key in keys: + _flatten(key, f) + _flatten(x[key], f) + elif isinstance(x, (list, tuple)): + f.write('%dl' % len(x)) + for value in x: + _flatten(value, f) + elif isinstance(x, int_or_long): + f.write('%di' % (x,)) + else: + raise TypeError( + "the keywords to verify() contains unsupported object %r" % (x,)) + +def flatten(x): + f = cStringIO.StringIO() + _flatten(x, f) + return f.getvalue() + +def _hack_at_distutils(): + # Windows-only workaround for some configurations: see + # https://bugs.python.org/issue23246 (Python 2.7 with + # a specific MS compiler suite download) + if sys.platform == "win32": + try: + import setuptools # for side-effects, patches distutils + except ImportError: + pass diff --git a/.venv/lib/python3.9/site-packages/cffi/lock.py b/.venv/lib/python3.9/site-packages/cffi/lock.py new file mode 100644 index 0000000..db91b71 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi/lock.py @@ -0,0 +1,30 @@ +import sys + +if sys.version_info < (3,): + try: + from thread import allocate_lock + except ImportError: + from dummy_thread import allocate_lock +else: + try: + from _thread import allocate_lock + except ImportError: + from _dummy_thread import allocate_lock + + +##import sys +##l1 = allocate_lock + +##class allocate_lock(object): +## def __init__(self): +## self._real = l1() +## def __enter__(self): +## for i in range(4, 0, -1): +## print sys._getframe(i).f_code +## print +## return self._real.__enter__() +## def __exit__(self, *args): +## return self._real.__exit__(*args) +## def acquire(self, f): +## assert f is False +## return self._real.acquire(f) diff --git a/.venv/lib/python3.9/site-packages/cffi/model.py b/.venv/lib/python3.9/site-packages/cffi/model.py new file mode 100644 index 0000000..ad1c176 --- /dev/null +++ 
# type qualifier bit flags
Q_CONST = 0x01
Q_RESTRICT = 0x02
Q_VOLATILE = 0x04

# Applied in this order, so the final spelling reads e.g.
# ' __restrict volatile const <rest>'.  __restrict is the spelling
# supported by both gcc and msvc; another compiler would need a
# #define in _cffi_include.h (and in its copies, documented there).
_QUAL_KEYWORDS = ((Q_CONST, ' const '),
                  (Q_VOLATILE, ' volatile '),
                  (Q_RESTRICT, ' __restrict '))

def qualify(quals, replace_with):
    """Prepend the C qualifier keywords selected by the *quals* bitmask
    (Q_CONST / Q_VOLATILE / Q_RESTRICT) to *replace_with*."""
    for flag, keyword in _QUAL_KEYWORDS:
        if quals & flag:
            replace_with = keyword + replace_with.lstrip()
    return replace_with
    def _get_c_name(self):
        # The '&' marker stands where a variable name would go in a
        # declaration; dropping it yields the plain type spelling.
        return self.c_name_with_marker.replace('&', '')

    def has_c_name(self):
        # '$' appears only in internal/synthetic names with no real C
        # spelling.
        return '$' not in self._get_c_name()

    def is_integer_type(self):
        return False

    def get_cached_btype(self, ffi, finishlist, can_delay=False):
        """Return the backend type for this model type, building and
        caching it in ffi._cached_btypes on first use."""
        try:
            BType = ffi._cached_btypes[self]
        except KeyError:
            BType = self.build_backend_type(ffi, finishlist)
            # If the recursive build already inserted an entry for
            # self, it must be this very same object.
            BType2 = ffi._cached_btypes.setdefault(self, BType)
            assert BType2 is BType
        return BType

    def __repr__(self):
        return '<%s>' % (self._get_c_name(),)

    def _get_items(self):
        # The identity-defining attributes, as (name, value) pairs.
        return [(name, getattr(self, name)) for name in self._attrs_]


class BaseType(BaseTypeByIdentity):
    # Types compared *by value*: two instances are equal when they are
    # of the same class and their '_attrs_' items match.

    def __eq__(self, other):
        return (self.__class__ == other.__class__ and
                self._get_items() == other._get_items())

    def __ne__(self, other):
        return not self == other

    def __hash__(self):
        # Must stay consistent with __eq__ above.
        return hash((self.__class__, tuple(self._get_items())))


class VoidType(BaseType):
    """The C 'void' type."""
    _attrs_ = ()

    def __init__(self):
        self.c_name_with_marker = 'void&'

    def build_backend_type(self, ffi, finishlist):
        return global_cache(self, ffi, 'new_void_type')

void_type = VoidType()


class BasePrimitiveType(BaseType):
    def is_complex_type(self):
        return False
    def __init__(self, name):
        assert name in self.ALL_PRIMITIVE_TYPES
        self.name = name
        self.c_name_with_marker = name + '&'

    # Category codes from ALL_PRIMITIVE_TYPES:
    # 'c' char-like, 'i' integer, 'f' float, 'j' complex.
    def is_char_type(self):
        return self.ALL_PRIMITIVE_TYPES[self.name] == 'c'
    def is_integer_type(self):
        return self.ALL_PRIMITIVE_TYPES[self.name] == 'i'
    def is_float_type(self):
        return self.ALL_PRIMITIVE_TYPES[self.name] == 'f'
    def is_complex_type(self):
        return self.ALL_PRIMITIVE_TYPES[self.name] == 'j'

    def build_backend_type(self, ffi, finishlist):
        return global_cache(self, ffi, 'new_primitive_type', self.name)


class UnknownIntegerType(BasePrimitiveType):
    """Placeholder for 'typedef int... name': an integer type whose
    size and signedness are only discovered at compilation time."""
    _attrs_ = ('name',)

    def __init__(self, name):
        self.name = name
        self.c_name_with_marker = name + '&'

    def is_integer_type(self):
        return True

    def build_backend_type(self, ffi, finishlist):
        raise NotImplementedError("integer type '%s' can only be used after "
                                  "compilation" % self.name)
class BaseFunctionType(BaseType):
    """Common base of RawFunctionType and FunctionPtrType."""
    _attrs_ = ('args', 'result', 'ellipsis', 'abi')

    def __init__(self, args, result, ellipsis, abi=None):
        self.args = args
        self.result = result
        self.ellipsis = ellipsis
        self.abi = abi
        #
        # Build the C spelling by substituting the argument list for
        # the '&' marker inside the result type's name, using the
        # subclass-specific _base_pattern.
        reprargs = [arg._get_c_name() for arg in self.args]
        if self.ellipsis:
            reprargs.append('...')
        reprargs = reprargs or ['void']
        replace_with = self._base_pattern % (', '.join(reprargs),)
        if abi is not None:
            # Insert the calling-convention keyword right after the
            # opening parenthesis.
            replace_with = replace_with[:1] + abi + ' ' + replace_with[1:]
        self.c_name_with_marker = (
            self.result.c_name_with_marker.replace('&', replace_with))


class RawFunctionType(BaseFunctionType):
    # Corresponds to a C type like 'int(int)', which is the C type of
    # a function, but not a pointer-to-function.  The backend has no
    # notion of such a type; it's used temporarily by parsing.
    _base_pattern = '(&)(%s)'
    is_raw_function = True

    def build_backend_type(self, ffi, finishlist):
        raise CDefError("cannot render the type %r: it is a function "
                        "type, not a pointer-to-function type" % (self,))

    def as_function_pointer(self):
        return FunctionPtrType(self.args, self.result, self.ellipsis, self.abi)


class FunctionPtrType(BaseFunctionType):
    """A pointer-to-function type, e.g. 'int(*)(int)'."""
    _base_pattern = '(*&)(%s)'

    def build_backend_type(self, ffi, finishlist):
        result = self.result.get_cached_btype(ffi, finishlist)
        args = []
        for tp in self.args:
            args.append(tp.get_cached_btype(ffi, finishlist))
        abi_args = ()
        if self.abi == "__stdcall":
            if not self.ellipsis:    # __stdcall ignored for variadic funcs
                try:
                    abi_args = (ffi._backend.FFI_STDCALL,)
                except AttributeError:
                    # Backend without stdcall support: silently ignore.
                    pass
        return global_cache(self, ffi, 'new_function_type',
                            tuple(args), result, self.ellipsis, *abi_args)

    def as_raw_function(self):
        # Drop the pointer, giving back the plain C function type.
        return RawFunctionType(self.args, self.result, self.ellipsis, self.abi)
class PointerType(BaseType):
    _attrs_ = ('totype', 'quals')

    def __init__(self, totype, quals=0):
        self.totype = totype
        self.quals = quals
        extra = qualify(quals, " *&")
        if totype.is_array_type:
            # Pointer-to-array needs parentheses: e.g. 'int(*&)[5]'.
            extra = "(%s)" % (extra.lstrip(),)
        self.c_name_with_marker = totype.c_name_with_marker.replace('&', extra)

    def build_backend_type(self, ffi, finishlist):
        # can_delay=True: a pointer may be built before its target type
        # is completed (what makes self-referential structs possible).
        BItem = self.totype.get_cached_btype(ffi, finishlist, can_delay=True)
        return global_cache(self, ffi, 'new_pointer_type', BItem)

voidp_type = PointerType(void_type)

def ConstPointerType(totype):
    # Convenience constructor for a 'const *' pointer.
    return PointerType(totype, Q_CONST)

const_voidp_type = ConstPointerType(void_type)


class NamedPointerType(PointerType):
    """A pointer type hidden behind a typedef name."""
    _attrs_ = ('totype', 'name')

    def __init__(self, totype, name, quals=0):
        PointerType.__init__(self, totype, quals)
        self.name = name
        self.c_name_with_marker = name + '&'


class ArrayType(BaseType):
    _attrs_ = ('item', 'length')
    is_array_type = True

    def __init__(self, item, length):
        self.item = item
        self.length = length
        #
        # 'length' is None for '[]', the string '...' for '[...]'
        # placeholders from cdef, or a concrete integer.
        if length is None:
            brackets = '&[]'
        elif length == '...':
            brackets = '&[/*...*/]'
        else:
            brackets = '&[%s]' % length
        self.c_name_with_marker = (
            self.item.c_name_with_marker.replace('&', brackets))

    def length_is_unknown(self):
        # Any string here is a '...'-style placeholder, not a real length.
        return isinstance(self.length, str)

    def resolve_length(self, newlength):
        return ArrayType(self.item, newlength)

    def build_backend_type(self, ffi, finishlist):
        if self.length_is_unknown():
            raise CDefError("cannot render the type %r: unknown length" %
                            (self,))
        self.item.get_cached_btype(ffi, finishlist)    # force the item BType
        BPtrItem = PointerType(self.item).get_cached_btype(ffi, finishlist)
        return global_cache(self, ffi, 'new_array_type', BPtrItem, self.length)

char_array_type = ArrayType(PrimitiveType('char'), None)
self.forcename or '%s %s' % (self.kind, self.name) + self.c_name_with_marker = name + '&' + + def force_the_name(self, forcename): + self.forcename = forcename + self.build_c_name_with_marker() + + def get_official_name(self): + assert self.c_name_with_marker.endswith('&') + return self.c_name_with_marker[:-1] + + +class StructOrUnion(StructOrUnionOrEnum): + fixedlayout = None + completed = 0 + partial = False + packed = 0 + + def __init__(self, name, fldnames, fldtypes, fldbitsize, fldquals=None): + self.name = name + self.fldnames = fldnames + self.fldtypes = fldtypes + self.fldbitsize = fldbitsize + self.fldquals = fldquals + self.build_c_name_with_marker() + + def anonymous_struct_fields(self): + if self.fldtypes is not None: + for name, type in zip(self.fldnames, self.fldtypes): + if name == '' and isinstance(type, StructOrUnion): + yield type + + def enumfields(self, expand_anonymous_struct_union=True): + fldquals = self.fldquals + if fldquals is None: + fldquals = (0,) * len(self.fldnames) + for name, type, bitsize, quals in zip(self.fldnames, self.fldtypes, + self.fldbitsize, fldquals): + if (name == '' and isinstance(type, StructOrUnion) + and expand_anonymous_struct_union): + # nested anonymous struct/union + for result in type.enumfields(): + yield result + else: + yield (name, type, bitsize, quals) + + def force_flatten(self): + # force the struct or union to have a declaration that lists + # directly all fields returned by enumfields(), flattening + # nested anonymous structs/unions. 
    def get_cached_btype(self, ffi, finishlist, can_delay=False):
        """Return the backend type, additionally making sure the
        struct/union gets *completed* (its fields filled in) unless
        *can_delay* is true.

        Delaying the completion is what allows self-referential
        structs to be built without infinite recursion.
        """
        BType = StructOrUnionOrEnum.get_cached_btype(self, ffi, finishlist,
                                                     can_delay)
        if not can_delay:
            self.finish_backend_type(ffi, finishlist)
        return BType
    def _verification_error(self, msg):
        # Single reporting point for layout-verification problems.
        raise VerificationError(msg)

    def check_not_partial(self):
        # A struct/union declared with '...' (partial) cannot be
        # rendered unless verification supplied a fixed layout.
        if self.partial and self.fixedlayout is None:
            raise VerificationMissing(self._get_c_name())

    def build_backend_type(self, ffi, finishlist):
        self.check_not_partial()
        # Create the named type opaque first; it is completed later by
        # finish_backend_type(), via the finishlist.
        finishlist.append(self)
        #
        return global_cache(self, ffi, 'new_%s_type' % self.kind,
                            self.get_official_name(), key=self)


class StructType(StructOrUnion):
    kind = 'struct'


class UnionType(StructOrUnion):
    kind = 'union'
    def build_baseinttype(self, ffi, finishlist):
        """Choose the C integer type underlying this enum.

        Uses the explicitly-given base type when there is one;
        otherwise picks the smaller of (unsigned) int / (unsigned)
        long whose range covers all enumerator values.
        """
        if self.baseinttype is not None:
            return self.baseinttype.get_cached_btype(ffi, finishlist)
        #
        if self.enumvalues:
            smallest_value = min(self.enumvalues)
            largest_value = max(self.enumvalues)
        else:
            import warnings
            try:
                # XXX! The goal is to ensure that the warnings.warn()
                # will not suppress the warning.  We want to get it
                # several times if we reach this point several times.
                __warningregistry__.clear()
            except NameError:
                pass
            warnings.warn("%r has no values explicitly defined; "
                          "guessing that it is equivalent to 'unsigned int'"
                          % self._get_c_name())
            smallest_value = largest_value = 0
        if smallest_value < 0:   # needs a signed type
            sign = 1
            candidate1 = PrimitiveType("int")
            candidate2 = PrimitiveType("long")
        else:
            sign = 0
            candidate1 = PrimitiveType("unsigned int")
            candidate2 = PrimitiveType("unsigned long")
        btype1 = candidate1.get_cached_btype(ffi, finishlist)
        btype2 = candidate2.get_cached_btype(ffi, finishlist)
        size1 = ffi.sizeof(btype1)
        size2 = ffi.sizeof(btype2)
        # A candidate of S bytes holds values in
        # [-(2**(8*S-1)), 2**(8*S-sign)) for the 'sign' chosen above.
        if (smallest_value >= ((-1) << (8*size1-1)) and
                largest_value < (1 << (8*size1-sign))):
            return btype1
        if (smallest_value >= ((-1) << (8*size2-1)) and
                largest_value < (1 << (8*size2-sign))):
            return btype2
        raise CDefError("%s values don't all fit into either 'long' "
                        "or 'unsigned long'" % self._get_c_name())
def attach_exception_info(e, name):
    """Prefix *name* to the message of exception *e*, in place.

    Only acts when the first argument of the exception is a plain
    string; any remaining args are preserved unchanged.
    """
    args = e.args
    if args and type(args[0]) is str:
        e.args = ('%s: %s' % (name, args[0]),) + args[1:]
*/ + +typedef void *_cffi_opcode_t; + +#define _CFFI_OP(opcode, arg) (_cffi_opcode_t)(opcode | (((uintptr_t)(arg)) << 8)) +#define _CFFI_GETOP(cffi_opcode) ((unsigned char)(uintptr_t)cffi_opcode) +#define _CFFI_GETARG(cffi_opcode) (((intptr_t)cffi_opcode) >> 8) + +#define _CFFI_OP_PRIMITIVE 1 +#define _CFFI_OP_POINTER 3 +#define _CFFI_OP_ARRAY 5 +#define _CFFI_OP_OPEN_ARRAY 7 +#define _CFFI_OP_STRUCT_UNION 9 +#define _CFFI_OP_ENUM 11 +#define _CFFI_OP_FUNCTION 13 +#define _CFFI_OP_FUNCTION_END 15 +#define _CFFI_OP_NOOP 17 +#define _CFFI_OP_BITFIELD 19 +#define _CFFI_OP_TYPENAME 21 +#define _CFFI_OP_CPYTHON_BLTN_V 23 // varargs +#define _CFFI_OP_CPYTHON_BLTN_N 25 // noargs +#define _CFFI_OP_CPYTHON_BLTN_O 27 // O (i.e. a single arg) +#define _CFFI_OP_CONSTANT 29 +#define _CFFI_OP_CONSTANT_INT 31 +#define _CFFI_OP_GLOBAL_VAR 33 +#define _CFFI_OP_DLOPEN_FUNC 35 +#define _CFFI_OP_DLOPEN_CONST 37 +#define _CFFI_OP_GLOBAL_VAR_F 39 +#define _CFFI_OP_EXTERN_PYTHON 41 + +#define _CFFI_PRIM_VOID 0 +#define _CFFI_PRIM_BOOL 1 +#define _CFFI_PRIM_CHAR 2 +#define _CFFI_PRIM_SCHAR 3 +#define _CFFI_PRIM_UCHAR 4 +#define _CFFI_PRIM_SHORT 5 +#define _CFFI_PRIM_USHORT 6 +#define _CFFI_PRIM_INT 7 +#define _CFFI_PRIM_UINT 8 +#define _CFFI_PRIM_LONG 9 +#define _CFFI_PRIM_ULONG 10 +#define _CFFI_PRIM_LONGLONG 11 +#define _CFFI_PRIM_ULONGLONG 12 +#define _CFFI_PRIM_FLOAT 13 +#define _CFFI_PRIM_DOUBLE 14 +#define _CFFI_PRIM_LONGDOUBLE 15 + +#define _CFFI_PRIM_WCHAR 16 +#define _CFFI_PRIM_INT8 17 +#define _CFFI_PRIM_UINT8 18 +#define _CFFI_PRIM_INT16 19 +#define _CFFI_PRIM_UINT16 20 +#define _CFFI_PRIM_INT32 21 +#define _CFFI_PRIM_UINT32 22 +#define _CFFI_PRIM_INT64 23 +#define _CFFI_PRIM_UINT64 24 +#define _CFFI_PRIM_INTPTR 25 +#define _CFFI_PRIM_UINTPTR 26 +#define _CFFI_PRIM_PTRDIFF 27 +#define _CFFI_PRIM_SIZE 28 +#define _CFFI_PRIM_SSIZE 29 +#define _CFFI_PRIM_INT_LEAST8 30 +#define _CFFI_PRIM_UINT_LEAST8 31 +#define _CFFI_PRIM_INT_LEAST16 32 +#define _CFFI_PRIM_UINT_LEAST16 33 +#define 
_CFFI_PRIM_INT_LEAST32 34 +#define _CFFI_PRIM_UINT_LEAST32 35 +#define _CFFI_PRIM_INT_LEAST64 36 +#define _CFFI_PRIM_UINT_LEAST64 37 +#define _CFFI_PRIM_INT_FAST8 38 +#define _CFFI_PRIM_UINT_FAST8 39 +#define _CFFI_PRIM_INT_FAST16 40 +#define _CFFI_PRIM_UINT_FAST16 41 +#define _CFFI_PRIM_INT_FAST32 42 +#define _CFFI_PRIM_UINT_FAST32 43 +#define _CFFI_PRIM_INT_FAST64 44 +#define _CFFI_PRIM_UINT_FAST64 45 +#define _CFFI_PRIM_INTMAX 46 +#define _CFFI_PRIM_UINTMAX 47 +#define _CFFI_PRIM_FLOATCOMPLEX 48 +#define _CFFI_PRIM_DOUBLECOMPLEX 49 +#define _CFFI_PRIM_CHAR16 50 +#define _CFFI_PRIM_CHAR32 51 + +#define _CFFI__NUM_PRIM 52 +#define _CFFI__UNKNOWN_PRIM (-1) +#define _CFFI__UNKNOWN_FLOAT_PRIM (-2) +#define _CFFI__UNKNOWN_LONG_DOUBLE (-3) + +#define _CFFI__IO_FILE_STRUCT (-1) + + +struct _cffi_global_s { + const char *name; + void *address; + _cffi_opcode_t type_op; + void *size_or_direct_fn; // OP_GLOBAL_VAR: size, or 0 if unknown + // OP_CPYTHON_BLTN_*: addr of direct function +}; + +struct _cffi_getconst_s { + unsigned long long value; + const struct _cffi_type_context_s *ctx; + int gindex; +}; + +struct _cffi_struct_union_s { + const char *name; + int type_index; // -> _cffi_types, on a OP_STRUCT_UNION + int flags; // _CFFI_F_* flags below + size_t size; + int alignment; + int first_field_index; // -> _cffi_fields array + int num_fields; +}; +#define _CFFI_F_UNION 0x01 // is a union, not a struct +#define _CFFI_F_CHECK_FIELDS 0x02 // complain if fields are not in the + // "standard layout" or if some are missing +#define _CFFI_F_PACKED 0x04 // for CHECK_FIELDS, assume a packed struct +#define _CFFI_F_EXTERNAL 0x08 // in some other ffi.include() +#define _CFFI_F_OPAQUE 0x10 // opaque + +struct _cffi_field_s { + const char *name; + size_t field_offset; + size_t field_size; + _cffi_opcode_t field_type_op; +}; + +struct _cffi_enum_s { + const char *name; + int type_index; // -> _cffi_types, on a OP_ENUM + int type_prim; // _CFFI_PRIM_xxx + const char *enumerators; // 
def merge_flags(cfg1, cfg2):
    """Merge the cffi flag dict *cfg2* into *cfg1* (in place) and
    return *cfg1*.

    Keys missing from cfg1 are taken over from cfg2 as-is; for keys
    present in both, the two values must be lists and cfg2's entries
    are appended to cfg1's.

    Example:
        merge_flags({"libraries": ["one"]}, {"libraries": ["two"]})
        {"libraries": ["one", "two"]}
    """
    for key, new_value in cfg2.items():
        if key not in cfg1:
            cfg1[key] = new_value
            continue
        existing = cfg1[key]
        if not isinstance(existing, list):
            raise TypeError("cfg1[%r] should be a list of strings" % (key,))
        if not isinstance(new_value, list):
            raise TypeError("cfg2[%r] should be a list of strings" % (key,))
        existing.extend(new_value)
    return cfg1
+ """ + + def get_include_dirs(string): + return [x[2:] for x in string.split() if x.startswith("-I")] + + def get_library_dirs(string): + return [x[2:] for x in string.split() if x.startswith("-L")] + + def get_libraries(string): + return [x[2:] for x in string.split() if x.startswith("-l")] + + # convert -Dfoo=bar to list of tuples [("foo", "bar")] expected by distutils + def get_macros(string): + def _macro(x): + x = x[2:] # drop "-D" + if '=' in x: + return tuple(x.split("=", 1)) # "-Dfoo=bar" => ("foo", "bar") + else: + return (x, None) # "-Dfoo" => ("foo", None) + return [_macro(x) for x in string.split() if x.startswith("-D")] + + def get_other_cflags(string): + return [x for x in string.split() if not x.startswith("-I") and + not x.startswith("-D")] + + def get_other_libs(string): + return [x for x in string.split() if not x.startswith("-L") and + not x.startswith("-l")] + + # return kwargs for given libname + def kwargs(libname): + fse = sys.getfilesystemencoding() + all_cflags = call(libname, "--cflags") + all_libs = call(libname, "--libs") + return { + "include_dirs": get_include_dirs(all_cflags), + "library_dirs": get_library_dirs(all_libs), + "libraries": get_libraries(all_libs), + "define_macros": get_macros(all_cflags), + "extra_compile_args": get_other_cflags(all_cflags), + "extra_link_args": get_other_libs(all_libs), + } + + # merge all arguments together + ret = {} + for libname in libs: + lib_flags = kwargs(libname) + merge_flags(ret, lib_flags) + return ret diff --git a/.venv/lib/python3.9/site-packages/cffi/recompiler.py b/.venv/lib/python3.9/site-packages/cffi/recompiler.py new file mode 100644 index 0000000..86b37d7 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi/recompiler.py @@ -0,0 +1,1581 @@ +import os, sys, io +from . 
import ffiplatform, model +from .error import VerificationError +from .cffi_opcode import * + +VERSION_BASE = 0x2601 +VERSION_EMBEDDED = 0x2701 +VERSION_CHAR16CHAR32 = 0x2801 + +USE_LIMITED_API = (sys.platform != 'win32' or sys.version_info < (3, 0) or + sys.version_info >= (3, 5)) + + +class GlobalExpr: + def __init__(self, name, address, type_op, size=0, check_value=0): + self.name = name + self.address = address + self.type_op = type_op + self.size = size + self.check_value = check_value + + def as_c_expr(self): + return ' { "%s", (void *)%s, %s, (void *)%s },' % ( + self.name, self.address, self.type_op.as_c_expr(), self.size) + + def as_python_expr(self): + return "b'%s%s',%d" % (self.type_op.as_python_bytes(), self.name, + self.check_value) + +class FieldExpr: + def __init__(self, name, field_offset, field_size, fbitsize, field_type_op): + self.name = name + self.field_offset = field_offset + self.field_size = field_size + self.fbitsize = fbitsize + self.field_type_op = field_type_op + + def as_c_expr(self): + spaces = " " * len(self.name) + return (' { "%s", %s,\n' % (self.name, self.field_offset) + + ' %s %s,\n' % (spaces, self.field_size) + + ' %s %s },' % (spaces, self.field_type_op.as_c_expr())) + + def as_python_expr(self): + raise NotImplementedError + + def as_field_python_expr(self): + if self.field_type_op.op == OP_NOOP: + size_expr = '' + elif self.field_type_op.op == OP_BITFIELD: + size_expr = format_four_bytes(self.fbitsize) + else: + raise NotImplementedError + return "b'%s%s%s'" % (self.field_type_op.as_python_bytes(), + size_expr, + self.name) + +class StructUnionExpr: + def __init__(self, name, type_index, flags, size, alignment, comment, + first_field_index, c_fields): + self.name = name + self.type_index = type_index + self.flags = flags + self.size = size + self.alignment = alignment + self.comment = comment + self.first_field_index = first_field_index + self.c_fields = c_fields + + def as_c_expr(self): + return (' { "%s", %d, %s,' % 
(self.name, self.type_index, self.flags) + + '\n %s, %s, ' % (self.size, self.alignment) + + '%d, %d ' % (self.first_field_index, len(self.c_fields)) + + ('/* %s */ ' % self.comment if self.comment else '') + + '},') + + def as_python_expr(self): + flags = eval(self.flags, G_FLAGS) + fields_expr = [c_field.as_field_python_expr() + for c_field in self.c_fields] + return "(b'%s%s%s',%s)" % ( + format_four_bytes(self.type_index), + format_four_bytes(flags), + self.name, + ','.join(fields_expr)) + +class EnumExpr: + def __init__(self, name, type_index, size, signed, allenums): + self.name = name + self.type_index = type_index + self.size = size + self.signed = signed + self.allenums = allenums + + def as_c_expr(self): + return (' { "%s", %d, _cffi_prim_int(%s, %s),\n' + ' "%s" },' % (self.name, self.type_index, + self.size, self.signed, self.allenums)) + + def as_python_expr(self): + prim_index = { + (1, 0): PRIM_UINT8, (1, 1): PRIM_INT8, + (2, 0): PRIM_UINT16, (2, 1): PRIM_INT16, + (4, 0): PRIM_UINT32, (4, 1): PRIM_INT32, + (8, 0): PRIM_UINT64, (8, 1): PRIM_INT64, + }[self.size, self.signed] + return "b'%s%s%s\\x00%s'" % (format_four_bytes(self.type_index), + format_four_bytes(prim_index), + self.name, self.allenums) + +class TypenameExpr: + def __init__(self, name, type_index): + self.name = name + self.type_index = type_index + + def as_c_expr(self): + return ' { "%s", %d },' % (self.name, self.type_index) + + def as_python_expr(self): + return "b'%s%s'" % (format_four_bytes(self.type_index), self.name) + + +# ____________________________________________________________ + + +class Recompiler: + _num_externpy = 0 + + def __init__(self, ffi, module_name, target_is_python=False): + self.ffi = ffi + self.module_name = module_name + self.target_is_python = target_is_python + self._version = VERSION_BASE + + def needs_version(self, ver): + self._version = max(self._version, ver) + + def collect_type_table(self): + self._typesdict = {} + self._generate("collecttype") + # + 
all_decls = sorted(self._typesdict, key=str) + # + # prepare all FUNCTION bytecode sequences first + self.cffi_types = [] + for tp in all_decls: + if tp.is_raw_function: + assert self._typesdict[tp] is None + self._typesdict[tp] = len(self.cffi_types) + self.cffi_types.append(tp) # placeholder + for tp1 in tp.args: + assert isinstance(tp1, (model.VoidType, + model.BasePrimitiveType, + model.PointerType, + model.StructOrUnionOrEnum, + model.FunctionPtrType)) + if self._typesdict[tp1] is None: + self._typesdict[tp1] = len(self.cffi_types) + self.cffi_types.append(tp1) # placeholder + self.cffi_types.append('END') # placeholder + # + # prepare all OTHER bytecode sequences + for tp in all_decls: + if not tp.is_raw_function and self._typesdict[tp] is None: + self._typesdict[tp] = len(self.cffi_types) + self.cffi_types.append(tp) # placeholder + if tp.is_array_type and tp.length is not None: + self.cffi_types.append('LEN') # placeholder + assert None not in self._typesdict.values() + # + # collect all structs and unions and enums + self._struct_unions = {} + self._enums = {} + for tp in all_decls: + if isinstance(tp, model.StructOrUnion): + self._struct_unions[tp] = None + elif isinstance(tp, model.EnumType): + self._enums[tp] = None + for i, tp in enumerate(sorted(self._struct_unions, + key=lambda tp: tp.name)): + self._struct_unions[tp] = i + for i, tp in enumerate(sorted(self._enums, + key=lambda tp: tp.name)): + self._enums[tp] = i + # + # emit all bytecode sequences now + for tp in all_decls: + method = getattr(self, '_emit_bytecode_' + tp.__class__.__name__) + method(tp, self._typesdict[tp]) + # + # consistency check + for op in self.cffi_types: + assert isinstance(op, CffiOp) + self.cffi_types = tuple(self.cffi_types) # don't change any more + + def _enum_fields(self, tp): + # When producing C, expand all anonymous struct/union fields. + # That's necessary to have C code checking the offsets of the + # individual fields contained in them. 
When producing Python, + # don't do it and instead write it like it is, with the + # corresponding fields having an empty name. Empty names are + # recognized at runtime when we import the generated Python + # file. + expand_anonymous_struct_union = not self.target_is_python + return tp.enumfields(expand_anonymous_struct_union) + + def _do_collect_type(self, tp): + if not isinstance(tp, model.BaseTypeByIdentity): + if isinstance(tp, tuple): + for x in tp: + self._do_collect_type(x) + return + if tp not in self._typesdict: + self._typesdict[tp] = None + if isinstance(tp, model.FunctionPtrType): + self._do_collect_type(tp.as_raw_function()) + elif isinstance(tp, model.StructOrUnion): + if tp.fldtypes is not None and ( + tp not in self.ffi._parser._included_declarations): + for name1, tp1, _, _ in self._enum_fields(tp): + self._do_collect_type(self._field_type(tp, name1, tp1)) + else: + for _, x in tp._get_items(): + self._do_collect_type(x) + + def _generate(self, step_name): + lst = self.ffi._parser._declarations.items() + for name, (tp, quals) in sorted(lst): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_cpy_%s_%s' % (kind, + step_name)) + except AttributeError: + raise VerificationError( + "not implemented in recompile(): %r" % name) + try: + self._current_quals = quals + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + # ---------- + + ALL_STEPS = ["global", "field", "struct_union", "enum", "typename"] + + def collect_step_tables(self): + # collect the declarations for '_cffi_globals', '_cffi_typenames', etc. 
+ self._lsts = {} + for step_name in self.ALL_STEPS: + self._lsts[step_name] = [] + self._seen_struct_unions = set() + self._generate("ctx") + self._add_missing_struct_unions() + # + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + if step_name != "field": + lst.sort(key=lambda entry: entry.name) + self._lsts[step_name] = tuple(lst) # don't change any more + # + # check for a possible internal inconsistency: _cffi_struct_unions + # should have been generated with exactly self._struct_unions + lst = self._lsts["struct_union"] + for tp, i in self._struct_unions.items(): + assert i < len(lst) + assert lst[i].name == tp.name + assert len(lst) == len(self._struct_unions) + # same with enums + lst = self._lsts["enum"] + for tp, i in self._enums.items(): + assert i < len(lst) + assert lst[i].name == tp.name + assert len(lst) == len(self._enums) + + # ---------- + + def _prnt(self, what=''): + self._f.write(what + '\n') + + def write_source_to_f(self, f, preamble): + if self.target_is_python: + assert preamble is None + self.write_py_source_to_f(f) + else: + assert preamble is not None + self.write_c_source_to_f(f, preamble) + + def _rel_readlines(self, filename): + g = open(os.path.join(os.path.dirname(__file__), filename), 'r') + lines = g.readlines() + g.close() + return lines + + def write_c_source_to_f(self, f, preamble): + self._f = f + prnt = self._prnt + if self.ffi._embedding is not None: + prnt('#define _CFFI_USE_EMBEDDING') + if not USE_LIMITED_API: + prnt('#define _CFFI_NO_LIMITED_API') + # + # first the '#include' (actually done by inlining the file's content) + lines = self._rel_readlines('_cffi_include.h') + i = lines.index('#include "parse_c_type.h"\n') + lines[i:i+1] = self._rel_readlines('parse_c_type.h') + prnt(''.join(lines)) + # + # if we have ffi._embedding != None, we give it here as a macro + # and include an extra file + base_module_name = self.module_name.split('.')[-1] + if self.ffi._embedding is not None: + prnt('#define 
_CFFI_MODULE_NAME "%s"' % (self.module_name,)) + prnt('static const char _CFFI_PYTHON_STARTUP_CODE[] = {') + self._print_string_literal_in_array(self.ffi._embedding) + prnt('0 };') + prnt('#ifdef PYPY_VERSION') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC _cffi_pypyinit_%s' % ( + base_module_name,)) + prnt('#elif PY_MAJOR_VERSION >= 3') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC PyInit_%s' % ( + base_module_name,)) + prnt('#else') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC init%s' % ( + base_module_name,)) + prnt('#endif') + lines = self._rel_readlines('_embedding.h') + i = lines.index('#include "_cffi_errors.h"\n') + lines[i:i+1] = self._rel_readlines('_cffi_errors.h') + prnt(''.join(lines)) + self.needs_version(VERSION_EMBEDDED) + # + # then paste the C source given by the user, verbatim. + prnt('/************************************************************/') + prnt() + prnt(preamble) + prnt() + prnt('/************************************************************/') + prnt() + # + # the declaration of '_cffi_types' + prnt('static void *_cffi_types[] = {') + typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()]) + for i, op in enumerate(self.cffi_types): + comment = '' + if i in typeindex2type: + comment = ' // ' + typeindex2type[i]._get_c_name() + prnt('/* %2d */ %s,%s' % (i, op.as_c_expr(), comment)) + if not self.cffi_types: + prnt(' 0') + prnt('};') + prnt() + # + # call generate_cpy_xxx_decl(), for every xxx found from + # ffi._parser._declarations. This generates all the functions. 
+ self._seen_constants = set() + self._generate("decl") + # + # the declaration of '_cffi_globals' and '_cffi_typenames' + nums = {} + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + nums[step_name] = len(lst) + if nums[step_name] > 0: + prnt('static const struct _cffi_%s_s _cffi_%ss[] = {' % ( + step_name, step_name)) + for entry in lst: + prnt(entry.as_c_expr()) + prnt('};') + prnt() + # + # the declaration of '_cffi_includes' + if self.ffi._included_ffis: + prnt('static const char * const _cffi_includes[] = {') + for ffi_to_include in self.ffi._included_ffis: + try: + included_module_name, included_source = ( + ffi_to_include._assigned_source[:2]) + except AttributeError: + raise VerificationError( + "ffi object %r includes %r, but the latter has not " + "been prepared with set_source()" % ( + self.ffi, ffi_to_include,)) + if included_source is None: + raise VerificationError( + "not implemented yet: ffi.include() of a Python-based " + "ffi inside a C-based ffi") + prnt(' "%s",' % (included_module_name,)) + prnt(' NULL') + prnt('};') + prnt() + # + # the declaration of '_cffi_type_context' + prnt('static const struct _cffi_type_context_s _cffi_type_context = {') + prnt(' _cffi_types,') + for step_name in self.ALL_STEPS: + if nums[step_name] > 0: + prnt(' _cffi_%ss,' % step_name) + else: + prnt(' NULL, /* no %ss */' % step_name) + for step_name in self.ALL_STEPS: + if step_name != "field": + prnt(' %d, /* num_%ss */' % (nums[step_name], step_name)) + if self.ffi._included_ffis: + prnt(' _cffi_includes,') + else: + prnt(' NULL, /* no includes */') + prnt(' %d, /* num_types */' % (len(self.cffi_types),)) + flags = 0 + if self._num_externpy: + flags |= 1 # set to mean that we use extern "Python" + prnt(' %d, /* flags */' % flags) + prnt('};') + prnt() + # + # the init function + prnt('#ifdef __GNUC__') + prnt('# pragma GCC visibility push(default) /* for -fvisibility= */') + prnt('#endif') + prnt() + prnt('#ifdef PYPY_VERSION') + 
prnt('PyMODINIT_FUNC') + prnt('_cffi_pypyinit_%s(const void *p[])' % (base_module_name,)) + prnt('{') + if self._num_externpy: + prnt(' if (((intptr_t)p[0]) >= 0x0A03) {') + prnt(' _cffi_call_python_org = ' + '(void(*)(struct _cffi_externpy_s *, char *))p[1];') + prnt(' }') + prnt(' p[0] = (const void *)0x%x;' % self._version) + prnt(' p[1] = &_cffi_type_context;') + prnt('#if PY_MAJOR_VERSION >= 3') + prnt(' return NULL;') + prnt('#endif') + prnt('}') + # on Windows, distutils insists on putting init_cffi_xyz in + # 'export_symbols', so instead of fighting it, just give up and + # give it one + prnt('# ifdef _MSC_VER') + prnt(' PyMODINIT_FUNC') + prnt('# if PY_MAJOR_VERSION >= 3') + prnt(' PyInit_%s(void) { return NULL; }' % (base_module_name,)) + prnt('# else') + prnt(' init%s(void) { }' % (base_module_name,)) + prnt('# endif') + prnt('# endif') + prnt('#elif PY_MAJOR_VERSION >= 3') + prnt('PyMODINIT_FUNC') + prnt('PyInit_%s(void)' % (base_module_name,)) + prnt('{') + prnt(' return _cffi_init("%s", 0x%x, &_cffi_type_context);' % ( + self.module_name, self._version)) + prnt('}') + prnt('#else') + prnt('PyMODINIT_FUNC') + prnt('init%s(void)' % (base_module_name,)) + prnt('{') + prnt(' _cffi_init("%s", 0x%x, &_cffi_type_context);' % ( + self.module_name, self._version)) + prnt('}') + prnt('#endif') + prnt() + prnt('#ifdef __GNUC__') + prnt('# pragma GCC visibility pop') + prnt('#endif') + self._version = None + + def _to_py(self, x): + if isinstance(x, str): + return "b'%s'" % (x,) + if isinstance(x, (list, tuple)): + rep = [self._to_py(item) for item in x] + if len(rep) == 1: + rep.append('') + return "(%s)" % (','.join(rep),) + return x.as_python_expr() # Py2: unicode unexpected; Py3: bytes unexp. 
+ + def write_py_source_to_f(self, f): + self._f = f + prnt = self._prnt + # + # header + prnt("# auto-generated file") + prnt("import _cffi_backend") + # + # the 'import' of the included ffis + num_includes = len(self.ffi._included_ffis or ()) + for i in range(num_includes): + ffi_to_include = self.ffi._included_ffis[i] + try: + included_module_name, included_source = ( + ffi_to_include._assigned_source[:2]) + except AttributeError: + raise VerificationError( + "ffi object %r includes %r, but the latter has not " + "been prepared with set_source()" % ( + self.ffi, ffi_to_include,)) + if included_source is not None: + raise VerificationError( + "not implemented yet: ffi.include() of a C-based " + "ffi inside a Python-based ffi") + prnt('from %s import ffi as _ffi%d' % (included_module_name, i)) + prnt() + prnt("ffi = _cffi_backend.FFI('%s'," % (self.module_name,)) + prnt(" _version = 0x%x," % (self._version,)) + self._version = None + # + # the '_types' keyword argument + self.cffi_types = tuple(self.cffi_types) # don't change any more + types_lst = [op.as_python_bytes() for op in self.cffi_types] + prnt(' _types = %s,' % (self._to_py(''.join(types_lst)),)) + typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()]) + # + # the keyword arguments from ALL_STEPS + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + if len(lst) > 0 and step_name != "field": + prnt(' _%ss = %s,' % (step_name, self._to_py(lst))) + # + # the '_includes' keyword argument + if num_includes > 0: + prnt(' _includes = (%s,),' % ( + ', '.join(['_ffi%d' % i for i in range(num_includes)]),)) + # + # the footer + prnt(')') + + # ---------- + + def _gettypenum(self, type): + # a KeyError here is a bug. please report it! 
:-) + return self._typesdict[type] + + def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode): + extraarg = '' + if isinstance(tp, model.BasePrimitiveType) and not tp.is_complex_type(): + if tp.is_integer_type() and tp.name != '_Bool': + converter = '_cffi_to_c_int' + extraarg = ', %s' % tp.name + elif isinstance(tp, model.UnknownFloatType): + # don't check with is_float_type(): it may be a 'long + # double' here, and _cffi_to_c_double would loose precision + converter = '(%s)_cffi_to_c_double' % (tp.get_c_name(''),) + else: + cname = tp.get_c_name('') + converter = '(%s)_cffi_to_c_%s' % (cname, + tp.name.replace(' ', '_')) + if cname in ('char16_t', 'char32_t'): + self.needs_version(VERSION_CHAR16CHAR32) + errvalue = '-1' + # + elif isinstance(tp, model.PointerType): + self._convert_funcarg_to_c_ptr_or_array(tp, fromvar, + tovar, errcode) + return + # + elif (isinstance(tp, model.StructOrUnionOrEnum) or + isinstance(tp, model.BasePrimitiveType)): + # a struct (not a struct pointer) as a function argument; + # or, a complex (the same code works) + self._prnt(' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)' + % (tovar, self._gettypenum(tp), fromvar)) + self._prnt(' %s;' % errcode) + return + # + elif isinstance(tp, model.FunctionPtrType): + converter = '(%s)_cffi_to_c_pointer' % tp.get_c_name('') + extraarg = ', _cffi_type(%d)' % self._gettypenum(tp) + errvalue = 'NULL' + # + else: + raise NotImplementedError(tp) + # + self._prnt(' %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg)) + self._prnt(' if (%s == (%s)%s && PyErr_Occurred())' % ( + tovar, tp.get_c_name(''), errvalue)) + self._prnt(' %s;' % errcode) + + def _extra_local_variables(self, tp, localvars, freelines): + if isinstance(tp, model.PointerType): + localvars.add('Py_ssize_t datasize') + localvars.add('struct _cffi_freeme_s *large_args_free = NULL') + freelines.add('if (large_args_free != NULL)' + ' _cffi_free_array_arguments(large_args_free);') + + def 
_convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode): + self._prnt(' datasize = _cffi_prepare_pointer_call_argument(') + self._prnt(' _cffi_type(%d), %s, (char **)&%s);' % ( + self._gettypenum(tp), fromvar, tovar)) + self._prnt(' if (datasize != 0) {') + self._prnt(' %s = ((size_t)datasize) <= 640 ? ' + '(%s)alloca((size_t)datasize) : NULL;' % ( + tovar, tp.get_c_name(''))) + self._prnt(' if (_cffi_convert_array_argument(_cffi_type(%d), %s, ' + '(char **)&%s,' % (self._gettypenum(tp), fromvar, tovar)) + self._prnt(' datasize, &large_args_free) < 0)') + self._prnt(' %s;' % errcode) + self._prnt(' }') + + def _convert_expr_from_c(self, tp, var, context): + if isinstance(tp, model.BasePrimitiveType): + if tp.is_integer_type() and tp.name != '_Bool': + return '_cffi_from_c_int(%s, %s)' % (var, tp.name) + elif isinstance(tp, model.UnknownFloatType): + return '_cffi_from_c_double(%s)' % (var,) + elif tp.name != 'long double' and not tp.is_complex_type(): + cname = tp.name.replace(' ', '_') + if cname in ('char16_t', 'char32_t'): + self.needs_version(VERSION_CHAR16CHAR32) + return '_cffi_from_c_%s(%s)' % (cname, var) + else: + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, (model.PointerType, model.FunctionPtrType)): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.ArrayType): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(model.PointerType(tp.item))) + elif isinstance(tp, model.StructOrUnion): + if tp.fldnames is None: + raise TypeError("'%s' is used as %s, but is opaque" % ( + tp._get_c_name(), context)) + return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.EnumType): + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + else: + raise NotImplementedError(tp) + + # 
---------- + # typedefs + + def _typedef_type(self, tp, name): + return self._global_type(tp, "(*(%s *)0)" % (name,)) + + def _generate_cpy_typedef_collecttype(self, tp, name): + self._do_collect_type(self._typedef_type(tp, name)) + + def _generate_cpy_typedef_decl(self, tp, name): + pass + + def _typedef_ctx(self, tp, name): + type_index = self._typesdict[tp] + self._lsts["typename"].append(TypenameExpr(name, type_index)) + + def _generate_cpy_typedef_ctx(self, tp, name): + tp = self._typedef_type(tp, name) + self._typedef_ctx(tp, name) + if getattr(tp, "origin", None) == "unknown_type": + self._struct_ctx(tp, tp.name, approxname=None) + elif isinstance(tp, model.NamedPointerType): + self._struct_ctx(tp.totype, tp.totype.name, approxname=tp.name, + named_ptr=tp) + + # ---------- + # function declarations + + def _generate_cpy_function_collecttype(self, tp, name): + self._do_collect_type(tp.as_raw_function()) + if tp.ellipsis and not self.target_is_python: + self._do_collect_type(tp) + + def _generate_cpy_function_decl(self, tp, name): + assert not self.target_is_python + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no CPython wrapper) + self._generate_cpy_constant_decl(tp, name) + return + prnt = self._prnt + numargs = len(tp.args) + if numargs == 0: + argname = 'noarg' + elif numargs == 1: + argname = 'arg0' + else: + argname = 'args' + # + # ------------------------------ + # the 'd' version of the function, only for addressof(lib, 'func') + arguments = [] + call_arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arguments.append(type.get_c_name(' x%d' % i, context)) + call_arguments.append('x%d' % i) + repr_arguments = ', '.join(arguments) + repr_arguments = repr_arguments or 'void' + if tp.abi: + abi = tp.abi + ' ' + else: + abi = '' + 
name_and_arguments = '%s_cffi_d_%s(%s)' % (abi, name, repr_arguments) + prnt('static %s' % (tp.result.get_c_name(name_and_arguments),)) + prnt('{') + call_arguments = ', '.join(call_arguments) + result_code = 'return ' + if isinstance(tp.result, model.VoidType): + result_code = '' + prnt(' %s%s(%s);' % (result_code, name, call_arguments)) + prnt('}') + # + prnt('#ifndef PYPY_VERSION') # ------------------------------ + # + prnt('static PyObject *') + prnt('_cffi_f_%s(PyObject *self, PyObject *%s)' % (name, argname)) + prnt('{') + # + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arg = type.get_c_name(' x%d' % i, context) + prnt(' %s;' % arg) + # + localvars = set() + freelines = set() + for type in tp.args: + self._extra_local_variables(type, localvars, freelines) + for decl in sorted(localvars): + prnt(' %s;' % (decl,)) + # + if not isinstance(tp.result, model.VoidType): + result_code = 'result = ' + context = 'result of %s' % name + result_decl = ' %s;' % tp.result.get_c_name(' result', context) + prnt(result_decl) + prnt(' PyObject *pyresult;') + else: + result_decl = None + result_code = '' + # + if len(tp.args) > 1: + rng = range(len(tp.args)) + for i in rng: + prnt(' PyObject *arg%d;' % i) + prnt() + prnt(' if (!PyArg_UnpackTuple(args, "%s", %d, %d, %s))' % ( + name, len(rng), len(rng), + ', '.join(['&arg%d' % i for i in rng]))) + prnt(' return NULL;') + prnt() + # + for i, type in enumerate(tp.args): + self._convert_funcarg_to_c(type, 'arg%d' % i, 'x%d' % i, + 'return NULL') + prnt() + # + prnt(' Py_BEGIN_ALLOW_THREADS') + prnt(' _cffi_restore_errno();') + call_arguments = ['x%d' % i for i in range(len(tp.args))] + call_arguments = ', '.join(call_arguments) + prnt(' { %s%s(%s); }' % (result_code, name, call_arguments)) + prnt(' _cffi_save_errno();') + prnt(' Py_END_ALLOW_THREADS') + prnt() + # + prnt(' (void)self; /* unused */') + if numargs == 0: + prnt(' (void)noarg; /* unused */') + if result_code: + prnt(' pyresult = %s;' % + 
self._convert_expr_from_c(tp.result, 'result', 'result type')) + for freeline in freelines: + prnt(' ' + freeline) + prnt(' return pyresult;') + else: + for freeline in freelines: + prnt(' ' + freeline) + prnt(' Py_INCREF(Py_None);') + prnt(' return Py_None;') + prnt('}') + # + prnt('#else') # ------------------------------ + # + # the PyPy version: need to replace struct/union arguments with + # pointers, and if the result is a struct/union, insert a first + # arg that is a pointer to the result. We also do that for + # complex args and return type. + def need_indirection(type): + return (isinstance(type, model.StructOrUnion) or + (isinstance(type, model.PrimitiveType) and + type.is_complex_type())) + difference = False + arguments = [] + call_arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + indirection = '' + if need_indirection(type): + indirection = '*' + difference = True + arg = type.get_c_name(' %sx%d' % (indirection, i), context) + arguments.append(arg) + call_arguments.append('%sx%d' % (indirection, i)) + tp_result = tp.result + if need_indirection(tp_result): + context = 'result of %s' % name + arg = tp_result.get_c_name(' *result', context) + arguments.insert(0, arg) + tp_result = model.void_type + result_decl = None + result_code = '*result = ' + difference = True + if difference: + repr_arguments = ', '.join(arguments) + repr_arguments = repr_arguments or 'void' + name_and_arguments = '%s_cffi_f_%s(%s)' % (abi, name, + repr_arguments) + prnt('static %s' % (tp_result.get_c_name(name_and_arguments),)) + prnt('{') + if result_decl: + prnt(result_decl) + call_arguments = ', '.join(call_arguments) + prnt(' { %s%s(%s); }' % (result_code, name, call_arguments)) + if result_decl: + prnt(' return result;') + prnt('}') + else: + prnt('# define _cffi_f_%s _cffi_d_%s' % (name, name)) + # + prnt('#endif') # ------------------------------ + prnt() + + def _generate_cpy_function_ctx(self, tp, name): + if tp.ellipsis and not 
self.target_is_python: + self._generate_cpy_constant_ctx(tp, name) + return + type_index = self._typesdict[tp.as_raw_function()] + numargs = len(tp.args) + if self.target_is_python: + meth_kind = OP_DLOPEN_FUNC + elif numargs == 0: + meth_kind = OP_CPYTHON_BLTN_N # 'METH_NOARGS' + elif numargs == 1: + meth_kind = OP_CPYTHON_BLTN_O # 'METH_O' + else: + meth_kind = OP_CPYTHON_BLTN_V # 'METH_VARARGS' + self._lsts["global"].append( + GlobalExpr(name, '_cffi_f_%s' % name, + CffiOp(meth_kind, type_index), + size='_cffi_d_%s' % name)) + + # ---------- + # named structs or unions + + def _field_type(self, tp_struct, field_name, tp_field): + if isinstance(tp_field, model.ArrayType): + actual_length = tp_field.length + if actual_length == '...': + ptr_struct_name = tp_struct.get_c_name('*') + actual_length = '_cffi_array_len(((%s)0)->%s)' % ( + ptr_struct_name, field_name) + tp_item = self._field_type(tp_struct, '%s[0]' % field_name, + tp_field.item) + tp_field = model.ArrayType(tp_item, actual_length) + return tp_field + + def _struct_collecttype(self, tp): + self._do_collect_type(tp) + if self.target_is_python: + # also requires nested anon struct/unions in ABI mode, recursively + for fldtype in tp.anonymous_struct_fields(): + self._struct_collecttype(fldtype) + + def _struct_decl(self, tp, cname, approxname): + if tp.fldtypes is None: + return + prnt = self._prnt + checkfuncname = '_cffi_checkfld_%s' % (approxname,) + prnt('_CFFI_UNUSED_FN') + prnt('static void %s(%s *p)' % (checkfuncname, cname)) + prnt('{') + prnt(' /* only to generate compile-time warnings or errors */') + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in self._enum_fields(tp): + try: + if ftype.is_integer_type() or fbitsize >= 0: + # accept all integers, but complain on float or double + if fname != '': + prnt(" (void)((p->%s) | 0); /* check that '%s.%s' is " + "an integer */" % (fname, cname, fname)) + continue + # only accept exactly the type declared, except that '[]' + # is interpreted as a 
'*' and so will match any array length. + # (It would also match '*', but that's harder to detect...) + while (isinstance(ftype, model.ArrayType) + and (ftype.length is None or ftype.length == '...')): + ftype = ftype.item + fname = fname + '[0]' + prnt(' { %s = &p->%s; (void)tmp; }' % ( + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) + except VerificationError as e: + prnt(' /* %s */' % str(e)) # cannot verify it, ignore + prnt('}') + prnt('struct _cffi_align_%s { char x; %s y; };' % (approxname, cname)) + prnt() + + def _struct_ctx(self, tp, cname, approxname, named_ptr=None): + type_index = self._typesdict[tp] + reason_for_not_expanding = None + flags = [] + if isinstance(tp, model.UnionType): + flags.append("_CFFI_F_UNION") + if tp.fldtypes is None: + flags.append("_CFFI_F_OPAQUE") + reason_for_not_expanding = "opaque" + if (tp not in self.ffi._parser._included_declarations and + (named_ptr is None or + named_ptr not in self.ffi._parser._included_declarations)): + if tp.fldtypes is None: + pass # opaque + elif tp.partial or any(tp.anonymous_struct_fields()): + pass # field layout obtained silently from the C compiler + else: + flags.append("_CFFI_F_CHECK_FIELDS") + if tp.packed: + if tp.packed > 1: + raise NotImplementedError( + "%r is declared with 'pack=%r'; only 0 or 1 are " + "supported in API mode (try to use \"...;\", which " + "does not require a 'pack' declaration)" % + (tp, tp.packed)) + flags.append("_CFFI_F_PACKED") + else: + flags.append("_CFFI_F_EXTERNAL") + reason_for_not_expanding = "external" + flags = '|'.join(flags) or '0' + c_fields = [] + if reason_for_not_expanding is None: + enumfields = list(self._enum_fields(tp)) + for fldname, fldtype, fbitsize, fqual in enumfields: + fldtype = self._field_type(tp, fldname, fldtype) + self._check_not_opaque(fldtype, + "field '%s.%s'" % (tp.name, fldname)) + # cname is None for _add_missing_struct_unions() only + op = OP_NOOP + if fbitsize >= 0: + op = OP_BITFIELD + size = '%d /* 
bits */' % fbitsize + elif cname is None or ( + isinstance(fldtype, model.ArrayType) and + fldtype.length is None): + size = '(size_t)-1' + else: + size = 'sizeof(((%s)0)->%s)' % ( + tp.get_c_name('*') if named_ptr is None + else named_ptr.name, + fldname) + if cname is None or fbitsize >= 0: + offset = '(size_t)-1' + elif named_ptr is not None: + offset = '((char *)&((%s)0)->%s) - (char *)0' % ( + named_ptr.name, fldname) + else: + offset = 'offsetof(%s, %s)' % (tp.get_c_name(''), fldname) + c_fields.append( + FieldExpr(fldname, offset, size, fbitsize, + CffiOp(op, self._typesdict[fldtype]))) + first_field_index = len(self._lsts["field"]) + self._lsts["field"].extend(c_fields) + # + if cname is None: # unknown name, for _add_missing_struct_unions + size = '(size_t)-2' + align = -2 + comment = "unnamed" + else: + if named_ptr is not None: + size = 'sizeof(*(%s)0)' % (named_ptr.name,) + align = '-1 /* unknown alignment */' + else: + size = 'sizeof(%s)' % (cname,) + align = 'offsetof(struct _cffi_align_%s, y)' % (approxname,) + comment = None + else: + size = '(size_t)-1' + align = -1 + first_field_index = -1 + comment = reason_for_not_expanding + self._lsts["struct_union"].append( + StructUnionExpr(tp.name, type_index, flags, size, align, comment, + first_field_index, c_fields)) + self._seen_struct_unions.add(tp) + + def _check_not_opaque(self, tp, location): + while isinstance(tp, model.ArrayType): + tp = tp.item + if isinstance(tp, model.StructOrUnion) and tp.fldtypes is None: + raise TypeError( + "%s is of an opaque type (not declared in cdef())" % location) + + def _add_missing_struct_unions(self): + # not very nice, but some struct declarations might be missing + # because they don't have any known C name. Check that they are + # not partial (we can't complete or verify them!) and emit them + # anonymously. 
+ lst = list(self._struct_unions.items()) + lst.sort(key=lambda tp_order: tp_order[1]) + for tp, order in lst: + if tp not in self._seen_struct_unions: + if tp.partial: + raise NotImplementedError("internal inconsistency: %r is " + "partial but was not seen at " + "this point" % (tp,)) + if tp.name.startswith('$') and tp.name[1:].isdigit(): + approxname = tp.name[1:] + elif tp.name == '_IO_FILE' and tp.forcename == 'FILE': + approxname = 'FILE' + self._typedef_ctx(tp, 'FILE') + else: + raise NotImplementedError("internal inconsistency: %r" % + (tp,)) + self._struct_ctx(tp, None, approxname) + + def _generate_cpy_struct_collecttype(self, tp, name): + self._struct_collecttype(tp) + _generate_cpy_union_collecttype = _generate_cpy_struct_collecttype + + def _struct_names(self, tp): + cname = tp.get_c_name('') + if ' ' in cname: + return cname, cname.replace(' ', '_') + else: + return cname, '_' + cname + + def _generate_cpy_struct_decl(self, tp, name): + self._struct_decl(tp, *self._struct_names(tp)) + _generate_cpy_union_decl = _generate_cpy_struct_decl + + def _generate_cpy_struct_ctx(self, tp, name): + self._struct_ctx(tp, *self._struct_names(tp)) + _generate_cpy_union_ctx = _generate_cpy_struct_ctx + + # ---------- + # 'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. + + def _generate_cpy_anonymous_collecttype(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_collecttype(tp, name) + else: + self._struct_collecttype(tp) + + def _generate_cpy_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_decl(tp) + else: + self._struct_decl(tp, name, 'typedef_' + name) + + def _generate_cpy_anonymous_ctx(self, tp, name): + if isinstance(tp, model.EnumType): + self._enum_ctx(tp, name) + else: + self._struct_ctx(tp, name, 'typedef_' + name) + + # ---------- + # constants, declared with "static const ..." 
+ + def _generate_cpy_const(self, is_int, name, tp=None, category='const', + check_value=None): + if (category, name) in self._seen_constants: + raise VerificationError( + "duplicate declaration of %s '%s'" % (category, name)) + self._seen_constants.add((category, name)) + # + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + if is_int: + prnt('static int %s(unsigned long long *o)' % funcname) + prnt('{') + prnt(' int n = (%s) <= 0;' % (name,)) + prnt(' *o = (unsigned long long)((%s) | 0);' + ' /* check that %s is an integer */' % (name, name)) + if check_value is not None: + if check_value > 0: + check_value = '%dU' % (check_value,) + prnt(' if (!_cffi_check_int(*o, n, %s))' % (check_value,)) + prnt(' n |= 2;') + prnt(' return n;') + prnt('}') + else: + assert check_value is None + prnt('static void %s(char *o)' % funcname) + prnt('{') + prnt(' *(%s)o = %s;' % (tp.get_c_name('*'), name)) + prnt('}') + prnt() + + def _generate_cpy_constant_collecttype(self, tp, name): + is_int = tp.is_integer_type() + if not is_int or self.target_is_python: + self._do_collect_type(tp) + + def _generate_cpy_constant_decl(self, tp, name): + is_int = tp.is_integer_type() + self._generate_cpy_const(is_int, name, tp) + + def _generate_cpy_constant_ctx(self, tp, name): + if not self.target_is_python and tp.is_integer_type(): + type_op = CffiOp(OP_CONSTANT_INT, -1) + else: + if self.target_is_python: + const_kind = OP_DLOPEN_CONST + else: + const_kind = OP_CONSTANT + type_index = self._typesdict[tp] + type_op = CffiOp(const_kind, type_index) + self._lsts["global"].append( + GlobalExpr(name, '_cffi_const_%s' % name, type_op)) + + # ---------- + # enums + + def _generate_cpy_enum_collecttype(self, tp, name): + self._do_collect_type(tp) + + def _generate_cpy_enum_decl(self, tp, name=None): + for enumerator in tp.enumerators: + self._generate_cpy_const(True, enumerator) + + def _enum_ctx(self, tp, cname): + type_index = self._typesdict[tp] + type_op = CffiOp(OP_ENUM, -1) + if 
self.target_is_python: + tp.check_not_partial() + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + self._lsts["global"].append( + GlobalExpr(enumerator, '_cffi_const_%s' % enumerator, type_op, + check_value=enumvalue)) + # + if cname is not None and '$' not in cname and not self.target_is_python: + size = "sizeof(%s)" % cname + signed = "((%s)-1) <= 0" % cname + else: + basetp = tp.build_baseinttype(self.ffi, []) + size = self.ffi.sizeof(basetp) + signed = int(int(self.ffi.cast(basetp, -1)) < 0) + allenums = ",".join(tp.enumerators) + self._lsts["enum"].append( + EnumExpr(tp.name, type_index, size, signed, allenums)) + + def _generate_cpy_enum_ctx(self, tp, name): + self._enum_ctx(tp, tp._get_c_name()) + + # ---------- + # macros: for now only for integers + + def _generate_cpy_macro_collecttype(self, tp, name): + pass + + def _generate_cpy_macro_decl(self, tp, name): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + self._generate_cpy_const(True, name, check_value=check_value) + + def _generate_cpy_macro_ctx(self, tp, name): + if tp == '...': + if self.target_is_python: + raise VerificationError( + "cannot use the syntax '...' in '#define %s ...' 
when " + "using the ABI mode" % (name,)) + check_value = None + else: + check_value = tp # an integer + type_op = CffiOp(OP_CONSTANT_INT, -1) + self._lsts["global"].append( + GlobalExpr(name, '_cffi_const_%s' % name, type_op, + check_value=check_value)) + + # ---------- + # global variables + + def _global_type(self, tp, global_name): + if isinstance(tp, model.ArrayType): + actual_length = tp.length + if actual_length == '...': + actual_length = '_cffi_array_len(%s)' % (global_name,) + tp_item = self._global_type(tp.item, '%s[0]' % global_name) + tp = model.ArrayType(tp_item, actual_length) + return tp + + def _generate_cpy_variable_collecttype(self, tp, name): + self._do_collect_type(self._global_type(tp, name)) + + def _generate_cpy_variable_decl(self, tp, name): + prnt = self._prnt + tp = self._global_type(tp, name) + if isinstance(tp, model.ArrayType) and tp.length is None: + tp = tp.item + ampersand = '' + else: + ampersand = '&' + # This code assumes that casts from "tp *" to "void *" is a + # no-op, i.e. a function that returns a "tp *" can be called + # as if it returned a "void *". This should be generally true + # on any modern machine. The only exception to that rule (on + # uncommon architectures, and as far as I can tell) might be + # if 'tp' were a function type, but that is not possible here. + # (If 'tp' is a function _pointer_ type, then casts from "fn_t + # **" to "void *" are again no-ops, as far as I can tell.) 
+ decl = '*_cffi_var_%s(void)' % (name,) + prnt('static ' + tp.get_c_name(decl, quals=self._current_quals)) + prnt('{') + prnt(' return %s(%s);' % (ampersand, name)) + prnt('}') + prnt() + + def _generate_cpy_variable_ctx(self, tp, name): + tp = self._global_type(tp, name) + type_index = self._typesdict[tp] + if self.target_is_python: + op = OP_GLOBAL_VAR + else: + op = OP_GLOBAL_VAR_F + self._lsts["global"].append( + GlobalExpr(name, '_cffi_var_%s' % name, CffiOp(op, type_index))) + + # ---------- + # extern "Python" + + def _generate_cpy_extern_python_collecttype(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + self._do_collect_type(tp) + _generate_cpy_dllexport_python_collecttype = \ + _generate_cpy_extern_python_plus_c_collecttype = \ + _generate_cpy_extern_python_collecttype + + def _extern_python_decl(self, tp, name, tag_and_space): + prnt = self._prnt + if isinstance(tp.result, model.VoidType): + size_of_result = '0' + else: + context = 'result of %s' % name + size_of_result = '(int)sizeof(%s)' % ( + tp.result.get_c_name('', context),) + prnt('static struct _cffi_externpy_s _cffi_externpy__%s =' % name) + prnt(' { "%s.%s", %s, 0, 0 };' % ( + self.module_name, name, size_of_result)) + prnt() + # + arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arg = type.get_c_name(' a%d' % i, context) + arguments.append(arg) + # + repr_arguments = ', '.join(arguments) + repr_arguments = repr_arguments or 'void' + name_and_arguments = '%s(%s)' % (name, repr_arguments) + if tp.abi == "__stdcall": + name_and_arguments = '_cffi_stdcall ' + name_and_arguments + # + def may_need_128_bits(tp): + return (isinstance(tp, model.PrimitiveType) and + tp.name == 'long double') + # + size_of_a = max(len(tp.args)*8, 8) + if may_need_128_bits(tp.result): + size_of_a = max(size_of_a, 16) + if isinstance(tp.result, model.StructOrUnion): + size_of_a = 'sizeof(%s) > %d ? 
sizeof(%s) : %d' % ( + tp.result.get_c_name(''), size_of_a, + tp.result.get_c_name(''), size_of_a) + prnt('%s%s' % (tag_and_space, tp.result.get_c_name(name_and_arguments))) + prnt('{') + prnt(' char a[%s];' % size_of_a) + prnt(' char *p = a;') + for i, type in enumerate(tp.args): + arg = 'a%d' % i + if (isinstance(type, model.StructOrUnion) or + may_need_128_bits(type)): + arg = '&' + arg + type = model.PointerType(type) + prnt(' *(%s)(p + %d) = %s;' % (type.get_c_name('*'), i*8, arg)) + prnt(' _cffi_call_python(&_cffi_externpy__%s, p);' % name) + if not isinstance(tp.result, model.VoidType): + prnt(' return *(%s)p;' % (tp.result.get_c_name('*'),)) + prnt('}') + prnt() + self._num_externpy += 1 + + def _generate_cpy_extern_python_decl(self, tp, name): + self._extern_python_decl(tp, name, 'static ') + + def _generate_cpy_dllexport_python_decl(self, tp, name): + self._extern_python_decl(tp, name, 'CFFI_DLLEXPORT ') + + def _generate_cpy_extern_python_plus_c_decl(self, tp, name): + self._extern_python_decl(tp, name, '') + + def _generate_cpy_extern_python_ctx(self, tp, name): + if self.target_is_python: + raise VerificationError( + "cannot use 'extern \"Python\"' in the ABI mode") + if tp.ellipsis: + raise NotImplementedError("a vararg function is extern \"Python\"") + type_index = self._typesdict[tp] + type_op = CffiOp(OP_EXTERN_PYTHON, type_index) + self._lsts["global"].append( + GlobalExpr(name, '&_cffi_externpy__%s' % name, type_op, name)) + + _generate_cpy_dllexport_python_ctx = \ + _generate_cpy_extern_python_plus_c_ctx = \ + _generate_cpy_extern_python_ctx + + def _print_string_literal_in_array(self, s): + prnt = self._prnt + prnt('// # NB. 
this is not a string because of a size limit in MSVC') + if not isinstance(s, bytes): # unicode + s = s.encode('utf-8') # -> bytes + else: + s.decode('utf-8') # got bytes, check for valid utf-8 + try: + s.decode('ascii') + except UnicodeDecodeError: + s = b'# -*- encoding: utf8 -*-\n' + s + for line in s.splitlines(True): + comment = line + if type('//') is bytes: # python2 + line = map(ord, line) # make a list of integers + else: # python3 + # type(line) is bytes, which enumerates like a list of integers + comment = ascii(comment)[1:-1] + prnt(('// ' + comment).rstrip()) + printed_line = '' + for c in line: + if len(printed_line) >= 76: + prnt(printed_line) + printed_line = '' + printed_line += '%d,' % (c,) + prnt(printed_line) + + # ---------- + # emitting the opcodes for individual types + + def _emit_bytecode_VoidType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, PRIM_VOID) + + def _emit_bytecode_PrimitiveType(self, tp, index): + prim_index = PRIMITIVE_TO_INDEX[tp.name] + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, prim_index) + + def _emit_bytecode_UnknownIntegerType(self, tp, index): + s = ('_cffi_prim_int(sizeof(%s), (\n' + ' ((%s)-1) | 0 /* check that %s is an integer type */\n' + ' ) <= 0)' % (tp.name, tp.name, tp.name)) + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s) + + def _emit_bytecode_UnknownFloatType(self, tp, index): + s = ('_cffi_prim_float(sizeof(%s) *\n' + ' (((%s)1) / 2) * 2 /* integer => 0, float => 1 */\n' + ' )' % (tp.name, tp.name)) + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s) + + def _emit_bytecode_RawFunctionType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_FUNCTION, self._typesdict[tp.result]) + index += 1 + for tp1 in tp.args: + realindex = self._typesdict[tp1] + if index != realindex: + if isinstance(tp1, model.PrimitiveType): + self._emit_bytecode_PrimitiveType(tp1, index) + else: + self.cffi_types[index] = CffiOp(OP_NOOP, realindex) + index += 1 + flags = int(tp.ellipsis) + if tp.abi is not 
None: + if tp.abi == '__stdcall': + flags |= 2 + else: + raise NotImplementedError("abi=%r" % (tp.abi,)) + self.cffi_types[index] = CffiOp(OP_FUNCTION_END, flags) + + def _emit_bytecode_PointerType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[tp.totype]) + + _emit_bytecode_ConstPointerType = _emit_bytecode_PointerType + _emit_bytecode_NamedPointerType = _emit_bytecode_PointerType + + def _emit_bytecode_FunctionPtrType(self, tp, index): + raw = tp.as_raw_function() + self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[raw]) + + def _emit_bytecode_ArrayType(self, tp, index): + item_index = self._typesdict[tp.item] + if tp.length is None: + self.cffi_types[index] = CffiOp(OP_OPEN_ARRAY, item_index) + elif tp.length == '...': + raise VerificationError( + "type %s badly placed: the '...' array length can only be " + "used on global arrays or on fields of structures" % ( + str(tp).replace('/*...*/', '...'),)) + else: + assert self.cffi_types[index + 1] == 'LEN' + self.cffi_types[index] = CffiOp(OP_ARRAY, item_index) + self.cffi_types[index + 1] = CffiOp(None, str(tp.length)) + + def _emit_bytecode_StructType(self, tp, index): + struct_index = self._struct_unions[tp] + self.cffi_types[index] = CffiOp(OP_STRUCT_UNION, struct_index) + _emit_bytecode_UnionType = _emit_bytecode_StructType + + def _emit_bytecode_EnumType(self, tp, index): + enum_index = self._enums[tp] + self.cffi_types[index] = CffiOp(OP_ENUM, enum_index) + + +if sys.version_info >= (3,): + NativeIO = io.StringIO +else: + class NativeIO(io.BytesIO): + def write(self, s): + if isinstance(s, unicode): + s = s.encode('ascii') + super(NativeIO, self).write(s) + +def _make_c_or_py_source(ffi, module_name, preamble, target_file, verbose): + if verbose: + print("generating %s" % (target_file,)) + recompiler = Recompiler(ffi, module_name, + target_is_python=(preamble is None)) + recompiler.collect_type_table() + recompiler.collect_step_tables() + f = NativeIO() + 
recompiler.write_source_to_f(f, preamble) + output = f.getvalue() + try: + with open(target_file, 'r') as f1: + if f1.read(len(output) + 1) != output: + raise IOError + if verbose: + print("(already up-to-date)") + return False # already up-to-date + except IOError: + tmp_file = '%s.~%d' % (target_file, os.getpid()) + with open(tmp_file, 'w') as f1: + f1.write(output) + try: + os.rename(tmp_file, target_file) + except OSError: + os.unlink(target_file) + os.rename(tmp_file, target_file) + return True + +def make_c_source(ffi, module_name, preamble, target_c_file, verbose=False): + assert preamble is not None + return _make_c_or_py_source(ffi, module_name, preamble, target_c_file, + verbose) + +def make_py_source(ffi, module_name, target_py_file, verbose=False): + return _make_c_or_py_source(ffi, module_name, None, target_py_file, + verbose) + +def _modname_to_file(outputdir, modname, extension): + parts = modname.split('.') + try: + os.makedirs(os.path.join(outputdir, *parts[:-1])) + except OSError: + pass + parts[-1] += extension + return os.path.join(outputdir, *parts), parts + + +# Aaargh. Distutils is not tested at all for the purpose of compiling +# DLLs that are not extension modules. Here are some hacks to work +# around that, in the _patch_for_*() functions... + +def _patch_meth(patchlist, cls, name, new_meth): + old = getattr(cls, name) + patchlist.append((cls, name, old)) + setattr(cls, name, new_meth) + return old + +def _unpatch_meths(patchlist): + for cls, name, old_meth in reversed(patchlist): + setattr(cls, name, old_meth) + +def _patch_for_embedding(patchlist): + if sys.platform == 'win32': + # we must not remove the manifest when building for embedding! 
+ from distutils.msvc9compiler import MSVCCompiler + _patch_meth(patchlist, MSVCCompiler, '_remove_visual_c_ref', + lambda self, manifest_file: manifest_file) + + if sys.platform == 'darwin': + # we must not make a '-bundle', but a '-dynamiclib' instead + from distutils.ccompiler import CCompiler + def my_link_shared_object(self, *args, **kwds): + if '-bundle' in self.linker_so: + self.linker_so = list(self.linker_so) + i = self.linker_so.index('-bundle') + self.linker_so[i] = '-dynamiclib' + return old_link_shared_object(self, *args, **kwds) + old_link_shared_object = _patch_meth(patchlist, CCompiler, + 'link_shared_object', + my_link_shared_object) + +def _patch_for_target(patchlist, target): + from distutils.command.build_ext import build_ext + # if 'target' is different from '*', we need to patch some internal + # method to just return this 'target' value, instead of having it + # built from module_name + if target.endswith('.*'): + target = target[:-2] + if sys.platform == 'win32': + target += '.dll' + elif sys.platform == 'darwin': + target += '.dylib' + else: + target += '.so' + _patch_meth(patchlist, build_ext, 'get_ext_filename', + lambda self, ext_name: target) + + +def recompile(ffi, module_name, preamble, tmpdir='.', call_c_compiler=True, + c_file=None, source_extension='.c', extradir=None, + compiler_verbose=1, target=None, debug=None, **kwds): + if not isinstance(module_name, str): + module_name = module_name.encode('ascii') + if ffi._windows_unicode: + ffi._apply_windows_unicode(kwds) + if preamble is not None: + embedding = (ffi._embedding is not None) + if embedding: + ffi._apply_embedding_fix(kwds) + if c_file is None: + c_file, parts = _modname_to_file(tmpdir, module_name, + source_extension) + if extradir: + parts = [extradir] + parts + ext_c_file = os.path.join(*parts) + else: + ext_c_file = c_file + # + if target is None: + if embedding: + target = '%s.*' % module_name + else: + target = '*' + # + ext = ffiplatform.get_extension(ext_c_file, 
module_name, **kwds) + updated = make_c_source(ffi, module_name, preamble, c_file, + verbose=compiler_verbose) + if call_c_compiler: + patchlist = [] + cwd = os.getcwd() + try: + if embedding: + _patch_for_embedding(patchlist) + if target != '*': + _patch_for_target(patchlist, target) + if compiler_verbose: + if tmpdir == '.': + msg = 'the current directory is' + else: + msg = 'setting the current directory to' + print('%s %r' % (msg, os.path.abspath(tmpdir))) + os.chdir(tmpdir) + outputfilename = ffiplatform.compile('.', ext, + compiler_verbose, debug) + finally: + os.chdir(cwd) + _unpatch_meths(patchlist) + return outputfilename + else: + return ext, updated + else: + if c_file is None: + c_file, _ = _modname_to_file(tmpdir, module_name, '.py') + updated = make_py_source(ffi, module_name, c_file, + verbose=compiler_verbose) + if call_c_compiler: + return c_file + else: + return None, updated + diff --git a/.venv/lib/python3.9/site-packages/cffi/setuptools_ext.py b/.venv/lib/python3.9/site-packages/cffi/setuptools_ext.py new file mode 100644 index 0000000..8fe3614 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi/setuptools_ext.py @@ -0,0 +1,219 @@ +import os +import sys + +try: + basestring +except NameError: + # Python 3.x + basestring = str + +def error(msg): + from distutils.errors import DistutilsSetupError + raise DistutilsSetupError(msg) + + +def execfile(filename, glob): + # We use execfile() (here rewritten for Python 3) instead of + # __import__() to load the build script. The problem with + # a normal import is that in some packages, the intermediate + # __init__.py files may already try to import the file that + # we are generating. + with open(filename) as f: + src = f.read() + src += '\n' # Python 2.6 compatibility + code = compile(src, filename, 'exec') + exec(code, glob, glob) + + +def add_cffi_module(dist, mod_spec): + from cffi.api import FFI + + if not isinstance(mod_spec, basestring): + error("argument to 'cffi_modules=...' 
must be a str or a list of str," + " not %r" % (type(mod_spec).__name__,)) + mod_spec = str(mod_spec) + try: + build_file_name, ffi_var_name = mod_spec.split(':') + except ValueError: + error("%r must be of the form 'path/build.py:ffi_variable'" % + (mod_spec,)) + if not os.path.exists(build_file_name): + ext = '' + rewritten = build_file_name.replace('.', '/') + '.py' + if os.path.exists(rewritten): + ext = ' (rewrite cffi_modules to [%r])' % ( + rewritten + ':' + ffi_var_name,) + error("%r does not name an existing file%s" % (build_file_name, ext)) + + mod_vars = {'__name__': '__cffi__', '__file__': build_file_name} + execfile(build_file_name, mod_vars) + + try: + ffi = mod_vars[ffi_var_name] + except KeyError: + error("%r: object %r not found in module" % (mod_spec, + ffi_var_name)) + if not isinstance(ffi, FFI): + ffi = ffi() # maybe it's a function instead of directly an ffi + if not isinstance(ffi, FFI): + error("%r is not an FFI instance (got %r)" % (mod_spec, + type(ffi).__name__)) + if not hasattr(ffi, '_assigned_source'): + error("%r: the set_source() method was not called" % (mod_spec,)) + module_name, source, source_extension, kwds = ffi._assigned_source + if ffi._windows_unicode: + kwds = kwds.copy() + ffi._apply_windows_unicode(kwds) + + if source is None: + _add_py_module(dist, ffi, module_name) + else: + _add_c_module(dist, ffi, module_name, source, source_extension, kwds) + +def _set_py_limited_api(Extension, kwds): + """ + Add py_limited_api to kwds if setuptools >= 26 is in use. + Do not alter the setting if it already exists. + Setuptools takes care of ignoring the flag on Python 2 and PyPy. + + CPython itself should ignore the flag in a debugging version + (by not listing .abi3.so in the extensions it supports), but + it doesn't so far, creating troubles. That's why we check + for "not hasattr(sys, 'gettotalrefcount')" (the 2.7 compatible equivalent + of 'd' not in sys.abiflags). 
(http://bugs.python.org/issue28401) + + On Windows, with CPython <= 3.4, it's better not to use py_limited_api + because virtualenv *still* doesn't copy PYTHON3.DLL on these versions. + Recently (2020) we started shipping only >= 3.5 wheels, though. So + we'll give it another try and set py_limited_api on Windows >= 3.5. + """ + from cffi import recompiler + + if ('py_limited_api' not in kwds and not hasattr(sys, 'gettotalrefcount') + and recompiler.USE_LIMITED_API): + import setuptools + try: + setuptools_major_version = int(setuptools.__version__.partition('.')[0]) + if setuptools_major_version >= 26: + kwds['py_limited_api'] = True + except ValueError: # certain development versions of setuptools + # If we don't know the version number of setuptools, we + # try to set 'py_limited_api' anyway. At worst, we get a + # warning. + kwds['py_limited_api'] = True + return kwds + +def _add_c_module(dist, ffi, module_name, source, source_extension, kwds): + from distutils.core import Extension + # We are a setuptools extension. Need this build_ext for py_limited_api. + from setuptools.command.build_ext import build_ext + from distutils.dir_util import mkpath + from distutils import log + from cffi import recompiler + + allsources = ['$PLACEHOLDER'] + allsources.extend(kwds.pop('sources', [])) + kwds = _set_py_limited_api(Extension, kwds) + ext = Extension(name=module_name, sources=allsources, **kwds) + + def make_mod(tmpdir, pre_run=None): + c_file = os.path.join(tmpdir, module_name + source_extension) + log.info("generating cffi module %r" % c_file) + mkpath(tmpdir) + # a setuptools-only, API-only hook: called with the "ext" and "ffi" + # arguments just before we turn the ffi into C code. To use it, + # subclass the 'distutils.command.build_ext.build_ext' class and + # add a method 'def pre_run(self, ext, ffi)'. 
+ if pre_run is not None: + pre_run(ext, ffi) + updated = recompiler.make_c_source(ffi, module_name, source, c_file) + if not updated: + log.info("already up-to-date") + return c_file + + if dist.ext_modules is None: + dist.ext_modules = [] + dist.ext_modules.append(ext) + + base_class = dist.cmdclass.get('build_ext', build_ext) + class build_ext_make_mod(base_class): + def run(self): + if ext.sources[0] == '$PLACEHOLDER': + pre_run = getattr(self, 'pre_run', None) + ext.sources[0] = make_mod(self.build_temp, pre_run) + base_class.run(self) + dist.cmdclass['build_ext'] = build_ext_make_mod + # NB. multiple runs here will create multiple 'build_ext_make_mod' + # classes. Even in this case the 'build_ext' command should be + # run once; but just in case, the logic above does nothing if + # called again. + + +def _add_py_module(dist, ffi, module_name): + from distutils.dir_util import mkpath + from setuptools.command.build_py import build_py + from setuptools.command.build_ext import build_ext + from distutils import log + from cffi import recompiler + + def generate_mod(py_file): + log.info("generating cffi module %r" % py_file) + mkpath(os.path.dirname(py_file)) + updated = recompiler.make_py_source(ffi, module_name, py_file) + if not updated: + log.info("already up-to-date") + + base_class = dist.cmdclass.get('build_py', build_py) + class build_py_make_mod(base_class): + def run(self): + base_class.run(self) + module_path = module_name.split('.') + module_path[-1] += '.py' + generate_mod(os.path.join(self.build_lib, *module_path)) + def get_source_files(self): + # This is called from 'setup.py sdist' only. Exclude + # the generate .py module in this case. 
+ saved_py_modules = self.py_modules + try: + if saved_py_modules: + self.py_modules = [m for m in saved_py_modules + if m != module_name] + return base_class.get_source_files(self) + finally: + self.py_modules = saved_py_modules + dist.cmdclass['build_py'] = build_py_make_mod + + # distutils and setuptools have no notion I could find of a + # generated python module. If we don't add module_name to + # dist.py_modules, then things mostly work but there are some + # combination of options (--root and --record) that will miss + # the module. So we add it here, which gives a few apparently + # harmless warnings about not finding the file outside the + # build directory. + # Then we need to hack more in get_source_files(); see above. + if dist.py_modules is None: + dist.py_modules = [] + dist.py_modules.append(module_name) + + # the following is only for "build_ext -i" + base_class_2 = dist.cmdclass.get('build_ext', build_ext) + class build_ext_make_mod(base_class_2): + def run(self): + base_class_2.run(self) + if self.inplace: + # from get_ext_fullpath() in distutils/command/build_ext.py + module_path = module_name.split('.') + package = '.'.join(module_path[:-1]) + build_py = self.get_finalized_command('build_py') + package_dir = build_py.get_package_dir(package) + file_name = module_path[-1] + '.py' + generate_mod(os.path.join(package_dir, file_name)) + dist.cmdclass['build_ext'] = build_ext_make_mod + +def cffi_modules(dist, attr, value): + assert attr == 'cffi_modules' + if isinstance(value, basestring): + value = [value] + + for cffi_module in value: + add_cffi_module(dist, cffi_module) diff --git a/.venv/lib/python3.9/site-packages/cffi/vengine_cpy.py b/.venv/lib/python3.9/site-packages/cffi/vengine_cpy.py new file mode 100644 index 0000000..6de0df0 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi/vengine_cpy.py @@ -0,0 +1,1076 @@ +# +# DEPRECATED: implementation for ffi.verify() +# +import sys, imp +from . 
import model +from .error import VerificationError + + +class VCPythonEngine(object): + _class_key = 'x' + _gen_python_module = True + + def __init__(self, verifier): + self.verifier = verifier + self.ffi = verifier.ffi + self._struct_pending_verification = {} + self._types_of_builtin_functions = {} + + def patch_extension_kwds(self, kwds): + pass + + def find_module(self, module_name, path, so_suffixes): + try: + f, filename, descr = imp.find_module(module_name, path) + except ImportError: + return None + if f is not None: + f.close() + # Note that after a setuptools installation, there are both .py + # and .so files with the same basename. The code here relies on + # imp.find_module() locating the .so in priority. + if descr[0] not in so_suffixes: + return None + return filename + + def collect_types(self): + self._typesdict = {} + self._generate("collecttype") + + def _prnt(self, what=''): + self._f.write(what + '\n') + + def _gettypenum(self, type): + # a KeyError here is a bug. please report it! :-) + return self._typesdict[type] + + def _do_collect_type(self, tp): + if ((not isinstance(tp, model.PrimitiveType) + or tp.name == 'long double') + and tp not in self._typesdict): + num = len(self._typesdict) + self._typesdict[tp] = num + + def write_source_to_f(self): + self.collect_types() + # + # The new module will have a _cffi_setup() function that receives + # objects from the ffi world, and that calls some setup code in + # the module. This setup code is split in several independent + # functions, e.g. one per constant. The functions are "chained" + # by ending in a tail call to each other. + # + # This is further split in two chained lists, depending on if we + # can do it at import-time or if we must wait for _cffi_setup() to + # provide us with the objects. This is needed because we + # need the values of the enum constants in order to build the + # that we may have to pass to _cffi_setup(). 
+ # + # The following two 'chained_list_constants' items contains + # the head of these two chained lists, as a string that gives the + # call to do, if any. + self._chained_list_constants = ['((void)lib,0)', '((void)lib,0)'] + # + prnt = self._prnt + # first paste some standard set of lines that are mostly '#define' + prnt(cffimod_header) + prnt() + # then paste the C source given by the user, verbatim. + prnt(self.verifier.preamble) + prnt() + # + # call generate_cpy_xxx_decl(), for every xxx found from + # ffi._parser._declarations. This generates all the functions. + self._generate("decl") + # + # implement the function _cffi_setup_custom() as calling the + # head of the chained list. + self._generate_setup_custom() + prnt() + # + # produce the method table, including the entries for the + # generated Python->C function wrappers, which are done + # by generate_cpy_function_method(). + prnt('static PyMethodDef _cffi_methods[] = {') + self._generate("method") + prnt(' {"_cffi_setup", _cffi_setup, METH_VARARGS, NULL},') + prnt(' {NULL, NULL, 0, NULL} /* Sentinel */') + prnt('};') + prnt() + # + # standard init. 
+ modname = self.verifier.get_module_name() + constants = self._chained_list_constants[False] + prnt('#if PY_MAJOR_VERSION >= 3') + prnt() + prnt('static struct PyModuleDef _cffi_module_def = {') + prnt(' PyModuleDef_HEAD_INIT,') + prnt(' "%s",' % modname) + prnt(' NULL,') + prnt(' -1,') + prnt(' _cffi_methods,') + prnt(' NULL, NULL, NULL, NULL') + prnt('};') + prnt() + prnt('PyMODINIT_FUNC') + prnt('PyInit_%s(void)' % modname) + prnt('{') + prnt(' PyObject *lib;') + prnt(' lib = PyModule_Create(&_cffi_module_def);') + prnt(' if (lib == NULL)') + prnt(' return NULL;') + prnt(' if (%s < 0 || _cffi_init() < 0) {' % (constants,)) + prnt(' Py_DECREF(lib);') + prnt(' return NULL;') + prnt(' }') + prnt(' return lib;') + prnt('}') + prnt() + prnt('#else') + prnt() + prnt('PyMODINIT_FUNC') + prnt('init%s(void)' % modname) + prnt('{') + prnt(' PyObject *lib;') + prnt(' lib = Py_InitModule("%s", _cffi_methods);' % modname) + prnt(' if (lib == NULL)') + prnt(' return;') + prnt(' if (%s < 0 || _cffi_init() < 0)' % (constants,)) + prnt(' return;') + prnt(' return;') + prnt('}') + prnt() + prnt('#endif') + + def load_library(self, flags=None): + # XXX review all usages of 'self' here! + # import it as a new extension module + imp.acquire_lock() + try: + if hasattr(sys, "getdlopenflags"): + previous_flags = sys.getdlopenflags() + try: + if hasattr(sys, "setdlopenflags") and flags is not None: + sys.setdlopenflags(flags) + module = imp.load_dynamic(self.verifier.get_module_name(), + self.verifier.modulefilename) + except ImportError as e: + error = "importing %r: %s" % (self.verifier.modulefilename, e) + raise VerificationError(error) + finally: + if hasattr(sys, "setdlopenflags"): + sys.setdlopenflags(previous_flags) + finally: + imp.release_lock() + # + # call loading_cpy_struct() to get the struct layout inferred by + # the C compiler + self._load(module, 'loading') + # + # the C code will need the objects. Collect them in + # order in a list. 
+ revmapping = dict([(value, key) + for (key, value) in self._typesdict.items()]) + lst = [revmapping[i] for i in range(len(revmapping))] + lst = list(map(self.ffi._get_cached_btype, lst)) + # + # build the FFILibrary class and instance and call _cffi_setup(). + # this will set up some fields like '_cffi_types', and only then + # it will invoke the chained list of functions that will really + # build (notably) the constant objects, as if they are + # pointers, and store them as attributes on the 'library' object. + class FFILibrary(object): + _cffi_python_module = module + _cffi_ffi = self.ffi + _cffi_dir = [] + def __dir__(self): + return FFILibrary._cffi_dir + list(self.__dict__) + library = FFILibrary() + if module._cffi_setup(lst, VerificationError, library): + import warnings + warnings.warn("reimporting %r might overwrite older definitions" + % (self.verifier.get_module_name())) + # + # finally, call the loaded_cpy_xxx() functions. This will perform + # the final adjustments, like copying the Python->C wrapper + # functions from the module to the 'library' object, and setting + # up the FFILibrary class with properties for the global C variables. 
+ self._load(module, 'loaded', library=library) + module._cffi_original_ffi = self.ffi + module._cffi_types_of_builtin_funcs = self._types_of_builtin_functions + return library + + def _get_declarations(self): + lst = [(key, tp) for (key, (tp, qual)) in + self.ffi._parser._declarations.items()] + lst.sort() + return lst + + def _generate(self, step_name): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_cpy_%s_%s' % (kind, + step_name)) + except AttributeError: + raise VerificationError( + "not implemented in verify(): %r" % name) + try: + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _load(self, module, step_name, **kwds): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + method = getattr(self, '_%s_cpy_%s' % (step_name, kind)) + try: + method(tp, realname, module, **kwds) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _generate_nothing(self, tp, name): + pass + + def _loaded_noop(self, tp, name, module, **kwds): + pass + + # ---------- + + def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode): + extraarg = '' + if isinstance(tp, model.PrimitiveType): + if tp.is_integer_type() and tp.name != '_Bool': + converter = '_cffi_to_c_int' + extraarg = ', %s' % tp.name + else: + converter = '(%s)_cffi_to_c_%s' % (tp.get_c_name(''), + tp.name.replace(' ', '_')) + errvalue = '-1' + # + elif isinstance(tp, model.PointerType): + self._convert_funcarg_to_c_ptr_or_array(tp, fromvar, + tovar, errcode) + return + # + elif isinstance(tp, (model.StructOrUnion, model.EnumType)): + # a struct (not a struct pointer) as a function argument + self._prnt(' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)' + % (tovar, self._gettypenum(tp), fromvar)) + self._prnt(' %s;' % errcode) + return + # + elif isinstance(tp, model.FunctionPtrType): + converter = '(%s)_cffi_to_c_pointer' % 
tp.get_c_name('') + extraarg = ', _cffi_type(%d)' % self._gettypenum(tp) + errvalue = 'NULL' + # + else: + raise NotImplementedError(tp) + # + self._prnt(' %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg)) + self._prnt(' if (%s == (%s)%s && PyErr_Occurred())' % ( + tovar, tp.get_c_name(''), errvalue)) + self._prnt(' %s;' % errcode) + + def _extra_local_variables(self, tp, localvars, freelines): + if isinstance(tp, model.PointerType): + localvars.add('Py_ssize_t datasize') + localvars.add('struct _cffi_freeme_s *large_args_free = NULL') + freelines.add('if (large_args_free != NULL)' + ' _cffi_free_array_arguments(large_args_free);') + + def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode): + self._prnt(' datasize = _cffi_prepare_pointer_call_argument(') + self._prnt(' _cffi_type(%d), %s, (char **)&%s);' % ( + self._gettypenum(tp), fromvar, tovar)) + self._prnt(' if (datasize != 0) {') + self._prnt(' %s = ((size_t)datasize) <= 640 ? ' + 'alloca((size_t)datasize) : NULL;' % (tovar,)) + self._prnt(' if (_cffi_convert_array_argument(_cffi_type(%d), %s, ' + '(char **)&%s,' % (self._gettypenum(tp), fromvar, tovar)) + self._prnt(' datasize, &large_args_free) < 0)') + self._prnt(' %s;' % errcode) + self._prnt(' }') + + def _convert_expr_from_c(self, tp, var, context): + if isinstance(tp, model.PrimitiveType): + if tp.is_integer_type() and tp.name != '_Bool': + return '_cffi_from_c_int(%s, %s)' % (var, tp.name) + elif tp.name != 'long double': + return '_cffi_from_c_%s(%s)' % (tp.name.replace(' ', '_'), var) + else: + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, (model.PointerType, model.FunctionPtrType)): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.ArrayType): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(model.PointerType(tp.item))) + elif isinstance(tp, 
model.StructOrUnion): + if tp.fldnames is None: + raise TypeError("'%s' is used as %s, but is opaque" % ( + tp._get_c_name(), context)) + return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.EnumType): + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + else: + raise NotImplementedError(tp) + + # ---------- + # typedefs: generates no code so far + + _generate_cpy_typedef_collecttype = _generate_nothing + _generate_cpy_typedef_decl = _generate_nothing + _generate_cpy_typedef_method = _generate_nothing + _loading_cpy_typedef = _loaded_noop + _loaded_cpy_typedef = _loaded_noop + + # ---------- + # function declarations + + def _generate_cpy_function_collecttype(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + self._do_collect_type(tp) + else: + # don't call _do_collect_type(tp) in this common case, + # otherwise test_autofilled_struct_as_argument fails + for type in tp.args: + self._do_collect_type(type) + self._do_collect_type(tp.result) + + def _generate_cpy_function_decl(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no CPython wrapper) + self._generate_cpy_const(False, name, tp) + return + prnt = self._prnt + numargs = len(tp.args) + if numargs == 0: + argname = 'noarg' + elif numargs == 1: + argname = 'arg0' + else: + argname = 'args' + prnt('static PyObject *') + prnt('_cffi_f_%s(PyObject *self, PyObject *%s)' % (name, argname)) + prnt('{') + # + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + prnt(' %s;' % type.get_c_name(' x%d' % i, context)) + # + localvars = set() + freelines = set() + for type in tp.args: + self._extra_local_variables(type, localvars, freelines) + for decl in sorted(localvars): + 
prnt(' %s;' % (decl,)) + # + if not isinstance(tp.result, model.VoidType): + result_code = 'result = ' + context = 'result of %s' % name + prnt(' %s;' % tp.result.get_c_name(' result', context)) + prnt(' PyObject *pyresult;') + else: + result_code = '' + # + if len(tp.args) > 1: + rng = range(len(tp.args)) + for i in rng: + prnt(' PyObject *arg%d;' % i) + prnt() + prnt(' if (!PyArg_ParseTuple(args, "%s:%s", %s))' % ( + 'O' * numargs, name, ', '.join(['&arg%d' % i for i in rng]))) + prnt(' return NULL;') + prnt() + # + for i, type in enumerate(tp.args): + self._convert_funcarg_to_c(type, 'arg%d' % i, 'x%d' % i, + 'return NULL') + prnt() + # + prnt(' Py_BEGIN_ALLOW_THREADS') + prnt(' _cffi_restore_errno();') + prnt(' { %s%s(%s); }' % ( + result_code, name, + ', '.join(['x%d' % i for i in range(len(tp.args))]))) + prnt(' _cffi_save_errno();') + prnt(' Py_END_ALLOW_THREADS') + prnt() + # + prnt(' (void)self; /* unused */') + if numargs == 0: + prnt(' (void)noarg; /* unused */') + if result_code: + prnt(' pyresult = %s;' % + self._convert_expr_from_c(tp.result, 'result', 'result type')) + for freeline in freelines: + prnt(' ' + freeline) + prnt(' return pyresult;') + else: + for freeline in freelines: + prnt(' ' + freeline) + prnt(' Py_INCREF(Py_None);') + prnt(' return Py_None;') + prnt('}') + prnt() + + def _generate_cpy_function_method(self, tp, name): + if tp.ellipsis: + return + numargs = len(tp.args) + if numargs == 0: + meth = 'METH_NOARGS' + elif numargs == 1: + meth = 'METH_O' + else: + meth = 'METH_VARARGS' + self._prnt(' {"%s", _cffi_f_%s, %s, NULL},' % (name, name, meth)) + + _loading_cpy_function = _loaded_noop + + def _loaded_cpy_function(self, tp, name, module, library): + if tp.ellipsis: + return + func = getattr(module, name) + setattr(library, name, func) + self._types_of_builtin_functions[func] = tp + + # ---------- + # named structs + + _generate_cpy_struct_collecttype = _generate_nothing + def _generate_cpy_struct_decl(self, tp, name): + assert name 
== tp.name + self._generate_struct_or_union_decl(tp, 'struct', name) + def _generate_cpy_struct_method(self, tp, name): + self._generate_struct_or_union_method(tp, 'struct', name) + def _loading_cpy_struct(self, tp, name, module): + self._loading_struct_or_union(tp, 'struct', name, module) + def _loaded_cpy_struct(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + _generate_cpy_union_collecttype = _generate_nothing + def _generate_cpy_union_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'union', name) + def _generate_cpy_union_method(self, tp, name): + self._generate_struct_or_union_method(tp, 'union', name) + def _loading_cpy_union(self, tp, name, module): + self._loading_struct_or_union(tp, 'union', name, module) + def _loaded_cpy_union(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + def _generate_struct_or_union_decl(self, tp, prefix, name): + if tp.fldnames is None: + return # nothing to do with opaque structs + checkfuncname = '_cffi_check_%s_%s' % (prefix, name) + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + cname = ('%s %s' % (prefix, name)).strip() + # + prnt = self._prnt + prnt('static void %s(%s *p)' % (checkfuncname, cname)) + prnt('{') + prnt(' /* only to generate compile-time warnings or errors */') + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if (isinstance(ftype, model.PrimitiveType) + and ftype.is_integer_type()) or fbitsize >= 0: + # accept all integers, but complain on float or double + prnt(' (void)((p->%s) << 1);' % fname) + else: + # only accept exactly the type declared. 
+ try: + prnt(' { %s = &p->%s; (void)tmp; }' % ( + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) + except VerificationError as e: + prnt(' /* %s */' % str(e)) # cannot verify it, ignore + prnt('}') + prnt('static PyObject *') + prnt('%s(PyObject *self, PyObject *noarg)' % (layoutfuncname,)) + prnt('{') + prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname) + prnt(' static Py_ssize_t nums[] = {') + prnt(' sizeof(%s),' % cname) + prnt(' offsetof(struct _cffi_aligncheck, y),') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + prnt(' offsetof(%s, %s),' % (cname, fname)) + if isinstance(ftype, model.ArrayType) and ftype.length is None: + prnt(' 0, /* %s */' % ftype._get_c_name()) + else: + prnt(' sizeof(((%s *)0)->%s),' % (cname, fname)) + prnt(' -1') + prnt(' };') + prnt(' (void)self; /* unused */') + prnt(' (void)noarg; /* unused */') + prnt(' return _cffi_get_struct_layout(nums);') + prnt(' /* the next line is not executed, but compiled */') + prnt(' %s(0);' % (checkfuncname,)) + prnt('}') + prnt() + + def _generate_struct_or_union_method(self, tp, prefix, name): + if tp.fldnames is None: + return # nothing to do with opaque structs + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + self._prnt(' {"%s", %s, METH_NOARGS, NULL},' % (layoutfuncname, + layoutfuncname)) + + def _loading_struct_or_union(self, tp, prefix, name, module): + if tp.fldnames is None: + return # nothing to do with opaque structs + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + # + function = getattr(module, layoutfuncname) + layout = function() + if isinstance(tp, model.StructOrUnion) and tp.partial: + # use the function()'s sizes and offsets to guide the + # layout of the struct + totalsize = layout[0] + totalalignment = layout[1] + fieldofs = layout[2::2] + fieldsize = layout[3::2] + tp.force_flatten() + assert len(fieldofs) == len(fieldsize) == len(tp.fldnames) + tp.fixedlayout = 
fieldofs, fieldsize, totalsize, totalalignment + else: + cname = ('%s %s' % (prefix, name)).strip() + self._struct_pending_verification[tp] = layout, cname + + def _loaded_struct_or_union(self, tp): + if tp.fldnames is None: + return # nothing to do with opaque structs + self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered + + if tp in self._struct_pending_verification: + # check that the layout sizes and offsets match the real ones + def check(realvalue, expectedvalue, msg): + if realvalue != expectedvalue: + raise VerificationError( + "%s (we have %d, but C compiler says %d)" + % (msg, expectedvalue, realvalue)) + ffi = self.ffi + BStruct = ffi._get_cached_btype(tp) + layout, cname = self._struct_pending_verification.pop(tp) + check(layout[0], ffi.sizeof(BStruct), "wrong total size") + check(layout[1], ffi.alignof(BStruct), "wrong total alignment") + i = 2 + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + check(layout[i], ffi.offsetof(BStruct, fname), + "wrong offset for field %r" % (fname,)) + if layout[i+1] != 0: + BField = ffi._get_cached_btype(ftype) + check(layout[i+1], ffi.sizeof(BField), + "wrong size for field %r" % (fname,)) + i += 2 + assert i == len(layout) + + # ---------- + # 'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. 
+ + _generate_cpy_anonymous_collecttype = _generate_nothing + + def _generate_cpy_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_decl(tp, name, '') + else: + self._generate_struct_or_union_decl(tp, '', name) + + def _generate_cpy_anonymous_method(self, tp, name): + if not isinstance(tp, model.EnumType): + self._generate_struct_or_union_method(tp, '', name) + + def _loading_cpy_anonymous(self, tp, name, module): + if isinstance(tp, model.EnumType): + self._loading_cpy_enum(tp, name, module) + else: + self._loading_struct_or_union(tp, '', name, module) + + def _loaded_cpy_anonymous(self, tp, name, module, **kwds): + if isinstance(tp, model.EnumType): + self._loaded_cpy_enum(tp, name, module, **kwds) + else: + self._loaded_struct_or_union(tp) + + # ---------- + # constants, likely declared with '#define' + + def _generate_cpy_const(self, is_int, name, tp=None, category='const', + vartp=None, delayed=True, size_too=False, + check_value=None): + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + prnt('static int %s(PyObject *lib)' % funcname) + prnt('{') + prnt(' PyObject *o;') + prnt(' int res;') + if not is_int: + prnt(' %s;' % (vartp or tp).get_c_name(' i', name)) + else: + assert category == 'const' + # + if check_value is not None: + self._check_int_constant_value(name, check_value) + # + if not is_int: + if category == 'var': + realexpr = '&' + name + else: + realexpr = name + prnt(' i = (%s);' % (realexpr,)) + prnt(' o = %s;' % (self._convert_expr_from_c(tp, 'i', + 'variable type'),)) + assert delayed + else: + prnt(' o = _cffi_from_c_int_const(%s);' % name) + prnt(' if (o == NULL)') + prnt(' return -1;') + if size_too: + prnt(' {') + prnt(' PyObject *o1 = o;') + prnt(' o = Py_BuildValue("On", o1, (Py_ssize_t)sizeof(%s));' + % (name,)) + prnt(' Py_DECREF(o1);') + prnt(' if (o == NULL)') + prnt(' return -1;') + prnt(' }') + prnt(' res = PyObject_SetAttrString(lib, "%s", o);' % name) + prnt(' 
Py_DECREF(o);') + prnt(' if (res < 0)') + prnt(' return -1;') + prnt(' return %s;' % self._chained_list_constants[delayed]) + self._chained_list_constants[delayed] = funcname + '(lib)' + prnt('}') + prnt() + + def _generate_cpy_constant_collecttype(self, tp, name): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + if not is_int: + self._do_collect_type(tp) + + def _generate_cpy_constant_decl(self, tp, name): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + self._generate_cpy_const(is_int, name, tp) + + _generate_cpy_constant_method = _generate_nothing + _loading_cpy_constant = _loaded_noop + _loaded_cpy_constant = _loaded_noop + + # ---------- + # enums + + def _check_int_constant_value(self, name, value, err_prefix=''): + prnt = self._prnt + if value <= 0: + prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % ( + name, name, value)) + else: + prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % ( + name, name, value)) + prnt(' char buf[64];') + prnt(' if ((%s) <= 0)' % name) + prnt(' snprintf(buf, 63, "%%ld", (long)(%s));' % name) + prnt(' else') + prnt(' snprintf(buf, 63, "%%lu", (unsigned long)(%s));' % + name) + prnt(' PyErr_Format(_cffi_VerificationError,') + prnt(' "%s%s has the real value %s, not %s",') + prnt(' "%s", "%s", buf, "%d");' % ( + err_prefix, name, value)) + prnt(' return -1;') + prnt(' }') + + def _enum_funcname(self, prefix, name): + # "$enum_$1" => "___D_enum____D_1" + name = name.replace('$', '___D_') + return '_cffi_e_%s_%s' % (prefix, name) + + def _generate_cpy_enum_decl(self, tp, name, prefix='enum'): + if tp.partial: + for enumerator in tp.enumerators: + self._generate_cpy_const(True, enumerator, delayed=False) + return + # + funcname = self._enum_funcname(prefix, name) + prnt = self._prnt + prnt('static int %s(PyObject *lib)' % funcname) + prnt('{') + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + self._check_int_constant_value(enumerator, enumvalue, + "enum %s: " % 
name) + prnt(' return %s;' % self._chained_list_constants[True]) + self._chained_list_constants[True] = funcname + '(lib)' + prnt('}') + prnt() + + _generate_cpy_enum_collecttype = _generate_nothing + _generate_cpy_enum_method = _generate_nothing + + def _loading_cpy_enum(self, tp, name, module): + if tp.partial: + enumvalues = [getattr(module, enumerator) + for enumerator in tp.enumerators] + tp.enumvalues = tuple(enumvalues) + tp.partial_resolved = True + + def _loaded_cpy_enum(self, tp, name, module, library): + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + setattr(library, enumerator, enumvalue) + + # ---------- + # macros: for now only for integers + + def _generate_cpy_macro_decl(self, tp, name): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + self._generate_cpy_const(True, name, check_value=check_value) + + _generate_cpy_macro_collecttype = _generate_nothing + _generate_cpy_macro_method = _generate_nothing + _loading_cpy_macro = _loaded_noop + _loaded_cpy_macro = _loaded_noop + + # ---------- + # global variables + + def _generate_cpy_variable_collecttype(self, tp, name): + if isinstance(tp, model.ArrayType): + tp_ptr = model.PointerType(tp.item) + else: + tp_ptr = model.PointerType(tp) + self._do_collect_type(tp_ptr) + + def _generate_cpy_variable_decl(self, tp, name): + if isinstance(tp, model.ArrayType): + tp_ptr = model.PointerType(tp.item) + self._generate_cpy_const(False, name, tp, vartp=tp_ptr, + size_too = tp.length_is_unknown()) + else: + tp_ptr = model.PointerType(tp) + self._generate_cpy_const(False, name, tp_ptr, category='var') + + _generate_cpy_variable_method = _generate_nothing + _loading_cpy_variable = _loaded_noop + + def _loaded_cpy_variable(self, tp, name, module, library): + value = getattr(library, name) + if isinstance(tp, model.ArrayType): # int a[5] is "constant" in the + # sense that "a=..." 
is forbidden + if tp.length_is_unknown(): + assert isinstance(value, tuple) + (value, size) = value + BItemType = self.ffi._get_cached_btype(tp.item) + length, rest = divmod(size, self.ffi.sizeof(BItemType)) + if rest != 0: + raise VerificationError( + "bad size: %r does not seem to be an array of %s" % + (name, tp.item)) + tp = tp.resolve_length(length) + # 'value' is a which we have to replace with + # a if the N is actually known + if tp.length is not None: + BArray = self.ffi._get_cached_btype(tp) + value = self.ffi.cast(BArray, value) + setattr(library, name, value) + return + # remove ptr= from the library instance, and replace + # it by a property on the class, which reads/writes into ptr[0]. + ptr = value + delattr(library, name) + def getter(library): + return ptr[0] + def setter(library, value): + ptr[0] = value + setattr(type(library), name, property(getter, setter)) + type(library)._cffi_dir.append(name) + + # ---------- + + def _generate_setup_custom(self): + prnt = self._prnt + prnt('static int _cffi_setup_custom(PyObject *lib)') + prnt('{') + prnt(' return %s;' % self._chained_list_constants[True]) + prnt('}') + +cffimod_header = r''' +#include +#include + +/* this block of #ifs should be kept exactly identical between + c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py + and cffi/_cffi_include.h */ +#if defined(_MSC_VER) +# include /* for alloca() */ +# if _MSC_VER < 1600 /* MSVC < 2010 */ + typedef __int8 int8_t; + typedef __int16 int16_t; + typedef __int32 int32_t; + typedef __int64 int64_t; + typedef unsigned __int8 uint8_t; + typedef unsigned __int16 uint16_t; + typedef unsigned __int32 uint32_t; + typedef unsigned __int64 uint64_t; + typedef __int8 int_least8_t; + typedef __int16 int_least16_t; + typedef __int32 int_least32_t; + typedef __int64 int_least64_t; + typedef unsigned __int8 uint_least8_t; + typedef unsigned __int16 uint_least16_t; + typedef unsigned __int32 uint_least32_t; + typedef unsigned __int64 uint_least64_t; + 
typedef __int8 int_fast8_t; + typedef __int16 int_fast16_t; + typedef __int32 int_fast32_t; + typedef __int64 int_fast64_t; + typedef unsigned __int8 uint_fast8_t; + typedef unsigned __int16 uint_fast16_t; + typedef unsigned __int32 uint_fast32_t; + typedef unsigned __int64 uint_fast64_t; + typedef __int64 intmax_t; + typedef unsigned __int64 uintmax_t; +# else +# include +# endif +# if _MSC_VER < 1800 /* MSVC < 2013 */ +# ifndef __cplusplus + typedef unsigned char _Bool; +# endif +# endif +#else +# include +# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux) +# include +# endif +#endif + +#if PY_MAJOR_VERSION < 3 +# undef PyCapsule_CheckExact +# undef PyCapsule_GetPointer +# define PyCapsule_CheckExact(capsule) (PyCObject_Check(capsule)) +# define PyCapsule_GetPointer(capsule, name) \ + (PyCObject_AsVoidPtr(capsule)) +#endif + +#if PY_MAJOR_VERSION >= 3 +# define PyInt_FromLong PyLong_FromLong +#endif + +#define _cffi_from_c_double PyFloat_FromDouble +#define _cffi_from_c_float PyFloat_FromDouble +#define _cffi_from_c_long PyInt_FromLong +#define _cffi_from_c_ulong PyLong_FromUnsignedLong +#define _cffi_from_c_longlong PyLong_FromLongLong +#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong +#define _cffi_from_c__Bool PyBool_FromLong + +#define _cffi_to_c_double PyFloat_AsDouble +#define _cffi_to_c_float PyFloat_AsDouble + +#define _cffi_from_c_int_const(x) \ + (((x) > 0) ? \ + ((unsigned long long)(x) <= (unsigned long long)LONG_MAX) ? \ + PyInt_FromLong((long)(x)) : \ + PyLong_FromUnsignedLongLong((unsigned long long)(x)) : \ + ((long long)(x) >= (long long)LONG_MIN) ? \ + PyInt_FromLong((long)(x)) : \ + PyLong_FromLongLong((long long)(x))) + +#define _cffi_from_c_int(x, type) \ + (((type)-1) > 0 ? /* unsigned */ \ + (sizeof(type) < sizeof(long) ? \ + PyInt_FromLong((long)x) : \ + sizeof(type) == sizeof(long) ? 
\ + PyLong_FromUnsignedLong((unsigned long)x) : \ + PyLong_FromUnsignedLongLong((unsigned long long)x)) : \ + (sizeof(type) <= sizeof(long) ? \ + PyInt_FromLong((long)x) : \ + PyLong_FromLongLong((long long)x))) + +#define _cffi_to_c_int(o, type) \ + ((type)( \ + sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \ + : (type)_cffi_to_c_i8(o)) : \ + sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \ + : (type)_cffi_to_c_i16(o)) : \ + sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o) \ + : (type)_cffi_to_c_i32(o)) : \ + sizeof(type) == 8 ? (((type)-1) > 0 ? (type)_cffi_to_c_u64(o) \ + : (type)_cffi_to_c_i64(o)) : \ + (Py_FatalError("unsupported size for type " #type), (type)0))) + +#define _cffi_to_c_i8 \ + ((int(*)(PyObject *))_cffi_exports[1]) +#define _cffi_to_c_u8 \ + ((int(*)(PyObject *))_cffi_exports[2]) +#define _cffi_to_c_i16 \ + ((int(*)(PyObject *))_cffi_exports[3]) +#define _cffi_to_c_u16 \ + ((int(*)(PyObject *))_cffi_exports[4]) +#define _cffi_to_c_i32 \ + ((int(*)(PyObject *))_cffi_exports[5]) +#define _cffi_to_c_u32 \ + ((unsigned int(*)(PyObject *))_cffi_exports[6]) +#define _cffi_to_c_i64 \ + ((long long(*)(PyObject *))_cffi_exports[7]) +#define _cffi_to_c_u64 \ + ((unsigned long long(*)(PyObject *))_cffi_exports[8]) +#define _cffi_to_c_char \ + ((int(*)(PyObject *))_cffi_exports[9]) +#define _cffi_from_c_pointer \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[10]) +#define _cffi_to_c_pointer \ + ((char *(*)(PyObject *, CTypeDescrObject *))_cffi_exports[11]) +#define _cffi_get_struct_layout \ + ((PyObject *(*)(Py_ssize_t[]))_cffi_exports[12]) +#define _cffi_restore_errno \ + ((void(*)(void))_cffi_exports[13]) +#define _cffi_save_errno \ + ((void(*)(void))_cffi_exports[14]) +#define _cffi_from_c_char \ + ((PyObject *(*)(char))_cffi_exports[15]) +#define _cffi_from_c_deref \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[16]) +#define _cffi_to_c \ + ((int(*)(char *, CTypeDescrObject *, 
PyObject *))_cffi_exports[17]) +#define _cffi_from_c_struct \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[18]) +#define _cffi_to_c_wchar_t \ + ((wchar_t(*)(PyObject *))_cffi_exports[19]) +#define _cffi_from_c_wchar_t \ + ((PyObject *(*)(wchar_t))_cffi_exports[20]) +#define _cffi_to_c_long_double \ + ((long double(*)(PyObject *))_cffi_exports[21]) +#define _cffi_to_c__Bool \ + ((_Bool(*)(PyObject *))_cffi_exports[22]) +#define _cffi_prepare_pointer_call_argument \ + ((Py_ssize_t(*)(CTypeDescrObject *, PyObject *, char **))_cffi_exports[23]) +#define _cffi_convert_array_from_object \ + ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[24]) +#define _CFFI_NUM_EXPORTS 25 + +typedef struct _ctypedescr CTypeDescrObject; + +static void *_cffi_exports[_CFFI_NUM_EXPORTS]; +static PyObject *_cffi_types, *_cffi_VerificationError; + +static int _cffi_setup_custom(PyObject *lib); /* forward */ + +static PyObject *_cffi_setup(PyObject *self, PyObject *args) +{ + PyObject *library; + int was_alive = (_cffi_types != NULL); + (void)self; /* unused */ + if (!PyArg_ParseTuple(args, "OOO", &_cffi_types, &_cffi_VerificationError, + &library)) + return NULL; + Py_INCREF(_cffi_types); + Py_INCREF(_cffi_VerificationError); + if (_cffi_setup_custom(library) < 0) + return NULL; + return PyBool_FromLong(was_alive); +} + +union _cffi_union_alignment_u { + unsigned char m_char; + unsigned short m_short; + unsigned int m_int; + unsigned long m_long; + unsigned long long m_longlong; + float m_float; + double m_double; + long double m_longdouble; +}; + +struct _cffi_freeme_s { + struct _cffi_freeme_s *next; + union _cffi_union_alignment_u alignment; +}; + +#ifdef __GNUC__ + __attribute__((unused)) +#endif +static int _cffi_convert_array_argument(CTypeDescrObject *ctptr, PyObject *arg, + char **output_data, Py_ssize_t datasize, + struct _cffi_freeme_s **freeme) +{ + char *p; + if (datasize < 0) + return -1; + + p = *output_data; + if (p == NULL) { + struct 
_cffi_freeme_s *fp = (struct _cffi_freeme_s *)PyObject_Malloc( + offsetof(struct _cffi_freeme_s, alignment) + (size_t)datasize); + if (fp == NULL) + return -1; + fp->next = *freeme; + *freeme = fp; + p = *output_data = (char *)&fp->alignment; + } + memset((void *)p, 0, (size_t)datasize); + return _cffi_convert_array_from_object(p, ctptr, arg); +} + +#ifdef __GNUC__ + __attribute__((unused)) +#endif +static void _cffi_free_array_arguments(struct _cffi_freeme_s *freeme) +{ + do { + void *p = (void *)freeme; + freeme = freeme->next; + PyObject_Free(p); + } while (freeme != NULL); +} + +static int _cffi_init(void) +{ + PyObject *module, *c_api_object = NULL; + + module = PyImport_ImportModule("_cffi_backend"); + if (module == NULL) + goto failure; + + c_api_object = PyObject_GetAttrString(module, "_C_API"); + if (c_api_object == NULL) + goto failure; + if (!PyCapsule_CheckExact(c_api_object)) { + PyErr_SetNone(PyExc_ImportError); + goto failure; + } + memcpy(_cffi_exports, PyCapsule_GetPointer(c_api_object, "cffi"), + _CFFI_NUM_EXPORTS * sizeof(void *)); + + Py_DECREF(module); + Py_DECREF(c_api_object); + return 0; + + failure: + Py_XDECREF(module); + Py_XDECREF(c_api_object); + return -1; +} + +#define _cffi_type(num) ((CTypeDescrObject *)PyList_GET_ITEM(_cffi_types, num)) + +/**********/ +''' diff --git a/.venv/lib/python3.9/site-packages/cffi/vengine_gen.py b/.venv/lib/python3.9/site-packages/cffi/vengine_gen.py new file mode 100644 index 0000000..2642152 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi/vengine_gen.py @@ -0,0 +1,675 @@ +# +# DEPRECATED: implementation for ffi.verify() +# +import sys, os +import types + +from . 
import model +from .error import VerificationError + + +class VGenericEngine(object): + _class_key = 'g' + _gen_python_module = False + + def __init__(self, verifier): + self.verifier = verifier + self.ffi = verifier.ffi + self.export_symbols = [] + self._struct_pending_verification = {} + + def patch_extension_kwds(self, kwds): + # add 'export_symbols' to the dictionary. Note that we add the + # list before filling it. When we fill it, it will thus also show + # up in kwds['export_symbols']. + kwds.setdefault('export_symbols', self.export_symbols) + + def find_module(self, module_name, path, so_suffixes): + for so_suffix in so_suffixes: + basename = module_name + so_suffix + if path is None: + path = sys.path + for dirname in path: + filename = os.path.join(dirname, basename) + if os.path.isfile(filename): + return filename + + def collect_types(self): + pass # not needed in the generic engine + + def _prnt(self, what=''): + self._f.write(what + '\n') + + def write_source_to_f(self): + prnt = self._prnt + # first paste some standard set of lines that are mostly '#include' + prnt(cffimod_header) + # then paste the C source given by the user, verbatim. + prnt(self.verifier.preamble) + # + # call generate_gen_xxx_decl(), for every xxx found from + # ffi._parser._declarations. This generates all the functions. 
+ self._generate('decl') + # + # on Windows, distutils insists on putting init_cffi_xyz in + # 'export_symbols', so instead of fighting it, just give up and + # give it one + if sys.platform == 'win32': + if sys.version_info >= (3,): + prefix = 'PyInit_' + else: + prefix = 'init' + modname = self.verifier.get_module_name() + prnt("void %s%s(void) { }\n" % (prefix, modname)) + + def load_library(self, flags=0): + # import it with the CFFI backend + backend = self.ffi._backend + # needs to make a path that contains '/', on Posix + filename = os.path.join(os.curdir, self.verifier.modulefilename) + module = backend.load_library(filename, flags) + # + # call loading_gen_struct() to get the struct layout inferred by + # the C compiler + self._load(module, 'loading') + + # build the FFILibrary class and instance, this is a module subclass + # because modules are expected to have usually-constant-attributes and + # in PyPy this means the JIT is able to treat attributes as constant, + # which we want. + class FFILibrary(types.ModuleType): + _cffi_generic_module = module + _cffi_ffi = self.ffi + _cffi_dir = [] + def __dir__(self): + return FFILibrary._cffi_dir + library = FFILibrary("") + # + # finally, call the loaded_gen_xxx() functions. This will set + # up the 'library' object. 
+ self._load(module, 'loaded', library=library) + return library + + def _get_declarations(self): + lst = [(key, tp) for (key, (tp, qual)) in + self.ffi._parser._declarations.items()] + lst.sort() + return lst + + def _generate(self, step_name): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_gen_%s_%s' % (kind, + step_name)) + except AttributeError: + raise VerificationError( + "not implemented in verify(): %r" % name) + try: + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _load(self, module, step_name, **kwds): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + method = getattr(self, '_%s_gen_%s' % (step_name, kind)) + try: + method(tp, realname, module, **kwds) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _generate_nothing(self, tp, name): + pass + + def _loaded_noop(self, tp, name, module, **kwds): + pass + + # ---------- + # typedefs: generates no code so far + + _generate_gen_typedef_decl = _generate_nothing + _loading_gen_typedef = _loaded_noop + _loaded_gen_typedef = _loaded_noop + + # ---------- + # function declarations + + def _generate_gen_function_decl(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no _cffi_f_%s wrapper) + self._generate_gen_const(False, name, tp) + return + prnt = self._prnt + numargs = len(tp.args) + argnames = [] + for i, type in enumerate(tp.args): + indirection = '' + if isinstance(type, model.StructOrUnion): + indirection = '*' + argnames.append('%sx%d' % (indirection, i)) + context = 'argument of %s' % name + arglist = [type.get_c_name(' %s' % arg, context) + for type, arg in zip(tp.args, argnames)] + tpresult = tp.result + if 
isinstance(tpresult, model.StructOrUnion): + arglist.insert(0, tpresult.get_c_name(' *r', context)) + tpresult = model.void_type + arglist = ', '.join(arglist) or 'void' + wrappername = '_cffi_f_%s' % name + self.export_symbols.append(wrappername) + if tp.abi: + abi = tp.abi + ' ' + else: + abi = '' + funcdecl = ' %s%s(%s)' % (abi, wrappername, arglist) + context = 'result of %s' % name + prnt(tpresult.get_c_name(funcdecl, context)) + prnt('{') + # + if isinstance(tp.result, model.StructOrUnion): + result_code = '*r = ' + elif not isinstance(tp.result, model.VoidType): + result_code = 'return ' + else: + result_code = '' + prnt(' %s%s(%s);' % (result_code, name, ', '.join(argnames))) + prnt('}') + prnt() + + _loading_gen_function = _loaded_noop + + def _loaded_gen_function(self, tp, name, module, library): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + newfunction = self._load_constant(False, tp, name, module) + else: + indirections = [] + base_tp = tp + if (any(isinstance(typ, model.StructOrUnion) for typ in tp.args) + or isinstance(tp.result, model.StructOrUnion)): + indirect_args = [] + for i, typ in enumerate(tp.args): + if isinstance(typ, model.StructOrUnion): + typ = model.PointerType(typ) + indirections.append((i, typ)) + indirect_args.append(typ) + indirect_result = tp.result + if isinstance(indirect_result, model.StructOrUnion): + if indirect_result.fldtypes is None: + raise TypeError("'%s' is used as result type, " + "but is opaque" % ( + indirect_result._get_c_name(),)) + indirect_result = model.PointerType(indirect_result) + indirect_args.insert(0, indirect_result) + indirections.insert(0, ("result", indirect_result)) + indirect_result = model.void_type + tp = model.FunctionPtrType(tuple(indirect_args), + indirect_result, tp.ellipsis) + BFunc = self.ffi._get_cached_btype(tp) + wrappername = '_cffi_f_%s' % name + newfunction = module.load_function(BFunc, wrappername) + for i, typ in indirections: + newfunction = 
self._make_struct_wrapper(newfunction, i, typ, + base_tp) + setattr(library, name, newfunction) + type(library)._cffi_dir.append(name) + + def _make_struct_wrapper(self, oldfunc, i, tp, base_tp): + backend = self.ffi._backend + BType = self.ffi._get_cached_btype(tp) + if i == "result": + ffi = self.ffi + def newfunc(*args): + res = ffi.new(BType) + oldfunc(res, *args) + return res[0] + else: + def newfunc(*args): + args = args[:i] + (backend.newp(BType, args[i]),) + args[i+1:] + return oldfunc(*args) + newfunc._cffi_base_type = base_tp + return newfunc + + # ---------- + # named structs + + def _generate_gen_struct_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'struct', name) + + def _loading_gen_struct(self, tp, name, module): + self._loading_struct_or_union(tp, 'struct', name, module) + + def _loaded_gen_struct(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + def _generate_gen_union_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'union', name) + + def _loading_gen_union(self, tp, name, module): + self._loading_struct_or_union(tp, 'union', name, module) + + def _loaded_gen_union(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + def _generate_struct_or_union_decl(self, tp, prefix, name): + if tp.fldnames is None: + return # nothing to do with opaque structs + checkfuncname = '_cffi_check_%s_%s' % (prefix, name) + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + cname = ('%s %s' % (prefix, name)).strip() + # + prnt = self._prnt + prnt('static void %s(%s *p)' % (checkfuncname, cname)) + prnt('{') + prnt(' /* only to generate compile-time warnings or errors */') + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if (isinstance(ftype, model.PrimitiveType) + and ftype.is_integer_type()) or fbitsize >= 0: + # accept all integers, but complain on float or double + prnt(' (void)((p->%s) << 1);' % fname) + else: 
+ # only accept exactly the type declared. + try: + prnt(' { %s = &p->%s; (void)tmp; }' % ( + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) + except VerificationError as e: + prnt(' /* %s */' % str(e)) # cannot verify it, ignore + prnt('}') + self.export_symbols.append(layoutfuncname) + prnt('intptr_t %s(intptr_t i)' % (layoutfuncname,)) + prnt('{') + prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname) + prnt(' static intptr_t nums[] = {') + prnt(' sizeof(%s),' % cname) + prnt(' offsetof(struct _cffi_aligncheck, y),') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + prnt(' offsetof(%s, %s),' % (cname, fname)) + if isinstance(ftype, model.ArrayType) and ftype.length is None: + prnt(' 0, /* %s */' % ftype._get_c_name()) + else: + prnt(' sizeof(((%s *)0)->%s),' % (cname, fname)) + prnt(' -1') + prnt(' };') + prnt(' return nums[i];') + prnt(' /* the next line is not executed, but compiled */') + prnt(' %s(0);' % (checkfuncname,)) + prnt('}') + prnt() + + def _loading_struct_or_union(self, tp, prefix, name, module): + if tp.fldnames is None: + return # nothing to do with opaque structs + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + # + BFunc = self.ffi._typeof_locked("intptr_t(*)(intptr_t)")[0] + function = module.load_function(BFunc, layoutfuncname) + layout = [] + num = 0 + while True: + x = function(num) + if x < 0: break + layout.append(x) + num += 1 + if isinstance(tp, model.StructOrUnion) and tp.partial: + # use the function()'s sizes and offsets to guide the + # layout of the struct + totalsize = layout[0] + totalalignment = layout[1] + fieldofs = layout[2::2] + fieldsize = layout[3::2] + tp.force_flatten() + assert len(fieldofs) == len(fieldsize) == len(tp.fldnames) + tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment + else: + cname = ('%s %s' % (prefix, name)).strip() + self._struct_pending_verification[tp] = layout, cname + + def 
_loaded_struct_or_union(self, tp): + if tp.fldnames is None: + return # nothing to do with opaque structs + self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered + + if tp in self._struct_pending_verification: + # check that the layout sizes and offsets match the real ones + def check(realvalue, expectedvalue, msg): + if realvalue != expectedvalue: + raise VerificationError( + "%s (we have %d, but C compiler says %d)" + % (msg, expectedvalue, realvalue)) + ffi = self.ffi + BStruct = ffi._get_cached_btype(tp) + layout, cname = self._struct_pending_verification.pop(tp) + check(layout[0], ffi.sizeof(BStruct), "wrong total size") + check(layout[1], ffi.alignof(BStruct), "wrong total alignment") + i = 2 + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + check(layout[i], ffi.offsetof(BStruct, fname), + "wrong offset for field %r" % (fname,)) + if layout[i+1] != 0: + BField = ffi._get_cached_btype(ftype) + check(layout[i+1], ffi.sizeof(BField), + "wrong size for field %r" % (fname,)) + i += 2 + assert i == len(layout) + + # ---------- + # 'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. 
+ + def _generate_gen_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_gen_enum_decl(tp, name, '') + else: + self._generate_struct_or_union_decl(tp, '', name) + + def _loading_gen_anonymous(self, tp, name, module): + if isinstance(tp, model.EnumType): + self._loading_gen_enum(tp, name, module, '') + else: + self._loading_struct_or_union(tp, '', name, module) + + def _loaded_gen_anonymous(self, tp, name, module, **kwds): + if isinstance(tp, model.EnumType): + self._loaded_gen_enum(tp, name, module, **kwds) + else: + self._loaded_struct_or_union(tp) + + # ---------- + # constants, likely declared with '#define' + + def _generate_gen_const(self, is_int, name, tp=None, category='const', + check_value=None): + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + self.export_symbols.append(funcname) + if check_value is not None: + assert is_int + assert category == 'const' + prnt('int %s(char *out_error)' % funcname) + prnt('{') + self._check_int_constant_value(name, check_value) + prnt(' return 0;') + prnt('}') + elif is_int: + assert category == 'const' + prnt('int %s(long long *out_value)' % funcname) + prnt('{') + prnt(' *out_value = (long long)(%s);' % (name,)) + prnt(' return (%s) <= 0;' % (name,)) + prnt('}') + else: + assert tp is not None + assert check_value is None + if category == 'var': + ampersand = '&' + else: + ampersand = '' + extra = '' + if category == 'const' and isinstance(tp, model.StructOrUnion): + extra = 'const *' + ampersand = '&' + prnt(tp.get_c_name(' %s%s(void)' % (extra, funcname), name)) + prnt('{') + prnt(' return (%s%s);' % (ampersand, name)) + prnt('}') + prnt() + + def _generate_gen_constant_decl(self, tp, name): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + self._generate_gen_const(is_int, name, tp) + + _loading_gen_constant = _loaded_noop + + def _load_constant(self, is_int, tp, name, module, check_value=None): + funcname = '_cffi_const_%s' % name + if 
check_value is not None: + assert is_int + self._load_known_int_constant(module, funcname) + value = check_value + elif is_int: + BType = self.ffi._typeof_locked("long long*")[0] + BFunc = self.ffi._typeof_locked("int(*)(long long*)")[0] + function = module.load_function(BFunc, funcname) + p = self.ffi.new(BType) + negative = function(p) + value = int(p[0]) + if value < 0 and not negative: + BLongLong = self.ffi._typeof_locked("long long")[0] + value += (1 << (8*self.ffi.sizeof(BLongLong))) + else: + assert check_value is None + fntypeextra = '(*)(void)' + if isinstance(tp, model.StructOrUnion): + fntypeextra = '*' + fntypeextra + BFunc = self.ffi._typeof_locked(tp.get_c_name(fntypeextra, name))[0] + function = module.load_function(BFunc, funcname) + value = function() + if isinstance(tp, model.StructOrUnion): + value = value[0] + return value + + def _loaded_gen_constant(self, tp, name, module, library): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + value = self._load_constant(is_int, tp, name, module) + setattr(library, name, value) + type(library)._cffi_dir.append(name) + + # ---------- + # enums + + def _check_int_constant_value(self, name, value): + prnt = self._prnt + if value <= 0: + prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % ( + name, name, value)) + else: + prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % ( + name, name, value)) + prnt(' char buf[64];') + prnt(' if ((%s) <= 0)' % name) + prnt(' sprintf(buf, "%%ld", (long)(%s));' % name) + prnt(' else') + prnt(' sprintf(buf, "%%lu", (unsigned long)(%s));' % + name) + prnt(' sprintf(out_error, "%s has the real value %s, not %s",') + prnt(' "%s", buf, "%d");' % (name[:100], value)) + prnt(' return -1;') + prnt(' }') + + def _load_known_int_constant(self, module, funcname): + BType = self.ffi._typeof_locked("char[]")[0] + BFunc = self.ffi._typeof_locked("int(*)(char*)")[0] + function = module.load_function(BFunc, funcname) + p = self.ffi.new(BType, 256) + if function(p) 
< 0: + error = self.ffi.string(p) + if sys.version_info >= (3,): + error = str(error, 'utf-8') + raise VerificationError(error) + + def _enum_funcname(self, prefix, name): + # "$enum_$1" => "___D_enum____D_1" + name = name.replace('$', '___D_') + return '_cffi_e_%s_%s' % (prefix, name) + + def _generate_gen_enum_decl(self, tp, name, prefix='enum'): + if tp.partial: + for enumerator in tp.enumerators: + self._generate_gen_const(True, enumerator) + return + # + funcname = self._enum_funcname(prefix, name) + self.export_symbols.append(funcname) + prnt = self._prnt + prnt('int %s(char *out_error)' % funcname) + prnt('{') + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + self._check_int_constant_value(enumerator, enumvalue) + prnt(' return 0;') + prnt('}') + prnt() + + def _loading_gen_enum(self, tp, name, module, prefix='enum'): + if tp.partial: + enumvalues = [self._load_constant(True, tp, enumerator, module) + for enumerator in tp.enumerators] + tp.enumvalues = tuple(enumvalues) + tp.partial_resolved = True + else: + funcname = self._enum_funcname(prefix, name) + self._load_known_int_constant(module, funcname) + + def _loaded_gen_enum(self, tp, name, module, library): + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + setattr(library, enumerator, enumvalue) + type(library)._cffi_dir.append(enumerator) + + # ---------- + # macros: for now only for integers + + def _generate_gen_macro_decl(self, tp, name): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + self._generate_gen_const(True, name, check_value=check_value) + + _loading_gen_macro = _loaded_noop + + def _loaded_gen_macro(self, tp, name, module, library): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + value = self._load_constant(True, tp, name, module, + check_value=check_value) + setattr(library, name, value) + type(library)._cffi_dir.append(name) + + # ---------- + # global variables + + def 
_generate_gen_variable_decl(self, tp, name): + if isinstance(tp, model.ArrayType): + if tp.length_is_unknown(): + prnt = self._prnt + funcname = '_cffi_sizeof_%s' % (name,) + self.export_symbols.append(funcname) + prnt("size_t %s(void)" % funcname) + prnt("{") + prnt(" return sizeof(%s);" % (name,)) + prnt("}") + tp_ptr = model.PointerType(tp.item) + self._generate_gen_const(False, name, tp_ptr) + else: + tp_ptr = model.PointerType(tp) + self._generate_gen_const(False, name, tp_ptr, category='var') + + _loading_gen_variable = _loaded_noop + + def _loaded_gen_variable(self, tp, name, module, library): + if isinstance(tp, model.ArrayType): # int a[5] is "constant" in the + # sense that "a=..." is forbidden + if tp.length_is_unknown(): + funcname = '_cffi_sizeof_%s' % (name,) + BFunc = self.ffi._typeof_locked('size_t(*)(void)')[0] + function = module.load_function(BFunc, funcname) + size = function() + BItemType = self.ffi._get_cached_btype(tp.item) + length, rest = divmod(size, self.ffi.sizeof(BItemType)) + if rest != 0: + raise VerificationError( + "bad size: %r does not seem to be an array of %s" % + (name, tp.item)) + tp = tp.resolve_length(length) + tp_ptr = model.PointerType(tp.item) + value = self._load_constant(False, tp_ptr, name, module) + # 'value' is a which we have to replace with + # a if the N is actually known + if tp.length is not None: + BArray = self.ffi._get_cached_btype(tp) + value = self.ffi.cast(BArray, value) + setattr(library, name, value) + type(library)._cffi_dir.append(name) + return + # remove ptr= from the library instance, and replace + # it by a property on the class, which reads/writes into ptr[0]. 
+ funcname = '_cffi_var_%s' % name + BFunc = self.ffi._typeof_locked(tp.get_c_name('*(*)(void)', name))[0] + function = module.load_function(BFunc, funcname) + ptr = function() + def getter(library): + return ptr[0] + def setter(library, value): + ptr[0] = value + setattr(type(library), name, property(getter, setter)) + type(library)._cffi_dir.append(name) + +cffimod_header = r''' +#include +#include +#include +#include +#include /* XXX for ssize_t on some platforms */ + +/* this block of #ifs should be kept exactly identical between + c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py + and cffi/_cffi_include.h */ +#if defined(_MSC_VER) +# include /* for alloca() */ +# if _MSC_VER < 1600 /* MSVC < 2010 */ + typedef __int8 int8_t; + typedef __int16 int16_t; + typedef __int32 int32_t; + typedef __int64 int64_t; + typedef unsigned __int8 uint8_t; + typedef unsigned __int16 uint16_t; + typedef unsigned __int32 uint32_t; + typedef unsigned __int64 uint64_t; + typedef __int8 int_least8_t; + typedef __int16 int_least16_t; + typedef __int32 int_least32_t; + typedef __int64 int_least64_t; + typedef unsigned __int8 uint_least8_t; + typedef unsigned __int16 uint_least16_t; + typedef unsigned __int32 uint_least32_t; + typedef unsigned __int64 uint_least64_t; + typedef __int8 int_fast8_t; + typedef __int16 int_fast16_t; + typedef __int32 int_fast32_t; + typedef __int64 int_fast64_t; + typedef unsigned __int8 uint_fast8_t; + typedef unsigned __int16 uint_fast16_t; + typedef unsigned __int32 uint_fast32_t; + typedef unsigned __int64 uint_fast64_t; + typedef __int64 intmax_t; + typedef unsigned __int64 uintmax_t; +# else +# include +# endif +# if _MSC_VER < 1800 /* MSVC < 2013 */ +# ifndef __cplusplus + typedef unsigned char _Bool; +# endif +# endif +#else +# include +# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux) +# include +# endif +#endif +''' diff --git a/.venv/lib/python3.9/site-packages/cffi/verifier.py 
b/.venv/lib/python3.9/site-packages/cffi/verifier.py new file mode 100644 index 0000000..a500c78 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi/verifier.py @@ -0,0 +1,307 @@ +# +# DEPRECATED: implementation for ffi.verify() +# +import sys, os, binascii, shutil, io +from . import __version_verifier_modules__ +from . import ffiplatform +from .error import VerificationError + +if sys.version_info >= (3, 3): + import importlib.machinery + def _extension_suffixes(): + return importlib.machinery.EXTENSION_SUFFIXES[:] +else: + import imp + def _extension_suffixes(): + return [suffix for suffix, _, type in imp.get_suffixes() + if type == imp.C_EXTENSION] + + +if sys.version_info >= (3,): + NativeIO = io.StringIO +else: + class NativeIO(io.BytesIO): + def write(self, s): + if isinstance(s, unicode): + s = s.encode('ascii') + super(NativeIO, self).write(s) + + +class Verifier(object): + + def __init__(self, ffi, preamble, tmpdir=None, modulename=None, + ext_package=None, tag='', force_generic_engine=False, + source_extension='.c', flags=None, relative_to=None, **kwds): + if ffi._parser._uses_new_feature: + raise VerificationError( + "feature not supported with ffi.verify(), but only " + "with ffi.set_source(): %s" % (ffi._parser._uses_new_feature,)) + self.ffi = ffi + self.preamble = preamble + if not modulename: + flattened_kwds = ffiplatform.flatten(kwds) + vengine_class = _locate_engine_class(ffi, force_generic_engine) + self._vengine = vengine_class(self) + self._vengine.patch_extension_kwds(kwds) + self.flags = flags + self.kwds = self.make_relative_to(kwds, relative_to) + # + if modulename: + if tag: + raise TypeError("can't specify both 'modulename' and 'tag'") + else: + key = '\x00'.join(['%d.%d' % sys.version_info[:2], + __version_verifier_modules__, + preamble, flattened_kwds] + + ffi._cdefsources) + if sys.version_info >= (3,): + key = key.encode('utf-8') + k1 = hex(binascii.crc32(key[0::2]) & 0xffffffff) + k1 = k1.lstrip('0x').rstrip('L') + k2 = 
hex(binascii.crc32(key[1::2]) & 0xffffffff) + k2 = k2.lstrip('0').rstrip('L') + modulename = '_cffi_%s_%s%s%s' % (tag, self._vengine._class_key, + k1, k2) + suffix = _get_so_suffixes()[0] + self.tmpdir = tmpdir or _caller_dir_pycache() + self.sourcefilename = os.path.join(self.tmpdir, modulename + source_extension) + self.modulefilename = os.path.join(self.tmpdir, modulename + suffix) + self.ext_package = ext_package + self._has_source = False + self._has_module = False + + def write_source(self, file=None): + """Write the C source code. It is produced in 'self.sourcefilename', + which can be tweaked beforehand.""" + with self.ffi._lock: + if self._has_source and file is None: + raise VerificationError( + "source code already written") + self._write_source(file) + + def compile_module(self): + """Write the C source code (if not done already) and compile it. + This produces a dynamic link library in 'self.modulefilename'.""" + with self.ffi._lock: + if self._has_module: + raise VerificationError("module already compiled") + if not self._has_source: + self._write_source() + self._compile_module() + + def load_library(self): + """Get a C module from this Verifier instance. + Returns an instance of a FFILibrary class that behaves like the + objects returned by ffi.dlopen(), but that delegates all + operations to the C module. If necessary, the C code is written + and compiled first. 
+ """ + with self.ffi._lock: + if not self._has_module: + self._locate_module() + if not self._has_module: + if not self._has_source: + self._write_source() + self._compile_module() + return self._load_library() + + def get_module_name(self): + basename = os.path.basename(self.modulefilename) + # kill both the .so extension and the other .'s, as introduced + # by Python 3: 'basename.cpython-33m.so' + basename = basename.split('.', 1)[0] + # and the _d added in Python 2 debug builds --- but try to be + # conservative and not kill a legitimate _d + if basename.endswith('_d') and hasattr(sys, 'gettotalrefcount'): + basename = basename[:-2] + return basename + + def get_extension(self): + ffiplatform._hack_at_distutils() # backward compatibility hack + if not self._has_source: + with self.ffi._lock: + if not self._has_source: + self._write_source() + sourcename = ffiplatform.maybe_relative_path(self.sourcefilename) + modname = self.get_module_name() + return ffiplatform.get_extension(sourcename, modname, **self.kwds) + + def generates_python_module(self): + return self._vengine._gen_python_module + + def make_relative_to(self, kwds, relative_to): + if relative_to and os.path.dirname(relative_to): + dirname = os.path.dirname(relative_to) + kwds = kwds.copy() + for key in ffiplatform.LIST_OF_FILE_NAMES: + if key in kwds: + lst = kwds[key] + if not isinstance(lst, (list, tuple)): + raise TypeError("keyword '%s' should be a list or tuple" + % (key,)) + lst = [os.path.join(dirname, fn) for fn in lst] + kwds[key] = lst + return kwds + + # ---------- + + def _locate_module(self): + if not os.path.isfile(self.modulefilename): + if self.ext_package: + try: + pkg = __import__(self.ext_package, None, None, ['__doc__']) + except ImportError: + return # cannot import the package itself, give up + # (e.g. 
it might be called differently before installation) + path = pkg.__path__ + else: + path = None + filename = self._vengine.find_module(self.get_module_name(), path, + _get_so_suffixes()) + if filename is None: + return + self.modulefilename = filename + self._vengine.collect_types() + self._has_module = True + + def _write_source_to(self, file): + self._vengine._f = file + try: + self._vengine.write_source_to_f() + finally: + del self._vengine._f + + def _write_source(self, file=None): + if file is not None: + self._write_source_to(file) + else: + # Write our source file to an in memory file. + f = NativeIO() + self._write_source_to(f) + source_data = f.getvalue() + + # Determine if this matches the current file + if os.path.exists(self.sourcefilename): + with open(self.sourcefilename, "r") as fp: + needs_written = not (fp.read() == source_data) + else: + needs_written = True + + # Actually write the file out if it doesn't match + if needs_written: + _ensure_dir(self.sourcefilename) + with open(self.sourcefilename, "w") as fp: + fp.write(source_data) + + # Set this flag + self._has_source = True + + def _compile_module(self): + # compile this C source + tmpdir = os.path.dirname(self.sourcefilename) + outputfilename = ffiplatform.compile(tmpdir, self.get_extension()) + try: + same = ffiplatform.samefile(outputfilename, self.modulefilename) + except OSError: + same = False + if not same: + _ensure_dir(self.modulefilename) + shutil.move(outputfilename, self.modulefilename) + self._has_module = True + + def _load_library(self): + assert self._has_module + if self.flags is not None: + return self._vengine.load_library(self.flags) + else: + return self._vengine.load_library() + +# ____________________________________________________________ + +_FORCE_GENERIC_ENGINE = False # for tests + +def _locate_engine_class(ffi, force_generic_engine): + if _FORCE_GENERIC_ENGINE: + force_generic_engine = True + if not force_generic_engine: + if '__pypy__' in sys.builtin_module_names: 
+ force_generic_engine = True + else: + try: + import _cffi_backend + except ImportError: + _cffi_backend = '?' + if ffi._backend is not _cffi_backend: + force_generic_engine = True + if force_generic_engine: + from . import vengine_gen + return vengine_gen.VGenericEngine + else: + from . import vengine_cpy + return vengine_cpy.VCPythonEngine + +# ____________________________________________________________ + +_TMPDIR = None + +def _caller_dir_pycache(): + if _TMPDIR: + return _TMPDIR + result = os.environ.get('CFFI_TMPDIR') + if result: + return result + filename = sys._getframe(2).f_code.co_filename + return os.path.abspath(os.path.join(os.path.dirname(filename), + '__pycache__')) + +def set_tmpdir(dirname): + """Set the temporary directory to use instead of __pycache__.""" + global _TMPDIR + _TMPDIR = dirname + +def cleanup_tmpdir(tmpdir=None, keep_so=False): + """Clean up the temporary directory by removing all files in it + called `_cffi_*.{c,so}` as well as the `build` subdirectory.""" + tmpdir = tmpdir or _caller_dir_pycache() + try: + filelist = os.listdir(tmpdir) + except OSError: + return + if keep_so: + suffix = '.c' # only remove .c files + else: + suffix = _get_so_suffixes()[0].lower() + for fn in filelist: + if fn.lower().startswith('_cffi_') and ( + fn.lower().endswith(suffix) or fn.lower().endswith('.c')): + try: + os.unlink(os.path.join(tmpdir, fn)) + except OSError: + pass + clean_dir = [os.path.join(tmpdir, 'build')] + for dir in clean_dir: + try: + for fn in os.listdir(dir): + fn = os.path.join(dir, fn) + if os.path.isdir(fn): + clean_dir.append(fn) + else: + os.unlink(fn) + except OSError: + pass + +def _get_so_suffixes(): + suffixes = _extension_suffixes() + if not suffixes: + # bah, no C_EXTENSION available. 
Occurs on pypy without cpyext + if sys.platform == 'win32': + suffixes = [".pyd"] + else: + suffixes = [".so"] + + return suffixes + +def _ensure_dir(filename): + dirname = os.path.dirname(filename) + if dirname and not os.path.isdir(dirname): + os.makedirs(dirname) diff --git a/.venv/lib/python3.9/site-packages/charset_normalizer-2.0.12.dist-info/INSTALLER b/.venv/lib/python3.9/site-packages/charset_normalizer-2.0.12.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/charset_normalizer-2.0.12.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.9/site-packages/charset_normalizer-2.0.12.dist-info/LICENSE b/.venv/lib/python3.9/site-packages/charset_normalizer-2.0.12.dist-info/LICENSE new file mode 100644 index 0000000..ad82355 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/charset_normalizer-2.0.12.dist-info/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2019 TAHRI Ahmed R. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/.venv/lib/python3.9/site-packages/charset_normalizer-2.0.12.dist-info/METADATA b/.venv/lib/python3.9/site-packages/charset_normalizer-2.0.12.dist-info/METADATA new file mode 100644 index 0000000..1b04ed4 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/charset_normalizer-2.0.12.dist-info/METADATA @@ -0,0 +1,269 @@ +Metadata-Version: 2.1 +Name: charset-normalizer +Version: 2.0.12 +Summary: The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet. +Home-page: https://github.com/ousret/charset_normalizer +Author: Ahmed TAHRI @Ousret +Author-email: ahmed.tahri@cloudnursery.dev +License: MIT +Project-URL: Bug Reports, https://github.com/Ousret/charset_normalizer/issues +Project-URL: Documentation, https://charset-normalizer.readthedocs.io/en/latest +Keywords: encoding,i18n,txt,text,charset,charset-detector,normalization,unicode,chardet +Platform: UNKNOWN +Classifier: License :: OSI Approved :: MIT License +Classifier: Intended Audience :: Developers +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Topic :: Text Processing :: Linguistic +Classifier: Topic :: Utilities +Classifier: Programming 
Language :: Python :: Implementation :: PyPy +Classifier: Typing :: Typed +Requires-Python: >=3.5.0 +Description-Content-Type: text/markdown +License-File: LICENSE +Provides-Extra: unicode_backport +Requires-Dist: unicodedata2 ; extra == 'unicode_backport' + + +

Charset Detection, for Everyone 👋

+ +

+ The Real First Universal Charset Detector
+ + + + + + + + Download Count Total + +

+ +> A library that helps you read text from an unknown charset encoding.
Motivated by `chardet`, +> I'm trying to resolve the issue by taking a new approach. +> All IANA character set names for which the Python core library provides codecs are supported. + +

+ >>>>> 👉 Try Me Online Now, Then Adopt Me 👈 <<<<< +

+ +This project offers you an alternative to **Universal Charset Encoding Detector**, also known as **Chardet**. + +| Feature | [Chardet](https://github.com/chardet/chardet) | Charset Normalizer | [cChardet](https://github.com/PyYoshi/cChardet) | +| ------------- | :-------------: | :------------------: | :------------------: | +| `Fast` | ❌
| ✅
| ✅
| +| `Universal**` | ❌ | ✅ | ❌ | +| `Reliable` **without** distinguishable standards | ❌ | ✅ | ✅ | +| `Reliable` **with** distinguishable standards | ✅ | ✅ | ✅ | +| `Free & Open` | ✅ | ✅ | ✅ | +| `License` | LGPL-2.1 | MIT | MPL-1.1 +| `Native Python` | ✅ | ✅ | ❌ | +| `Detect spoken language` | ❌ | ✅ | N/A | +| `Supported Encoding` | 30 | :tada: [93](https://charset-normalizer.readthedocs.io/en/latest/user/support.html#supported-encodings) | 40 + +

+Reading Normalized TextCat Reading Text + +*\*\* : They are clearly using specific code for a specific encoding even if covering most of used one*
+Did you get there because of the logs? See [https://charset-normalizer.readthedocs.io/en/latest/user/miscellaneous.html](https://charset-normalizer.readthedocs.io/en/latest/user/miscellaneous.html)
+
+## ⭐ Your support
+
+*Fork, test-it, star-it, submit your ideas! We do listen.*
+
+## ⚡ Performance
+
+This package offers better performance than its counterpart Chardet. Here are some numbers.
+
+| Package | Accuracy | Mean per file (ms) | File per sec (est) |
+| ------------- | :-------------: | :------------------: | :------------------: |
+| [chardet](https://github.com/chardet/chardet) | 92 % | 220 ms | 5 file/sec |
+| charset-normalizer | **98 %** | **40 ms** | 25 file/sec |
+
+| Package | 99th percentile | 95th percentile | 50th percentile |
+| ------------- | :-------------: | :------------------: | :------------------: |
+| [chardet](https://github.com/chardet/chardet) | 1115 ms | 300 ms | 27 ms |
+| charset-normalizer | 460 ms | 240 ms | 18 ms |
+
+Chardet's performance on larger files (1MB+) is very poor. Expect a huge difference on large payloads.
+
+> Stats are generated using 400+ files using default parameters. More details on the files used; see GHA workflows.
+> And yes, these results might change at any time. The dataset can be updated to include more files.
+> The actual delays heavily depend on your CPU capabilities. The factors should remain the same.
+
+[cchardet](https://github.com/PyYoshi/cChardet) is a non-native (cpp binding) and unmaintained faster alternative with
+a better accuracy than chardet but lower than this package. If speed is the most important factor, you should try it.
+
+## ✨ Installation
+
+Using PyPi for latest stable
+```sh
+pip install charset-normalizer -U
+```
+
+If you want a more up-to-date `unicodedata` than the one available in your Python setup.
+```sh
+pip install charset-normalizer[unicode_backport] -U
+```
+
+## 🚀 Basic Usage
+
+### CLI
+This package comes with a CLI.
+ +``` +usage: normalizer [-h] [-v] [-a] [-n] [-m] [-r] [-f] [-t THRESHOLD] + file [file ...] + +The Real First Universal Charset Detector. Discover originating encoding used +on text file. Normalize text to unicode. + +positional arguments: + files File(s) to be analysed + +optional arguments: + -h, --help show this help message and exit + -v, --verbose Display complementary information about file if any. + Stdout will contain logs about the detection process. + -a, --with-alternative + Output complementary possibilities if any. Top-level + JSON WILL be a list. + -n, --normalize Permit to normalize input file. If not set, program + does not write anything. + -m, --minimal Only output the charset detected to STDOUT. Disabling + JSON output. + -r, --replace Replace file when trying to normalize it instead of + creating a new one. + -f, --force Replace file without asking if you are sure, use this + flag with caution. + -t THRESHOLD, --threshold THRESHOLD + Define a custom maximum amount of chaos allowed in + decoded content. 0. <= chaos <= 1. + --version Show version information and exit. +``` + +```bash +normalizer ./data/sample.1.fr.srt +``` + +:tada: Since version 1.4.0 the CLI produce easily usable stdout result in JSON format. 
+ +```json +{ + "path": "/home/default/projects/charset_normalizer/data/sample.1.fr.srt", + "encoding": "cp1252", + "encoding_aliases": [ + "1252", + "windows_1252" + ], + "alternative_encodings": [ + "cp1254", + "cp1256", + "cp1258", + "iso8859_14", + "iso8859_15", + "iso8859_16", + "iso8859_3", + "iso8859_9", + "latin_1", + "mbcs" + ], + "language": "French", + "alphabets": [ + "Basic Latin", + "Latin-1 Supplement" + ], + "has_sig_or_bom": false, + "chaos": 0.149, + "coherence": 97.152, + "unicode_path": null, + "is_preferred": true +} +``` + +### Python +*Just print out normalized text* +```python +from charset_normalizer import from_path + +results = from_path('./my_subtitle.srt') + +print(str(results.best())) +``` + +*Normalize any text file* +```python +from charset_normalizer import normalize +try: + normalize('./my_subtitle.srt') # should write to disk my_subtitle-***.srt +except IOError as e: + print('Sadly, we are unable to perform charset normalization.', str(e)) +``` + +*Upgrade your code without effort* +```python +from charset_normalizer import detect +``` + +The above code will behave the same as **chardet**. We ensure that we offer the best (reasonable) BC result possible. + +See the docs for advanced usage : [readthedocs.io](https://charset-normalizer.readthedocs.io/en/latest/) + +## 😇 Why + +When I started using Chardet, I noticed that it was not suited to my expectations, and I wanted to propose a +reliable alternative using a completely different method. Also! I never back down on a good challenge! + +I **don't care** about the **originating charset** encoding, because **two different tables** can +produce **two identical rendered string.** +What I want is to get readable text, the best I can. + +In a way, **I'm brute forcing text decoding.** How cool is that ? 😎 + +Don't confuse package **ftfy** with charset-normalizer or chardet. 
ftfy goal is to repair unicode string whereas charset-normalizer to convert raw file in unknown encoding to unicode. + +## 🍰 How + + - Discard all charset encoding table that could not fit the binary content. + - Measure chaos, or the mess once opened (by chunks) with a corresponding charset encoding. + - Extract matches with the lowest mess detected. + - Additionally, we measure coherence / probe for a language. + +**Wait a minute**, what is chaos/mess and coherence according to **YOU ?** + +*Chaos :* I opened hundred of text files, **written by humans**, with the wrong encoding table. **I observed**, then +**I established** some ground rules about **what is obvious** when **it seems like** a mess. + I know that my interpretation of what is chaotic is very subjective, feel free to contribute in order to + improve or rewrite it. + +*Coherence :* For each language there is on earth, we have computed ranked letter appearance occurrences (the best we can). So I thought +that intel is worth something here. So I use those records against decoded text to check if I can detect intelligent design. + +## ⚡ Known limitations + + - Language detection is unreliable when text contains two or more languages sharing identical letters. (eg. HTML (english tags) + Turkish content (Sharing Latin characters)) + - Every charset detector heavily depends on sufficient content. In common cases, do not bother run detection on very tiny content. + +## 👤 Contributing + +Contributions, issues and feature requests are very much welcome.
+Feel free to check [issues page](https://github.com/ousret/charset_normalizer/issues) if you want to contribute. + +## 📝 License + +Copyright © 2019 [Ahmed TAHRI @Ousret](https://github.com/Ousret).
+This project is [MIT](https://github.com/Ousret/charset_normalizer/blob/master/LICENSE) licensed. + +Characters frequencies used in this project © 2012 [Denny Vrandečić](http://simia.net/letters/) + + diff --git a/.venv/lib/python3.9/site-packages/charset_normalizer-2.0.12.dist-info/RECORD b/.venv/lib/python3.9/site-packages/charset_normalizer-2.0.12.dist-info/RECORD new file mode 100644 index 0000000..6a08344 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/charset_normalizer-2.0.12.dist-info/RECORD @@ -0,0 +1,33 @@ +../../../bin/normalizer,sha256=qFKkojBq4XvONBLmfzQiZBQ-lR2zQoVc40xq7GVJhLc,297 +charset_normalizer-2.0.12.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +charset_normalizer-2.0.12.dist-info/LICENSE,sha256=6zGgxaT7Cbik4yBV0lweX5w1iidS_vPNcgIT0cz-4kE,1070 +charset_normalizer-2.0.12.dist-info/METADATA,sha256=eX-U3s7nb6wcvXZFyM1mdBf1yz4I0msVBgNvLEscAbo,11713 +charset_normalizer-2.0.12.dist-info/RECORD,, +charset_normalizer-2.0.12.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 +charset_normalizer-2.0.12.dist-info/entry_points.txt,sha256=5AJq_EPtGGUwJPgQLnBZfbVr-FYCIwT0xP7dIEZO3NI,77 +charset_normalizer-2.0.12.dist-info/top_level.txt,sha256=7ASyzePr8_xuZWJsnqJjIBtyV8vhEo0wBCv1MPRRi3Q,19 +charset_normalizer/__init__.py,sha256=x2A2OW29MBcqdxsvy6t1wzkUlH3ma0guxL6ZCfS8J94,1790 +charset_normalizer/__pycache__/__init__.cpython-39.pyc,, +charset_normalizer/__pycache__/api.cpython-39.pyc,, +charset_normalizer/__pycache__/cd.cpython-39.pyc,, +charset_normalizer/__pycache__/constant.cpython-39.pyc,, +charset_normalizer/__pycache__/legacy.cpython-39.pyc,, +charset_normalizer/__pycache__/md.cpython-39.pyc,, +charset_normalizer/__pycache__/models.cpython-39.pyc,, +charset_normalizer/__pycache__/utils.cpython-39.pyc,, +charset_normalizer/__pycache__/version.cpython-39.pyc,, +charset_normalizer/api.py,sha256=r__Wz85F5pYOkRwEY5imXY_pCZ2Nil1DkdaAJY7T5o0,20303 
+charset_normalizer/assets/__init__.py,sha256=FPnfk8limZRb8ZIUQcTvPEcbuM1eqOdWGw0vbWGycDs,25485 +charset_normalizer/assets/__pycache__/__init__.cpython-39.pyc,, +charset_normalizer/cd.py,sha256=a9Kzzd9tHl_W08ExbCFMmRJqdo2k7EBQ8Z_3y9DmYsg,11076 +charset_normalizer/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +charset_normalizer/cli/__pycache__/__init__.cpython-39.pyc,, +charset_normalizer/cli/__pycache__/normalizer.cpython-39.pyc,, +charset_normalizer/cli/normalizer.py,sha256=LkeFIRc1l28rOgXpEby695x0bcKQv4D8z9FmA3Z2c3A,9364 +charset_normalizer/constant.py,sha256=51u_RS10I1vYVpBao__xHqf--HHNrR6me1A1se5r5Y0,19449 +charset_normalizer/legacy.py,sha256=XKeZOts_HdYQU_Jb3C9ZfOjY2CiUL132k9_nXer8gig,3384 +charset_normalizer/md.py,sha256=WEwnu2MyIiMeEaorRduqcTxGjIBclWIG3i-9_UL6LLs,18191 +charset_normalizer/models.py,sha256=XrGpVxfonhcilIWC1WeiP3-ZORGEe_RG3sgrfPLl9qM,13303 +charset_normalizer/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +charset_normalizer/utils.py,sha256=AWSL0z1B42IwdLfjX4ZMASA9cTUsTp0PweCdW98SI-4,9308 +charset_normalizer/version.py,sha256=uxO2cT0YIavQv4dQlNGmHPIOOwOa-exspxXi3IR7dck,80 diff --git a/.venv/lib/python3.9/site-packages/charset_normalizer-2.0.12.dist-info/WHEEL b/.venv/lib/python3.9/site-packages/charset_normalizer-2.0.12.dist-info/WHEEL new file mode 100644 index 0000000..becc9a6 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/charset_normalizer-2.0.12.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/.venv/lib/python3.9/site-packages/charset_normalizer-2.0.12.dist-info/entry_points.txt b/.venv/lib/python3.9/site-packages/charset_normalizer-2.0.12.dist-info/entry_points.txt new file mode 100644 index 0000000..a67f60b --- /dev/null +++ b/.venv/lib/python3.9/site-packages/charset_normalizer-2.0.12.dist-info/entry_points.txt @@ -0,0 +1,3 @@ +[console_scripts] +normalizer = 
charset_normalizer.cli.normalizer:cli_detect + diff --git a/.venv/lib/python3.9/site-packages/charset_normalizer-2.0.12.dist-info/top_level.txt b/.venv/lib/python3.9/site-packages/charset_normalizer-2.0.12.dist-info/top_level.txt new file mode 100644 index 0000000..66958f0 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/charset_normalizer-2.0.12.dist-info/top_level.txt @@ -0,0 +1 @@ +charset_normalizer diff --git a/.venv/lib/python3.9/site-packages/charset_normalizer/__init__.py b/.venv/lib/python3.9/site-packages/charset_normalizer/__init__.py new file mode 100644 index 0000000..1aea851 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/charset_normalizer/__init__.py @@ -0,0 +1,56 @@ +# -*- coding: utf_8 -*- +""" +Charset-Normalizer +~~~~~~~~~~~~~~ +The Real First Universal Charset Detector. +A library that helps you read text from an unknown charset encoding. +Motivated by chardet, This package is trying to resolve the issue by taking a new approach. +All IANA character set names for which the Python core library provides codecs are supported. + +Basic usage: + >>> from charset_normalizer import from_bytes + >>> results = from_bytes('Bсеки човек има право на образование. Oбразованието!'.encode('utf_8')) + >>> best_guess = results.best() + >>> str(best_guess) + 'Bсеки човек има право на образование. Oбразованието!' + +Others methods and usages are available - see the full documentation +at . +:copyright: (c) 2021 by Ahmed TAHRI +:license: MIT, see LICENSE for more details. 
+""" +import logging + +from .api import from_bytes, from_fp, from_path, normalize +from .legacy import ( + CharsetDetector, + CharsetDoctor, + CharsetNormalizerMatch, + CharsetNormalizerMatches, + detect, +) +from .models import CharsetMatch, CharsetMatches +from .utils import set_logging_handler +from .version import VERSION, __version__ + +__all__ = ( + "from_fp", + "from_path", + "from_bytes", + "normalize", + "detect", + "CharsetMatch", + "CharsetMatches", + "CharsetNormalizerMatch", + "CharsetNormalizerMatches", + "CharsetDetector", + "CharsetDoctor", + "__version__", + "VERSION", + "set_logging_handler", +) + +# Attach a NullHandler to the top level logger by default +# https://docs.python.org/3.3/howto/logging.html#configuring-logging-for-a-library + +logging.getLogger("charset_normalizer").addHandler(logging.NullHandler()) diff --git a/.venv/lib/python3.9/site-packages/charset_normalizer/api.py b/.venv/lib/python3.9/site-packages/charset_normalizer/api.py new file mode 100644 index 0000000..bdc8ed9 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/charset_normalizer/api.py @@ -0,0 +1,608 @@ +import logging +from os.path import basename, splitext +from typing import BinaryIO, List, Optional, Set + +try: + from os import PathLike +except ImportError: # pragma: no cover + PathLike = str # type: ignore + +from .cd import ( + coherence_ratio, + encoding_languages, + mb_encoding_languages, + merge_coherence_ratios, +) +from .constant import IANA_SUPPORTED, TOO_BIG_SEQUENCE, TOO_SMALL_SEQUENCE, TRACE +from .md import mess_ratio +from .models import CharsetMatch, CharsetMatches +from .utils import ( + any_specified_encoding, + iana_name, + identify_sig_or_bom, + is_cp_similar, + is_multi_byte_encoding, + should_strip_sig_or_bom, +) + +# Will most likely be controversial +# logging.addLevelName(TRACE, "TRACE") +logger = logging.getLogger("charset_normalizer") +explain_handler = logging.StreamHandler() +explain_handler.setFormatter( + 
    logging.Formatter("%(asctime)s | %(levelname)s | %(message)s")
)


def from_bytes(
    sequences: bytes,
    steps: int = 5,
    chunk_size: int = 512,
    threshold: float = 0.2,
    cp_isolation: List[str] = None,
    cp_exclusion: List[str] = None,
    preemptive_behaviour: bool = True,
    explain: bool = False,
) -> CharsetMatches:
    """
    Given a raw bytes sequence, return the best possibles charset usable to render str objects.
    If there is no results, it is a strong indicator that the source is binary/not text.
    By default, the process will extract 5 blocs of 512o each to assess the mess and coherence of a given sequence.
    And will give up a particular code page after 20% of measured mess. Those criteria are customizable at will.

    The preemptive behavior DOES NOT replace the traditional detection workflow, it prioritize a particular code page
    but never take it for granted. Can improve the performance.

    You may want to focus your attention to some code page or/and not others, use cp_isolation and cp_exclusion for that
    purpose.

    This function will strip the SIG in the payload/sequence every time except on UTF-16, UTF-32.
    By default the library does not setup any handler other than the NullHandler, if you choose to set the 'explain'
    toggle to True it will alter the logger configuration to add a StreamHandler that is suitable for debugging.
    Custom logging format and handler can be set manually.
    """

    if not isinstance(sequences, (bytearray, bytes)):
        raise TypeError(
            "Expected object of type bytes or bytearray, got: {0}".format(
                type(sequences)
            )
        )

    if explain:
        # Remember the level so it can be restored on every return path below.
        previous_logger_level = logger.level  # type: int
        logger.addHandler(explain_handler)
        logger.setLevel(TRACE)

    length = len(sequences)  # type: int

    if length == 0:
        logger.debug("Encoding detection on empty bytes, assuming utf_8 intention.")
        if explain:
            logger.removeHandler(explain_handler)
            logger.setLevel(previous_logger_level or logging.WARNING)
        return CharsetMatches([CharsetMatch(sequences, "utf_8", 0.0, False, [], "")])

    if cp_isolation is not None:
        logger.log(
            TRACE,
            "cp_isolation is set. use this flag for debugging purpose. "
            "limited list of encoding allowed : %s.",
            ", ".join(cp_isolation),
        )
        cp_isolation = [iana_name(cp, False) for cp in cp_isolation]
    else:
        cp_isolation = []

    if cp_exclusion is not None:
        logger.log(
            TRACE,
            "cp_exclusion is set. use this flag for debugging purpose. "
            "limited list of encoding excluded : %s.",
            ", ".join(cp_exclusion),
        )
        cp_exclusion = [iana_name(cp, False) for cp in cp_exclusion]
    else:
        cp_exclusion = []

    # Shrink the sampling plan when the payload is smaller than steps*chunk_size.
    if length <= (chunk_size * steps):
        logger.log(
            TRACE,
            "override steps (%i) and chunk_size (%i) as content does not fit (%i byte(s) given) parameters.",
            steps,
            chunk_size,
            length,
        )
        steps = 1
        chunk_size = length

    if steps > 1 and length / steps < chunk_size:
        chunk_size = int(length / steps)

    is_too_small_sequence = len(sequences) < TOO_SMALL_SEQUENCE  # type: bool
    is_too_large_sequence = len(sequences) >= TOO_BIG_SEQUENCE  # type: bool

    if is_too_small_sequence:
        logger.log(
            TRACE,
            "Trying to detect encoding from a tiny portion of ({}) byte(s).".format(
                length
            ),
        )
    elif is_too_large_sequence:
        logger.log(
            TRACE,
            "Using lazy str decoding because the payload is quite large, ({}) byte(s).".format(
                length
            ),
        )

    prioritized_encodings = []  # type: List[str]

    # A declared encoding (e.g. XML prolog / HTML meta) is tried first when
    # preemptive behaviour is on, but never trusted blindly.
    specified_encoding = (
        any_specified_encoding(sequences) if preemptive_behaviour else None
    )  # type: Optional[str]

    if specified_encoding is not None:
        prioritized_encodings.append(specified_encoding)
        logger.log(
            TRACE,
            "Detected declarative mark in sequence. Priority +1 given for %s.",
            specified_encoding,
        )

    tested = set()  # type: Set[str]
    tested_but_hard_failure = []  # type: List[str]
    tested_but_soft_failure = []  # type: List[str]

    fallback_ascii = None  # type: Optional[CharsetMatch]
    fallback_u8 = None  # type: Optional[CharsetMatch]
    fallback_specified = None  # type: Optional[CharsetMatch]

    results = CharsetMatches()  # type: CharsetMatches

    sig_encoding, sig_payload = identify_sig_or_bom(sequences)

    if sig_encoding is not None:
        prioritized_encodings.append(sig_encoding)
        logger.log(
            TRACE,
            "Detected a SIG or BOM mark on first %i byte(s). Priority +1 given for %s.",
            len(sig_payload),
            sig_encoding,
        )

    prioritized_encodings.append("ascii")

    if "utf_8" not in prioritized_encodings:
        prioritized_encodings.append("utf_8")

    for encoding_iana in prioritized_encodings + IANA_SUPPORTED:

        if cp_isolation and encoding_iana not in cp_isolation:
            continue

        if cp_exclusion and encoding_iana in cp_exclusion:
            continue

        if encoding_iana in tested:
            continue

        tested.add(encoding_iana)

        decoded_payload = None  # type: Optional[str]
        bom_or_sig_available = sig_encoding == encoding_iana  # type: bool
        strip_sig_or_bom = bom_or_sig_available and should_strip_sig_or_bom(
            encoding_iana
        )  # type: bool

        if encoding_iana in {"utf_16", "utf_32"} and not bom_or_sig_available:
            logger.log(
                TRACE,
                "Encoding %s wont be tested as-is because it require a BOM. Will try some sub-encoder LE/BE.",
                encoding_iana,
            )
            continue

        try:
            is_multi_byte_decoder = is_multi_byte_encoding(encoding_iana)  # type: bool
        except (ModuleNotFoundError, ImportError):
            logger.log(
                TRACE,
                "Encoding %s does not provide an IncrementalDecoder",
                encoding_iana,
            )
            continue

        try:
            # Large single-byte payloads are probed on a 500 kB prefix only;
            # the full decode is deferred (see the "final lookup" below).
            if is_too_large_sequence and is_multi_byte_decoder is False:
                str(
                    sequences[: int(50e4)]
                    if strip_sig_or_bom is False
                    else sequences[len(sig_payload) : int(50e4)],
                    encoding=encoding_iana,
                )
            else:
                decoded_payload = str(
                    sequences
                    if strip_sig_or_bom is False
                    else sequences[len(sig_payload) :],
                    encoding=encoding_iana,
                )
        except (UnicodeDecodeError, LookupError) as e:
            if not isinstance(e, LookupError):
                logger.log(
                    TRACE,
                    "Code page %s does not fit given bytes sequence at ALL. %s",
                    encoding_iana,
                    str(e),
                )
            tested_but_hard_failure.append(encoding_iana)
            continue

        # Skip code pages deemed too similar to one that already soft-failed.
        similar_soft_failure_test = False  # type: bool

        for encoding_soft_failed in tested_but_soft_failure:
            if is_cp_similar(encoding_iana, encoding_soft_failed):
                similar_soft_failure_test = True
                break

        if similar_soft_failure_test:
            logger.log(
                TRACE,
                "%s is deemed too similar to code page %s and was consider unsuited already. Continuing!",
                encoding_iana,
                encoding_soft_failed,
            )
            continue

        r_ = range(
            0 if not bom_or_sig_available else len(sig_payload),
            length,
            int(length / steps),
        )

        multi_byte_bonus = (
            is_multi_byte_decoder
            and decoded_payload is not None
            and len(decoded_payload) < length
        )  # type: bool

        if multi_byte_bonus:
            logger.log(
                TRACE,
                "Code page %s is a multi byte encoding table and it appear that at least one character "
                "was encoded using n-bytes.",
                encoding_iana,
            )

        # Give up on this code page once a quarter (min 2) of the sampled
        # chunks exceed the mess threshold.
        max_chunk_gave_up = int(len(r_) / 4)  # type: int

        max_chunk_gave_up = max(max_chunk_gave_up, 2)
        early_stop_count = 0  # type: int
        lazy_str_hard_failure = False

        md_chunks = []  # type: List[str]
        md_ratios = []

        for i in r_:
            if i + chunk_size > length + 8:
                continue

            cut_sequence = sequences[i : i + chunk_size]

            if bom_or_sig_available and strip_sig_or_bom is False:
                cut_sequence = sig_payload + cut_sequence

            try:
                chunk = cut_sequence.decode(
                    encoding_iana,
                    errors="ignore" if is_multi_byte_decoder else "strict",
                )  # type: str
            except UnicodeDecodeError as e:  # Lazy str loading may have missed something there
                logger.log(
                    TRACE,
                    "LazyStr Loading: After MD chunk decode, code page %s does not fit given bytes sequence at ALL. %s",
                    encoding_iana,
                    str(e),
                )
                early_stop_count = max_chunk_gave_up
                lazy_str_hard_failure = True
                break

            # multi-byte bad cutting detector and adjustment
            # not the cleanest way to perform that fix but clever enough for now.
            if is_multi_byte_decoder and i > 0 and sequences[i] >= 0x80:

                chunk_partial_size_chk = min(chunk_size, 16)  # type: int

                if (
                    decoded_payload
                    and chunk[:chunk_partial_size_chk] not in decoded_payload
                ):
                    # Walk back up to 3 bytes to re-align on a character boundary.
                    for j in range(i, i - 4, -1):
                        cut_sequence = sequences[j : i + chunk_size]

                        if bom_or_sig_available and strip_sig_or_bom is False:
                            cut_sequence = sig_payload + cut_sequence

                        chunk = cut_sequence.decode(encoding_iana, errors="ignore")

                        if chunk[:chunk_partial_size_chk] in decoded_payload:
                            break

            md_chunks.append(chunk)

            md_ratios.append(mess_ratio(chunk, threshold))

            if md_ratios[-1] >= threshold:
                early_stop_count += 1

            if (early_stop_count >= max_chunk_gave_up) or (
                bom_or_sig_available and strip_sig_or_bom is False
            ):
                break

        # We might want to check the sequence again with the whole content
        # Only if initial MD tests passes
        if (
            not lazy_str_hard_failure
            and is_too_large_sequence
            and not is_multi_byte_decoder
        ):
            try:
                sequences[int(50e3) :].decode(encoding_iana, errors="strict")
            except UnicodeDecodeError as e:
                logger.log(
                    TRACE,
                    "LazyStr Loading: After final lookup, code page %s does not fit given bytes sequence at ALL. %s",
                    encoding_iana,
                    str(e),
                )
                tested_but_hard_failure.append(encoding_iana)
                continue

        mean_mess_ratio = (
            sum(md_ratios) / len(md_ratios) if md_ratios else 0.0
        )  # type: float
        if mean_mess_ratio >= threshold or early_stop_count >= max_chunk_gave_up:
            tested_but_soft_failure.append(encoding_iana)
            logger.log(
                TRACE,
                "%s was excluded because of initial chaos probing. Gave up %i time(s). "
                "Computed mean chaos is %f %%.",
                encoding_iana,
                early_stop_count,
                round(mean_mess_ratio * 100, ndigits=3),
            )
            # Preparing those fallbacks in case we got nothing.
            if (
                encoding_iana in ["ascii", "utf_8", specified_encoding]
                and not lazy_str_hard_failure
            ):
                fallback_entry = CharsetMatch(
                    sequences, encoding_iana, threshold, False, [], decoded_payload
                )
                if encoding_iana == specified_encoding:
                    fallback_specified = fallback_entry
                elif encoding_iana == "ascii":
                    fallback_ascii = fallback_entry
                else:
                    fallback_u8 = fallback_entry
            continue

        logger.log(
            TRACE,
            "%s passed initial chaos probing. Mean measured chaos is %f %%",
            encoding_iana,
            round(mean_mess_ratio * 100, ndigits=3),
        )

        if not is_multi_byte_decoder:
            target_languages = encoding_languages(encoding_iana)  # type: List[str]
        else:
            target_languages = mb_encoding_languages(encoding_iana)

        if target_languages:
            logger.log(
                TRACE,
                "{} should target any language(s) of {}".format(
                    encoding_iana, str(target_languages)
                ),
            )

        cd_ratios = []

        # We shall skip the CD when it's about ASCII
        # Most of the time it's not relevant to run "language-detection" on it.
        if encoding_iana != "ascii":
            for chunk in md_chunks:
                chunk_languages = coherence_ratio(
                    chunk, 0.1, ",".join(target_languages) if target_languages else None
                )

                cd_ratios.append(chunk_languages)

        cd_ratios_merged = merge_coherence_ratios(cd_ratios)

        if cd_ratios_merged:
            logger.log(
                TRACE,
                "We detected language {} using {}".format(
                    cd_ratios_merged, encoding_iana
                ),
            )

        results.append(
            CharsetMatch(
                sequences,
                encoding_iana,
                mean_mess_ratio,
                bom_or_sig_available,
                cd_ratios_merged,
                decoded_payload,
            )
        )

        # Early exit: a very clean match on a prioritized encoding wins outright.
        if (
            encoding_iana in [specified_encoding, "ascii", "utf_8"]
            and mean_mess_ratio < 0.1
        ):
            logger.debug(
                "Encoding detection: %s is most likely the one.", encoding_iana
            )
            if explain:
                logger.removeHandler(explain_handler)
                logger.setLevel(previous_logger_level)
            return CharsetMatches([results[encoding_iana]])

        if encoding_iana == sig_encoding:
            logger.debug(
                "Encoding detection: %s is most likely the one as we detected a BOM or SIG within "
                "the beginning of the sequence.",
                encoding_iana,
            )
            if explain:
                logger.removeHandler(explain_handler)
                logger.setLevel(previous_logger_level)
            return CharsetMatches([results[encoding_iana]])

    if len(results) == 0:
        if fallback_u8 or fallback_ascii or fallback_specified:
            logger.log(
                TRACE,
                "Nothing got out of the detection process. Using ASCII/UTF-8/Specified fallback.",
            )

        if fallback_specified:
            logger.debug(
                "Encoding detection: %s will be used as a fallback match",
                fallback_specified.encoding,
            )
            results.append(fallback_specified)
        elif (
            # NOTE(review): the final '(fallback_u8 is not None)' clause subsumes
            # the first two conditions — presumably intentional "prefer utf_8
            # whenever available"; verify against upstream before simplifying.
            (fallback_u8 and fallback_ascii is None)
            or (
                fallback_u8
                and fallback_ascii
                and fallback_u8.fingerprint != fallback_ascii.fingerprint
            )
            or (fallback_u8 is not None)
        ):
            logger.debug("Encoding detection: utf_8 will be used as a fallback match")
            results.append(fallback_u8)
        elif fallback_ascii:
            logger.debug("Encoding detection: ascii will be used as a fallback match")
            results.append(fallback_ascii)

    if results:
        logger.debug(
            "Encoding detection: Found %s as plausible (best-candidate) for content. With %i alternatives.",
            results.best().encoding,  # type: ignore
            len(results) - 1,
        )
    else:
        logger.debug("Encoding detection: Unable to determine any suitable charset.")

    if explain:
        logger.removeHandler(explain_handler)
        logger.setLevel(previous_logger_level)

    return results


def from_fp(
    fp: BinaryIO,
    steps: int = 5,
    chunk_size: int = 512,
    threshold: float = 0.20,
    cp_isolation: List[str] = None,
    cp_exclusion: List[str] = None,
    preemptive_behaviour: bool = True,
    explain: bool = False,
) -> CharsetMatches:
    """
    Same thing than the function from_bytes but using a file pointer that is already ready.
    Will not close the file pointer.
    """
    return from_bytes(
        fp.read(),
        steps,
        chunk_size,
        threshold,
        cp_isolation,
        cp_exclusion,
        preemptive_behaviour,
        explain,
    )


def from_path(
    path: PathLike,
    steps: int = 5,
    chunk_size: int = 512,
    threshold: float = 0.20,
    cp_isolation: List[str] = None,
    cp_exclusion: List[str] = None,
    preemptive_behaviour: bool = True,
    explain: bool = False,
) -> CharsetMatches:
    """
    Same thing than the function from_bytes but with one extra step. Opening and reading given file path in binary mode.
    Can raise IOError.
    """
    with open(path, "rb") as fp:
        return from_fp(
            fp,
            steps,
            chunk_size,
            threshold,
            cp_isolation,
            cp_exclusion,
            preemptive_behaviour,
            explain,
        )


def normalize(
    path: PathLike,
    steps: int = 5,
    chunk_size: int = 512,
    threshold: float = 0.20,
    cp_isolation: List[str] = None,
    cp_exclusion: List[str] = None,
    preemptive_behaviour: bool = True,
) -> CharsetMatch:
    """
    Take a (text-based) file path and try to create another file next to it, this time using UTF-8.

    The new file is named '<stem>-<detected encoding><ext>'; raises IOError
    when no charset fits the content.
    """
    results = from_path(
        path,
        steps,
        chunk_size,
        threshold,
        cp_isolation,
        cp_exclusion,
        preemptive_behaviour,
    )

    filename = basename(path)
    target_extensions = list(splitext(filename))

    if len(results) == 0:
        raise IOError(
            'Unable to normalize "{}", no encoding charset seems to fit.'.format(
                filename
            )
        )

    result = results.best()

    # Embed the detected encoding into the output filename's stem.
    target_extensions[0] += "-" + result.encoding  # type: ignore

    with open(
        "{}".format(str(path).replace(filename, "".join(target_extensions))), "wb"
    ) as fp:
        fp.write(result.output())  # type: ignore

    return result  # type: ignore
diff --git a/.venv/lib/python3.9/site-packages/charset_normalizer/assets/__init__.py b/.venv/lib/python3.9/site-packages/charset_normalizer/assets/__init__.py
new file mode 100644
index 0000000..b2e56ff
--- /dev/null
+++ b/.venv/lib/python3.9/site-packages/charset_normalizer/assets/__init__.py
@@ -0,0 +1,1244 @@
# -*- coding: utf_8 -*-
from collections import OrderedDict

# Per-language letter lists ranked by frequency, used for coherence scoring.
# NOTE(review): this literal continues beyond the end of this chunk.
FREQUENCIES = OrderedDict(
    [
        (
            "English",
            [
                "e",
                "a",
                "t",
                "i",
                "o",
                "n",
                "s",
                "r",
                "h",
                "l",
                "d",
                "c",
                "u",
                "m",
                "f",
                "p",
                "g",
                "w",
                "y",
                "b",
                "v",
                "k",
                "x",
                "j",
                "z",
                "q",
            ],
        ),
        (
            "German",
            [
                "e",
                "n",
                "i",
                "r",
                "s",
                "t",
                "a",
                "d",
                "h",
                "u",
                "l",
                "g",
                "o",
                "c",
                "m",
                "b",
                "f",
                "k",
                "w",
                "z",
                "p",
                "v",
                "ü",
                "ä",
                "ö",
                "j",
            ],
        ),
        (
            "French",
            [
                "e",
                "a",
                "s",
"n", + "i", + "t", + "r", + "l", + "u", + "o", + "d", + "c", + "p", + "m", + "é", + "v", + "g", + "f", + "b", + "h", + "q", + "à", + "x", + "è", + "y", + "j", + ], + ), + ( + "Dutch", + [ + "e", + "n", + "a", + "i", + "r", + "t", + "o", + "d", + "s", + "l", + "g", + "h", + "v", + "m", + "u", + "k", + "c", + "p", + "b", + "w", + "j", + "z", + "f", + "y", + "x", + "ë", + ], + ), + ( + "Italian", + [ + "e", + "i", + "a", + "o", + "n", + "l", + "t", + "r", + "s", + "c", + "d", + "u", + "p", + "m", + "g", + "v", + "f", + "b", + "z", + "h", + "q", + "è", + "à", + "k", + "y", + "ò", + ], + ), + ( + "Polish", + [ + "a", + "i", + "o", + "e", + "n", + "r", + "z", + "w", + "s", + "c", + "t", + "k", + "y", + "d", + "p", + "m", + "u", + "l", + "j", + "ł", + "g", + "b", + "h", + "ą", + "ę", + "ó", + ], + ), + ( + "Spanish", + [ + "e", + "a", + "o", + "n", + "s", + "r", + "i", + "l", + "d", + "t", + "c", + "u", + "m", + "p", + "b", + "g", + "v", + "f", + "y", + "ó", + "h", + "q", + "í", + "j", + "z", + "á", + ], + ), + ( + "Russian", + [ + "о", + "а", + "е", + "и", + "н", + "с", + "т", + "р", + "в", + "л", + "к", + "м", + "д", + "п", + "у", + "г", + "я", + "ы", + "з", + "б", + "й", + "ь", + "ч", + "х", + "ж", + "ц", + ], + ), + ( + "Japanese", + [ + "の", + "に", + "る", + "た", + "は", + "ー", + "と", + "し", + "を", + "で", + "て", + "が", + "い", + "ン", + "れ", + "な", + "年", + "ス", + "っ", + "ル", + "か", + "ら", + "あ", + "さ", + "も", + "り", + ], + ), + ( + "Portuguese", + [ + "a", + "e", + "o", + "s", + "i", + "r", + "d", + "n", + "t", + "m", + "u", + "c", + "l", + "p", + "g", + "v", + "b", + "f", + "h", + "ã", + "q", + "é", + "ç", + "á", + "z", + "í", + ], + ), + ( + "Swedish", + [ + "e", + "a", + "n", + "r", + "t", + "s", + "i", + "l", + "d", + "o", + "m", + "k", + "g", + "v", + "h", + "f", + "u", + "p", + "ä", + "c", + "b", + "ö", + "å", + "y", + "j", + "x", + ], + ), + ( + "Chinese", + [ + "的", + "一", + "是", + "不", + "了", + "在", + "人", + "有", + "我", + "他", + "这", + "个", + "们", + "中", + "来", 
+ "上", + "大", + "为", + "和", + "国", + "地", + "到", + "以", + "说", + "时", + "要", + "就", + "出", + "会", + ], + ), + ( + "Ukrainian", + [ + "о", + "а", + "н", + "і", + "и", + "р", + "в", + "т", + "е", + "с", + "к", + "л", + "у", + "д", + "м", + "п", + "з", + "я", + "ь", + "б", + "г", + "й", + "ч", + "х", + "ц", + "ї", + ], + ), + ( + "Norwegian", + [ + "e", + "r", + "n", + "t", + "a", + "s", + "i", + "o", + "l", + "d", + "g", + "k", + "m", + "v", + "f", + "p", + "u", + "b", + "h", + "å", + "y", + "j", + "ø", + "c", + "æ", + "w", + ], + ), + ( + "Finnish", + [ + "a", + "i", + "n", + "t", + "e", + "s", + "l", + "o", + "u", + "k", + "ä", + "m", + "r", + "v", + "j", + "h", + "p", + "y", + "d", + "ö", + "g", + "c", + "b", + "f", + "w", + "z", + ], + ), + ( + "Vietnamese", + [ + "n", + "h", + "t", + "i", + "c", + "g", + "a", + "o", + "u", + "m", + "l", + "r", + "à", + "đ", + "s", + "e", + "v", + "p", + "b", + "y", + "ư", + "d", + "á", + "k", + "ộ", + "ế", + ], + ), + ( + "Czech", + [ + "o", + "e", + "a", + "n", + "t", + "s", + "i", + "l", + "v", + "r", + "k", + "d", + "u", + "m", + "p", + "í", + "c", + "h", + "z", + "á", + "y", + "j", + "b", + "ě", + "é", + "ř", + ], + ), + ( + "Hungarian", + [ + "e", + "a", + "t", + "l", + "s", + "n", + "k", + "r", + "i", + "o", + "z", + "á", + "é", + "g", + "m", + "b", + "y", + "v", + "d", + "h", + "u", + "p", + "j", + "ö", + "f", + "c", + ], + ), + ( + "Korean", + [ + "이", + "다", + "에", + "의", + "는", + "로", + "하", + "을", + "가", + "고", + "지", + "서", + "한", + "은", + "기", + "으", + "년", + "대", + "사", + "시", + "를", + "리", + "도", + "인", + "스", + "일", + ], + ), + ( + "Indonesian", + [ + "a", + "n", + "e", + "i", + "r", + "t", + "u", + "s", + "d", + "k", + "m", + "l", + "g", + "p", + "b", + "o", + "h", + "y", + "j", + "c", + "w", + "f", + "v", + "z", + "x", + "q", + ], + ), + ( + "Turkish", + [ + "a", + "e", + "i", + "n", + "r", + "l", + "ı", + "k", + "d", + "t", + "s", + "m", + "y", + "u", + "o", + "b", + "ü", + "ş", + "v", + "g", + "z", + "h", + 
"c", + "p", + "ç", + "ğ", + ], + ), + ( + "Romanian", + [ + "e", + "i", + "a", + "r", + "n", + "t", + "u", + "l", + "o", + "c", + "s", + "d", + "p", + "m", + "ă", + "f", + "v", + "î", + "g", + "b", + "ș", + "ț", + "z", + "h", + "â", + "j", + ], + ), + ( + "Farsi", + [ + "ا", + "ی", + "ر", + "د", + "ن", + "ه", + "و", + "م", + "ت", + "ب", + "س", + "ل", + "ک", + "ش", + "ز", + "ف", + "گ", + "ع", + "خ", + "ق", + "ج", + "آ", + "پ", + "ح", + "ط", + "ص", + ], + ), + ( + "Arabic", + [ + "ا", + "ل", + "ي", + "م", + "و", + "ن", + "ر", + "ت", + "ب", + "ة", + "ع", + "د", + "س", + "ف", + "ه", + "ك", + "ق", + "أ", + "ح", + "ج", + "ش", + "ط", + "ص", + "ى", + "خ", + "إ", + ], + ), + ( + "Danish", + [ + "e", + "r", + "n", + "t", + "a", + "i", + "s", + "d", + "l", + "o", + "g", + "m", + "k", + "f", + "v", + "u", + "b", + "h", + "p", + "å", + "y", + "ø", + "æ", + "c", + "j", + "w", + ], + ), + ( + "Serbian", + [ + "а", + "и", + "о", + "е", + "н", + "р", + "с", + "у", + "т", + "к", + "ј", + "в", + "д", + "м", + "п", + "л", + "г", + "з", + "б", + "a", + "i", + "e", + "o", + "n", + "ц", + "ш", + ], + ), + ( + "Lithuanian", + [ + "i", + "a", + "s", + "o", + "r", + "e", + "t", + "n", + "u", + "k", + "m", + "l", + "p", + "v", + "d", + "j", + "g", + "ė", + "b", + "y", + "ų", + "š", + "ž", + "c", + "ą", + "į", + ], + ), + ( + "Slovene", + [ + "e", + "a", + "i", + "o", + "n", + "r", + "s", + "l", + "t", + "j", + "v", + "k", + "d", + "p", + "m", + "u", + "z", + "b", + "g", + "h", + "č", + "c", + "š", + "ž", + "f", + "y", + ], + ), + ( + "Slovak", + [ + "o", + "a", + "e", + "n", + "i", + "r", + "v", + "t", + "s", + "l", + "k", + "d", + "m", + "p", + "u", + "c", + "h", + "j", + "b", + "z", + "á", + "y", + "ý", + "í", + "č", + "é", + ], + ), + ( + "Hebrew", + [ + "י", + "ו", + "ה", + "ל", + "ר", + "ב", + "ת", + "מ", + "א", + "ש", + "נ", + "ע", + "ם", + "ד", + "ק", + "ח", + "פ", + "ס", + "כ", + "ג", + "ט", + "צ", + "ן", + "ז", + "ך", + ], + ), + ( + "Bulgarian", + [ + "а", + "и", + "о", + "е", + 
"н", + "т", + "р", + "с", + "в", + "л", + "к", + "д", + "п", + "м", + "з", + "г", + "я", + "ъ", + "у", + "б", + "ч", + "ц", + "й", + "ж", + "щ", + "х", + ], + ), + ( + "Croatian", + [ + "a", + "i", + "o", + "e", + "n", + "r", + "j", + "s", + "t", + "u", + "k", + "l", + "v", + "d", + "m", + "p", + "g", + "z", + "b", + "c", + "č", + "h", + "š", + "ž", + "ć", + "f", + ], + ), + ( + "Hindi", + [ + "क", + "र", + "स", + "न", + "त", + "म", + "ह", + "प", + "य", + "ल", + "व", + "ज", + "द", + "ग", + "ब", + "श", + "ट", + "अ", + "ए", + "थ", + "भ", + "ड", + "च", + "ध", + "ष", + "इ", + ], + ), + ( + "Estonian", + [ + "a", + "i", + "e", + "s", + "t", + "l", + "u", + "n", + "o", + "k", + "r", + "d", + "m", + "v", + "g", + "p", + "j", + "h", + "ä", + "b", + "õ", + "ü", + "f", + "c", + "ö", + "y", + ], + ), + ( + "Simple English", + [ + "e", + "a", + "t", + "i", + "o", + "n", + "s", + "r", + "h", + "l", + "d", + "c", + "m", + "u", + "f", + "p", + "g", + "w", + "b", + "y", + "v", + "k", + "j", + "x", + "z", + "q", + ], + ), + ( + "Thai", + [ + "า", + "น", + "ร", + "อ", + "ก", + "เ", + "ง", + "ม", + "ย", + "ล", + "ว", + "ด", + "ท", + "ส", + "ต", + "ะ", + "ป", + "บ", + "ค", + "ห", + "แ", + "จ", + "พ", + "ช", + "ข", + "ใ", + ], + ), + ( + "Greek", + [ + "α", + "τ", + "ο", + "ι", + "ε", + "ν", + "ρ", + "σ", + "κ", + "η", + "π", + "ς", + "υ", + "μ", + "λ", + "ί", + "ό", + "ά", + "γ", + "έ", + "δ", + "ή", + "ω", + "χ", + "θ", + "ύ", + ], + ), + ( + "Tamil", + [ + "க", + "த", + "ப", + "ட", + "ர", + "ம", + "ல", + "ன", + "வ", + "ற", + "ய", + "ள", + "ச", + "ந", + "இ", + "ண", + "அ", + "ஆ", + "ழ", + "ங", + "எ", + "உ", + "ஒ", + "ஸ", + ], + ), + ( + "Classical Chinese", + [ + "之", + "年", + "為", + "也", + "以", + "一", + "人", + "其", + "者", + "國", + "有", + "二", + "十", + "於", + "曰", + "三", + "不", + "大", + "而", + "子", + "中", + "五", + "四", + ], + ), + ( + "Kazakh", + [ + "а", + "ы", + "е", + "н", + "т", + "р", + "л", + "і", + "д", + "с", + "м", + "қ", + "к", + "о", + "б", + "и", + "у", + "ғ", + "ж", + 
"ң", + "з", + "ш", + "й", + "п", + "г", + "ө", + ], + ), + ] +) diff --git a/.venv/lib/python3.9/site-packages/charset_normalizer/cd.py b/.venv/lib/python3.9/site-packages/charset_normalizer/cd.py new file mode 100644 index 0000000..8429a0e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/charset_normalizer/cd.py @@ -0,0 +1,340 @@ +import importlib +from codecs import IncrementalDecoder +from collections import Counter, OrderedDict +from functools import lru_cache +from typing import Dict, List, Optional, Tuple + +from .assets import FREQUENCIES +from .constant import KO_NAMES, LANGUAGE_SUPPORTED_COUNT, TOO_SMALL_SEQUENCE, ZH_NAMES +from .md import is_suspiciously_successive_range +from .models import CoherenceMatches +from .utils import ( + is_accentuated, + is_latin, + is_multi_byte_encoding, + is_unicode_range_secondary, + unicode_range, +) + + +def encoding_unicode_range(iana_name: str) -> List[str]: + """ + Return associated unicode ranges in a single byte code page. + """ + if is_multi_byte_encoding(iana_name): + raise IOError("Function not supported on multi-byte code page") + + decoder = importlib.import_module("encodings.{}".format(iana_name)).IncrementalDecoder # type: ignore + + p = decoder(errors="ignore") # type: IncrementalDecoder + seen_ranges = {} # type: Dict[str, int] + character_count = 0 # type: int + + for i in range(0x40, 0xFF): + chunk = p.decode(bytes([i])) # type: str + + if chunk: + character_range = unicode_range(chunk) # type: Optional[str] + + if character_range is None: + continue + + if is_unicode_range_secondary(character_range) is False: + if character_range not in seen_ranges: + seen_ranges[character_range] = 0 + seen_ranges[character_range] += 1 + character_count += 1 + + return sorted( + [ + character_range + for character_range in seen_ranges + if seen_ranges[character_range] / character_count >= 0.15 + ] + ) + + +def unicode_range_languages(primary_range: str) -> List[str]: + """ + Return inferred languages used with a 
unicode range. + """ + languages = [] # type: List[str] + + for language, characters in FREQUENCIES.items(): + for character in characters: + if unicode_range(character) == primary_range: + languages.append(language) + break + + return languages + + +@lru_cache() +def encoding_languages(iana_name: str) -> List[str]: + """ + Single-byte encoding language association. Some code page are heavily linked to particular language(s). + This function does the correspondence. + """ + unicode_ranges = encoding_unicode_range(iana_name) # type: List[str] + primary_range = None # type: Optional[str] + + for specified_range in unicode_ranges: + if "Latin" not in specified_range: + primary_range = specified_range + break + + if primary_range is None: + return ["Latin Based"] + + return unicode_range_languages(primary_range) + + +@lru_cache() +def mb_encoding_languages(iana_name: str) -> List[str]: + """ + Multi-byte encoding language association. Some code page are heavily linked to particular language(s). + This function does the correspondence. + """ + if ( + iana_name.startswith("shift_") + or iana_name.startswith("iso2022_jp") + or iana_name.startswith("euc_j") + or iana_name == "cp932" + ): + return ["Japanese"] + if iana_name.startswith("gb") or iana_name in ZH_NAMES: + return ["Chinese", "Classical Chinese"] + if iana_name.startswith("iso2022_kr") or iana_name in KO_NAMES: + return ["Korean"] + + return [] + + +@lru_cache(maxsize=LANGUAGE_SUPPORTED_COUNT) +def get_target_features(language: str) -> Tuple[bool, bool]: + """ + Determine main aspects from a supported language if it contains accents and if is pure Latin. 
+ """ + target_have_accents = False # type: bool + target_pure_latin = True # type: bool + + for character in FREQUENCIES[language]: + if not target_have_accents and is_accentuated(character): + target_have_accents = True + if target_pure_latin and is_latin(character) is False: + target_pure_latin = False + + return target_have_accents, target_pure_latin + + +def alphabet_languages( + characters: List[str], ignore_non_latin: bool = False +) -> List[str]: + """ + Return associated languages associated to given characters. + """ + languages = [] # type: List[Tuple[str, float]] + + source_have_accents = any(is_accentuated(character) for character in characters) + + for language, language_characters in FREQUENCIES.items(): + + target_have_accents, target_pure_latin = get_target_features(language) + + if ignore_non_latin and target_pure_latin is False: + continue + + if target_have_accents is False and source_have_accents: + continue + + character_count = len(language_characters) # type: int + + character_match_count = len( + [c for c in language_characters if c in characters] + ) # type: int + + ratio = character_match_count / character_count # type: float + + if ratio >= 0.2: + languages.append((language, ratio)) + + languages = sorted(languages, key=lambda x: x[1], reverse=True) + + return [compatible_language[0] for compatible_language in languages] + + +def characters_popularity_compare( + language: str, ordered_characters: List[str] +) -> float: + """ + Determine if a ordered characters list (by occurrence from most appearance to rarest) match a particular language. + The result is a ratio between 0. (absolutely no correspondence) and 1. (near perfect fit). + Beware that is function is not strict on the match in order to ease the detection. (Meaning close match is 1.) 
+ """ + if language not in FREQUENCIES: + raise ValueError("{} not available".format(language)) + + character_approved_count = 0 # type: int + + for character in ordered_characters: + if character not in FREQUENCIES[language]: + continue + + characters_before_source = FREQUENCIES[language][ + 0 : FREQUENCIES[language].index(character) + ] # type: List[str] + characters_after_source = FREQUENCIES[language][ + FREQUENCIES[language].index(character) : + ] # type: List[str] + + characters_before = ordered_characters[ + 0 : ordered_characters.index(character) + ] # type: List[str] + characters_after = ordered_characters[ + ordered_characters.index(character) : + ] # type: List[str] + + before_match_count = [ + e in characters_before for e in characters_before_source + ].count( + True + ) # type: int + after_match_count = [ + e in characters_after for e in characters_after_source + ].count( + True + ) # type: int + + if len(characters_before_source) == 0 and before_match_count <= 4: + character_approved_count += 1 + continue + + if len(characters_after_source) == 0 and after_match_count <= 4: + character_approved_count += 1 + continue + + if ( + before_match_count / len(characters_before_source) >= 0.4 + or after_match_count / len(characters_after_source) >= 0.4 + ): + character_approved_count += 1 + continue + + return character_approved_count / len(ordered_characters) + + +def alpha_unicode_split(decoded_sequence: str) -> List[str]: + """ + Given a decoded text sequence, return a list of str. Unicode range / alphabet separation. + Ex. a text containing English/Latin with a bit a Hebrew will return two items in the resulting list; + One containing the latin letters and the other hebrew. 
+ """ + layers = OrderedDict() # type: Dict[str, str] + + for character in decoded_sequence: + if character.isalpha() is False: + continue + + character_range = unicode_range(character) # type: Optional[str] + + if character_range is None: + continue + + layer_target_range = None # type: Optional[str] + + for discovered_range in layers: + if ( + is_suspiciously_successive_range(discovered_range, character_range) + is False + ): + layer_target_range = discovered_range + break + + if layer_target_range is None: + layer_target_range = character_range + + if layer_target_range not in layers: + layers[layer_target_range] = character.lower() + continue + + layers[layer_target_range] += character.lower() + + return list(layers.values()) + + +def merge_coherence_ratios(results: List[CoherenceMatches]) -> CoherenceMatches: + """ + This function merge results previously given by the function coherence_ratio. + The return type is the same as coherence_ratio. + """ + per_language_ratios = OrderedDict() # type: Dict[str, List[float]] + for result in results: + for sub_result in result: + language, ratio = sub_result + if language not in per_language_ratios: + per_language_ratios[language] = [ratio] + continue + per_language_ratios[language].append(ratio) + + merge = [ + ( + language, + round( + sum(per_language_ratios[language]) / len(per_language_ratios[language]), + 4, + ), + ) + for language in per_language_ratios + ] + + return sorted(merge, key=lambda x: x[1], reverse=True) + + +@lru_cache(maxsize=2048) +def coherence_ratio( + decoded_sequence: str, threshold: float = 0.1, lg_inclusion: Optional[str] = None +) -> CoherenceMatches: + """ + Detect ANY language that can be identified in given sequence. The sequence will be analysed by layers. + A layer = Character extraction by alphabets/ranges. 
+ """ + + results = [] # type: List[Tuple[str, float]] + ignore_non_latin = False # type: bool + + sufficient_match_count = 0 # type: int + + lg_inclusion_list = lg_inclusion.split(",") if lg_inclusion is not None else [] + if "Latin Based" in lg_inclusion_list: + ignore_non_latin = True + lg_inclusion_list.remove("Latin Based") + + for layer in alpha_unicode_split(decoded_sequence): + sequence_frequencies = Counter(layer) # type: Counter + most_common = sequence_frequencies.most_common() + + character_count = sum(o for c, o in most_common) # type: int + + if character_count <= TOO_SMALL_SEQUENCE: + continue + + popular_character_ordered = [c for c, o in most_common] # type: List[str] + + for language in lg_inclusion_list or alphabet_languages( + popular_character_ordered, ignore_non_latin + ): + ratio = characters_popularity_compare( + language, popular_character_ordered + ) # type: float + + if ratio < threshold: + continue + elif ratio >= 0.8: + sufficient_match_count += 1 + + results.append((language, round(ratio, 4))) + + if sufficient_match_count >= 3: + break + + return sorted(results, key=lambda x: x[1], reverse=True) diff --git a/.venv/lib/python3.9/site-packages/charset_normalizer/cli/__init__.py b/.venv/lib/python3.9/site-packages/charset_normalizer/cli/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.9/site-packages/charset_normalizer/cli/normalizer.py b/.venv/lib/python3.9/site-packages/charset_normalizer/cli/normalizer.py new file mode 100644 index 0000000..5f912c9 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/charset_normalizer/cli/normalizer.py @@ -0,0 +1,290 @@ +import argparse +import sys +from json import dumps +from os.path import abspath +from platform import python_version +from typing import List + +from charset_normalizer import from_fp +from charset_normalizer.models import CliDetectionResult +from charset_normalizer.version import __version__ + + +def query_yes_no(question: str, default: str = 
"yes") -> bool: + """Ask a yes/no question via input() and return their answer. + + "question" is a string that is presented to the user. + "default" is the presumed answer if the user just hits . + It must be "yes" (the default), "no" or None (meaning + an answer is required of the user). + + The "answer" return value is True for "yes" or False for "no". + + Credit goes to (c) https://stackoverflow.com/questions/3041986/apt-command-line-interface-like-yes-no-input + """ + valid = {"yes": True, "y": True, "ye": True, "no": False, "n": False} + if default is None: + prompt = " [y/n] " + elif default == "yes": + prompt = " [Y/n] " + elif default == "no": + prompt = " [y/N] " + else: + raise ValueError("invalid default answer: '%s'" % default) + + while True: + sys.stdout.write(question + prompt) + choice = input().lower() + if default is not None and choice == "": + return valid[default] + elif choice in valid: + return valid[choice] + else: + sys.stdout.write("Please respond with 'yes' or 'no' " "(or 'y' or 'n').\n") + + +def cli_detect(argv: List[str] = None) -> int: + """ + CLI assistant using ARGV and ArgumentParser + :param argv: + :return: 0 if everything is fine, anything else equal trouble + """ + parser = argparse.ArgumentParser( + description="The Real First Universal Charset Detector. " + "Discover originating encoding used on text file. " + "Normalize text to unicode." + ) + + parser.add_argument( + "files", type=argparse.FileType("rb"), nargs="+", help="File(s) to be analysed" + ) + parser.add_argument( + "-v", + "--verbose", + action="store_true", + default=False, + dest="verbose", + help="Display complementary information about file if any. " + "Stdout will contain logs about the detection process.", + ) + parser.add_argument( + "-a", + "--with-alternative", + action="store_true", + default=False, + dest="alternatives", + help="Output complementary possibilities if any. 
Top-level JSON WILL be a list.", + ) + parser.add_argument( + "-n", + "--normalize", + action="store_true", + default=False, + dest="normalize", + help="Permit to normalize input file. If not set, program does not write anything.", + ) + parser.add_argument( + "-m", + "--minimal", + action="store_true", + default=False, + dest="minimal", + help="Only output the charset detected to STDOUT. Disabling JSON output.", + ) + parser.add_argument( + "-r", + "--replace", + action="store_true", + default=False, + dest="replace", + help="Replace file when trying to normalize it instead of creating a new one.", + ) + parser.add_argument( + "-f", + "--force", + action="store_true", + default=False, + dest="force", + help="Replace file without asking if you are sure, use this flag with caution.", + ) + parser.add_argument( + "-t", + "--threshold", + action="store", + default=0.1, + type=float, + dest="threshold", + help="Define a custom maximum amount of chaos allowed in decoded content. 0. <= chaos <= 1.", + ) + parser.add_argument( + "--version", + action="version", + version="Charset-Normalizer {} - Python {}".format( + __version__, python_version() + ), + help="Show version information and exit.", + ) + + args = parser.parse_args(argv) + + if args.replace is True and args.normalize is False: + print("Use --replace in addition of --normalize only.", file=sys.stderr) + return 1 + + if args.force is True and args.replace is False: + print("Use --force in addition of --replace only.", file=sys.stderr) + return 1 + + if args.threshold < 0.0 or args.threshold > 1.0: + print("--threshold VALUE should be between 0. AND 1.", file=sys.stderr) + return 1 + + x_ = [] + + for my_file in args.files: + + matches = from_fp(my_file, threshold=args.threshold, explain=args.verbose) + + best_guess = matches.best() + + if best_guess is None: + print( + 'Unable to identify originating encoding for "{}". {}'.format( + my_file.name, + "Maybe try increasing maximum amount of chaos." 
+ if args.threshold < 1.0 + else "", + ), + file=sys.stderr, + ) + x_.append( + CliDetectionResult( + abspath(my_file.name), + None, + [], + [], + "Unknown", + [], + False, + 1.0, + 0.0, + None, + True, + ) + ) + else: + x_.append( + CliDetectionResult( + abspath(my_file.name), + best_guess.encoding, + best_guess.encoding_aliases, + [ + cp + for cp in best_guess.could_be_from_charset + if cp != best_guess.encoding + ], + best_guess.language, + best_guess.alphabets, + best_guess.bom, + best_guess.percent_chaos, + best_guess.percent_coherence, + None, + True, + ) + ) + + if len(matches) > 1 and args.alternatives: + for el in matches: + if el != best_guess: + x_.append( + CliDetectionResult( + abspath(my_file.name), + el.encoding, + el.encoding_aliases, + [ + cp + for cp in el.could_be_from_charset + if cp != el.encoding + ], + el.language, + el.alphabets, + el.bom, + el.percent_chaos, + el.percent_coherence, + None, + False, + ) + ) + + if args.normalize is True: + + if best_guess.encoding.startswith("utf") is True: + print( + '"{}" file does not need to be normalized, as it already came from unicode.'.format( + my_file.name + ), + file=sys.stderr, + ) + if my_file.closed is False: + my_file.close() + continue + + o_ = my_file.name.split(".") # type: List[str] + + if args.replace is False: + o_.insert(-1, best_guess.encoding) + if my_file.closed is False: + my_file.close() + elif ( + args.force is False + and query_yes_no( + 'Are you sure to normalize "{}" by replacing it ?'.format( + my_file.name + ), + "no", + ) + is False + ): + if my_file.closed is False: + my_file.close() + continue + + try: + x_[0].unicode_path = abspath("./{}".format(".".join(o_))) + + with open(x_[0].unicode_path, "w", encoding="utf-8") as fp: + fp.write(str(best_guess)) + except IOError as e: + print(str(e), file=sys.stderr) + if my_file.closed is False: + my_file.close() + return 2 + + if my_file.closed is False: + my_file.close() + + if args.minimal is False: + print( + dumps( + 
[el.__dict__ for el in x_] if len(x_) > 1 else x_[0].__dict__, + ensure_ascii=True, + indent=4, + ) + ) + else: + for my_file in args.files: + print( + ", ".join( + [ + el.encoding or "undefined" + for el in x_ + if el.path == abspath(my_file.name) + ] + ) + ) + + return 0 + + +if __name__ == "__main__": + cli_detect() diff --git a/.venv/lib/python3.9/site-packages/charset_normalizer/constant.py b/.venv/lib/python3.9/site-packages/charset_normalizer/constant.py new file mode 100644 index 0000000..c32f5cf --- /dev/null +++ b/.venv/lib/python3.9/site-packages/charset_normalizer/constant.py @@ -0,0 +1,503 @@ +from codecs import BOM_UTF8, BOM_UTF16_BE, BOM_UTF16_LE, BOM_UTF32_BE, BOM_UTF32_LE +from collections import OrderedDict +from encodings.aliases import aliases +from re import IGNORECASE, compile as re_compile +from typing import Dict, List, Set, Union + +from .assets import FREQUENCIES + +# Contain for each eligible encoding a list of/item bytes SIG/BOM +ENCODING_MARKS = OrderedDict( + [ + ("utf_8", BOM_UTF8), + ( + "utf_7", + [ + b"\x2b\x2f\x76\x38", + b"\x2b\x2f\x76\x39", + b"\x2b\x2f\x76\x2b", + b"\x2b\x2f\x76\x2f", + b"\x2b\x2f\x76\x38\x2d", + ], + ), + ("gb18030", b"\x84\x31\x95\x33"), + ("utf_32", [BOM_UTF32_BE, BOM_UTF32_LE]), + ("utf_16", [BOM_UTF16_BE, BOM_UTF16_LE]), + ] +) # type: Dict[str, Union[bytes, List[bytes]]] + +TOO_SMALL_SEQUENCE = 32 # type: int +TOO_BIG_SEQUENCE = int(10e6) # type: int + +UTF8_MAXIMAL_ALLOCATION = 1112064 # type: int + +UNICODE_RANGES_COMBINED = { + "Control character": range(31 + 1), + "Basic Latin": range(32, 127 + 1), + "Latin-1 Supplement": range(128, 255 + 1), + "Latin Extended-A": range(256, 383 + 1), + "Latin Extended-B": range(384, 591 + 1), + "IPA Extensions": range(592, 687 + 1), + "Spacing Modifier Letters": range(688, 767 + 1), + "Combining Diacritical Marks": range(768, 879 + 1), + "Greek and Coptic": range(880, 1023 + 1), + "Cyrillic": range(1024, 1279 + 1), + "Cyrillic Supplement": range(1280, 1327 + 1), + 
"Armenian": range(1328, 1423 + 1), + "Hebrew": range(1424, 1535 + 1), + "Arabic": range(1536, 1791 + 1), + "Syriac": range(1792, 1871 + 1), + "Arabic Supplement": range(1872, 1919 + 1), + "Thaana": range(1920, 1983 + 1), + "NKo": range(1984, 2047 + 1), + "Samaritan": range(2048, 2111 + 1), + "Mandaic": range(2112, 2143 + 1), + "Syriac Supplement": range(2144, 2159 + 1), + "Arabic Extended-A": range(2208, 2303 + 1), + "Devanagari": range(2304, 2431 + 1), + "Bengali": range(2432, 2559 + 1), + "Gurmukhi": range(2560, 2687 + 1), + "Gujarati": range(2688, 2815 + 1), + "Oriya": range(2816, 2943 + 1), + "Tamil": range(2944, 3071 + 1), + "Telugu": range(3072, 3199 + 1), + "Kannada": range(3200, 3327 + 1), + "Malayalam": range(3328, 3455 + 1), + "Sinhala": range(3456, 3583 + 1), + "Thai": range(3584, 3711 + 1), + "Lao": range(3712, 3839 + 1), + "Tibetan": range(3840, 4095 + 1), + "Myanmar": range(4096, 4255 + 1), + "Georgian": range(4256, 4351 + 1), + "Hangul Jamo": range(4352, 4607 + 1), + "Ethiopic": range(4608, 4991 + 1), + "Ethiopic Supplement": range(4992, 5023 + 1), + "Cherokee": range(5024, 5119 + 1), + "Unified Canadian Aboriginal Syllabics": range(5120, 5759 + 1), + "Ogham": range(5760, 5791 + 1), + "Runic": range(5792, 5887 + 1), + "Tagalog": range(5888, 5919 + 1), + "Hanunoo": range(5920, 5951 + 1), + "Buhid": range(5952, 5983 + 1), + "Tagbanwa": range(5984, 6015 + 1), + "Khmer": range(6016, 6143 + 1), + "Mongolian": range(6144, 6319 + 1), + "Unified Canadian Aboriginal Syllabics Extended": range(6320, 6399 + 1), + "Limbu": range(6400, 6479 + 1), + "Tai Le": range(6480, 6527 + 1), + "New Tai Lue": range(6528, 6623 + 1), + "Khmer Symbols": range(6624, 6655 + 1), + "Buginese": range(6656, 6687 + 1), + "Tai Tham": range(6688, 6831 + 1), + "Combining Diacritical Marks Extended": range(6832, 6911 + 1), + "Balinese": range(6912, 7039 + 1), + "Sundanese": range(7040, 7103 + 1), + "Batak": range(7104, 7167 + 1), + "Lepcha": range(7168, 7247 + 1), + "Ol Chiki": 
range(7248, 7295 + 1), + "Cyrillic Extended C": range(7296, 7311 + 1), + "Sundanese Supplement": range(7360, 7375 + 1), + "Vedic Extensions": range(7376, 7423 + 1), + "Phonetic Extensions": range(7424, 7551 + 1), + "Phonetic Extensions Supplement": range(7552, 7615 + 1), + "Combining Diacritical Marks Supplement": range(7616, 7679 + 1), + "Latin Extended Additional": range(7680, 7935 + 1), + "Greek Extended": range(7936, 8191 + 1), + "General Punctuation": range(8192, 8303 + 1), + "Superscripts and Subscripts": range(8304, 8351 + 1), + "Currency Symbols": range(8352, 8399 + 1), + "Combining Diacritical Marks for Symbols": range(8400, 8447 + 1), + "Letterlike Symbols": range(8448, 8527 + 1), + "Number Forms": range(8528, 8591 + 1), + "Arrows": range(8592, 8703 + 1), + "Mathematical Operators": range(8704, 8959 + 1), + "Miscellaneous Technical": range(8960, 9215 + 1), + "Control Pictures": range(9216, 9279 + 1), + "Optical Character Recognition": range(9280, 9311 + 1), + "Enclosed Alphanumerics": range(9312, 9471 + 1), + "Box Drawing": range(9472, 9599 + 1), + "Block Elements": range(9600, 9631 + 1), + "Geometric Shapes": range(9632, 9727 + 1), + "Miscellaneous Symbols": range(9728, 9983 + 1), + "Dingbats": range(9984, 10175 + 1), + "Miscellaneous Mathematical Symbols-A": range(10176, 10223 + 1), + "Supplemental Arrows-A": range(10224, 10239 + 1), + "Braille Patterns": range(10240, 10495 + 1), + "Supplemental Arrows-B": range(10496, 10623 + 1), + "Miscellaneous Mathematical Symbols-B": range(10624, 10751 + 1), + "Supplemental Mathematical Operators": range(10752, 11007 + 1), + "Miscellaneous Symbols and Arrows": range(11008, 11263 + 1), + "Glagolitic": range(11264, 11359 + 1), + "Latin Extended-C": range(11360, 11391 + 1), + "Coptic": range(11392, 11519 + 1), + "Georgian Supplement": range(11520, 11567 + 1), + "Tifinagh": range(11568, 11647 + 1), + "Ethiopic Extended": range(11648, 11743 + 1), + "Cyrillic Extended-A": range(11744, 11775 + 1), + "Supplemental 
Punctuation": range(11776, 11903 + 1), + "CJK Radicals Supplement": range(11904, 12031 + 1), + "Kangxi Radicals": range(12032, 12255 + 1), + "Ideographic Description Characters": range(12272, 12287 + 1), + "CJK Symbols and Punctuation": range(12288, 12351 + 1), + "Hiragana": range(12352, 12447 + 1), + "Katakana": range(12448, 12543 + 1), + "Bopomofo": range(12544, 12591 + 1), + "Hangul Compatibility Jamo": range(12592, 12687 + 1), + "Kanbun": range(12688, 12703 + 1), + "Bopomofo Extended": range(12704, 12735 + 1), + "CJK Strokes": range(12736, 12783 + 1), + "Katakana Phonetic Extensions": range(12784, 12799 + 1), + "Enclosed CJK Letters and Months": range(12800, 13055 + 1), + "CJK Compatibility": range(13056, 13311 + 1), + "CJK Unified Ideographs Extension A": range(13312, 19903 + 1), + "Yijing Hexagram Symbols": range(19904, 19967 + 1), + "CJK Unified Ideographs": range(19968, 40959 + 1), + "Yi Syllables": range(40960, 42127 + 1), + "Yi Radicals": range(42128, 42191 + 1), + "Lisu": range(42192, 42239 + 1), + "Vai": range(42240, 42559 + 1), + "Cyrillic Extended-B": range(42560, 42655 + 1), + "Bamum": range(42656, 42751 + 1), + "Modifier Tone Letters": range(42752, 42783 + 1), + "Latin Extended-D": range(42784, 43007 + 1), + "Syloti Nagri": range(43008, 43055 + 1), + "Common Indic Number Forms": range(43056, 43071 + 1), + "Phags-pa": range(43072, 43135 + 1), + "Saurashtra": range(43136, 43231 + 1), + "Devanagari Extended": range(43232, 43263 + 1), + "Kayah Li": range(43264, 43311 + 1), + "Rejang": range(43312, 43359 + 1), + "Hangul Jamo Extended-A": range(43360, 43391 + 1), + "Javanese": range(43392, 43487 + 1), + "Myanmar Extended-B": range(43488, 43519 + 1), + "Cham": range(43520, 43615 + 1), + "Myanmar Extended-A": range(43616, 43647 + 1), + "Tai Viet": range(43648, 43743 + 1), + "Meetei Mayek Extensions": range(43744, 43775 + 1), + "Ethiopic Extended-A": range(43776, 43823 + 1), + "Latin Extended-E": range(43824, 43887 + 1), + "Cherokee Supplement": range(43888, 
43967 + 1), + "Meetei Mayek": range(43968, 44031 + 1), + "Hangul Syllables": range(44032, 55215 + 1), + "Hangul Jamo Extended-B": range(55216, 55295 + 1), + "High Surrogates": range(55296, 56191 + 1), + "High Private Use Surrogates": range(56192, 56319 + 1), + "Low Surrogates": range(56320, 57343 + 1), + "Private Use Area": range(57344, 63743 + 1), + "CJK Compatibility Ideographs": range(63744, 64255 + 1), + "Alphabetic Presentation Forms": range(64256, 64335 + 1), + "Arabic Presentation Forms-A": range(64336, 65023 + 1), + "Variation Selectors": range(65024, 65039 + 1), + "Vertical Forms": range(65040, 65055 + 1), + "Combining Half Marks": range(65056, 65071 + 1), + "CJK Compatibility Forms": range(65072, 65103 + 1), + "Small Form Variants": range(65104, 65135 + 1), + "Arabic Presentation Forms-B": range(65136, 65279 + 1), + "Halfwidth and Fullwidth Forms": range(65280, 65519 + 1), + "Specials": range(65520, 65535 + 1), + "Linear B Syllabary": range(65536, 65663 + 1), + "Linear B Ideograms": range(65664, 65791 + 1), + "Aegean Numbers": range(65792, 65855 + 1), + "Ancient Greek Numbers": range(65856, 65935 + 1), + "Ancient Symbols": range(65936, 65999 + 1), + "Phaistos Disc": range(66000, 66047 + 1), + "Lycian": range(66176, 66207 + 1), + "Carian": range(66208, 66271 + 1), + "Coptic Epact Numbers": range(66272, 66303 + 1), + "Old Italic": range(66304, 66351 + 1), + "Gothic": range(66352, 66383 + 1), + "Old Permic": range(66384, 66431 + 1), + "Ugaritic": range(66432, 66463 + 1), + "Old Persian": range(66464, 66527 + 1), + "Deseret": range(66560, 66639 + 1), + "Shavian": range(66640, 66687 + 1), + "Osmanya": range(66688, 66735 + 1), + "Osage": range(66736, 66815 + 1), + "Elbasan": range(66816, 66863 + 1), + "Caucasian Albanian": range(66864, 66927 + 1), + "Linear A": range(67072, 67455 + 1), + "Cypriot Syllabary": range(67584, 67647 + 1), + "Imperial Aramaic": range(67648, 67679 + 1), + "Palmyrene": range(67680, 67711 + 1), + "Nabataean": range(67712, 67759 + 1), + 
"Hatran": range(67808, 67839 + 1), + "Phoenician": range(67840, 67871 + 1), + "Lydian": range(67872, 67903 + 1), + "Meroitic Hieroglyphs": range(67968, 67999 + 1), + "Meroitic Cursive": range(68000, 68095 + 1), + "Kharoshthi": range(68096, 68191 + 1), + "Old South Arabian": range(68192, 68223 + 1), + "Old North Arabian": range(68224, 68255 + 1), + "Manichaean": range(68288, 68351 + 1), + "Avestan": range(68352, 68415 + 1), + "Inscriptional Parthian": range(68416, 68447 + 1), + "Inscriptional Pahlavi": range(68448, 68479 + 1), + "Psalter Pahlavi": range(68480, 68527 + 1), + "Old Turkic": range(68608, 68687 + 1), + "Old Hungarian": range(68736, 68863 + 1), + "Rumi Numeral Symbols": range(69216, 69247 + 1), + "Brahmi": range(69632, 69759 + 1), + "Kaithi": range(69760, 69839 + 1), + "Sora Sompeng": range(69840, 69887 + 1), + "Chakma": range(69888, 69967 + 1), + "Mahajani": range(69968, 70015 + 1), + "Sharada": range(70016, 70111 + 1), + "Sinhala Archaic Numbers": range(70112, 70143 + 1), + "Khojki": range(70144, 70223 + 1), + "Multani": range(70272, 70319 + 1), + "Khudawadi": range(70320, 70399 + 1), + "Grantha": range(70400, 70527 + 1), + "Newa": range(70656, 70783 + 1), + "Tirhuta": range(70784, 70879 + 1), + "Siddham": range(71040, 71167 + 1), + "Modi": range(71168, 71263 + 1), + "Mongolian Supplement": range(71264, 71295 + 1), + "Takri": range(71296, 71375 + 1), + "Ahom": range(71424, 71487 + 1), + "Warang Citi": range(71840, 71935 + 1), + "Zanabazar Square": range(72192, 72271 + 1), + "Soyombo": range(72272, 72367 + 1), + "Pau Cin Hau": range(72384, 72447 + 1), + "Bhaiksuki": range(72704, 72815 + 1), + "Marchen": range(72816, 72895 + 1), + "Masaram Gondi": range(72960, 73055 + 1), + "Cuneiform": range(73728, 74751 + 1), + "Cuneiform Numbers and Punctuation": range(74752, 74879 + 1), + "Early Dynastic Cuneiform": range(74880, 75087 + 1), + "Egyptian Hieroglyphs": range(77824, 78895 + 1), + "Anatolian Hieroglyphs": range(82944, 83583 + 1), + "Bamum Supplement": 
range(92160, 92735 + 1), + "Mro": range(92736, 92783 + 1), + "Bassa Vah": range(92880, 92927 + 1), + "Pahawh Hmong": range(92928, 93071 + 1), + "Miao": range(93952, 94111 + 1), + "Ideographic Symbols and Punctuation": range(94176, 94207 + 1), + "Tangut": range(94208, 100351 + 1), + "Tangut Components": range(100352, 101119 + 1), + "Kana Supplement": range(110592, 110847 + 1), + "Kana Extended-A": range(110848, 110895 + 1), + "Nushu": range(110960, 111359 + 1), + "Duployan": range(113664, 113823 + 1), + "Shorthand Format Controls": range(113824, 113839 + 1), + "Byzantine Musical Symbols": range(118784, 119039 + 1), + "Musical Symbols": range(119040, 119295 + 1), + "Ancient Greek Musical Notation": range(119296, 119375 + 1), + "Tai Xuan Jing Symbols": range(119552, 119647 + 1), + "Counting Rod Numerals": range(119648, 119679 + 1), + "Mathematical Alphanumeric Symbols": range(119808, 120831 + 1), + "Sutton SignWriting": range(120832, 121519 + 1), + "Glagolitic Supplement": range(122880, 122927 + 1), + "Mende Kikakui": range(124928, 125151 + 1), + "Adlam": range(125184, 125279 + 1), + "Arabic Mathematical Alphabetic Symbols": range(126464, 126719 + 1), + "Mahjong Tiles": range(126976, 127023 + 1), + "Domino Tiles": range(127024, 127135 + 1), + "Playing Cards": range(127136, 127231 + 1), + "Enclosed Alphanumeric Supplement": range(127232, 127487 + 1), + "Enclosed Ideographic Supplement": range(127488, 127743 + 1), + "Miscellaneous Symbols and Pictographs": range(127744, 128511 + 1), + "Emoticons range(Emoji)": range(128512, 128591 + 1), + "Ornamental Dingbats": range(128592, 128639 + 1), + "Transport and Map Symbols": range(128640, 128767 + 1), + "Alchemical Symbols": range(128768, 128895 + 1), + "Geometric Shapes Extended": range(128896, 129023 + 1), + "Supplemental Arrows-C": range(129024, 129279 + 1), + "Supplemental Symbols and Pictographs": range(129280, 129535 + 1), + "CJK Unified Ideographs Extension B": range(131072, 173791 + 1), + "CJK Unified Ideographs 
Extension C": range(173824, 177983 + 1), + "CJK Unified Ideographs Extension D": range(177984, 178207 + 1), + "CJK Unified Ideographs Extension E": range(178208, 183983 + 1), + "CJK Unified Ideographs Extension F": range(183984, 191471 + 1), + "CJK Compatibility Ideographs Supplement": range(194560, 195103 + 1), + "Tags": range(917504, 917631 + 1), + "Variation Selectors Supplement": range(917760, 917999 + 1), +} # type: Dict[str, range] + + +UNICODE_SECONDARY_RANGE_KEYWORD = [ + "Supplement", + "Extended", + "Extensions", + "Modifier", + "Marks", + "Punctuation", + "Symbols", + "Forms", + "Operators", + "Miscellaneous", + "Drawing", + "Block", + "Shapes", + "Supplemental", + "Tags", +] # type: List[str] + +RE_POSSIBLE_ENCODING_INDICATION = re_compile( + r"(?:(?:encoding)|(?:charset)|(?:coding))(?:[\:= ]{1,10})(?:[\"\']?)([a-zA-Z0-9\-_]+)(?:[\"\']?)", + IGNORECASE, +) + +IANA_SUPPORTED = sorted( + filter( + lambda x: x.endswith("_codec") is False + and x not in {"rot_13", "tactis", "mbcs"}, + list(set(aliases.values())), + ) +) # type: List[str] + +IANA_SUPPORTED_COUNT = len(IANA_SUPPORTED) # type: int + +# pre-computed code page that are similar using the function cp_similarity. 
# Pre-computed similarity groups between code pages (see cp_similarity):
# each key maps to the list of encodings whose byte-to-char tables are close
# enough that they are worth probing together.
IANA_SUPPORTED_SIMILAR = {
    "cp037": ["cp1026", "cp1140", "cp273", "cp500"],
    "cp1026": ["cp037", "cp1140", "cp273", "cp500"],
    "cp1125": ["cp866"],
    "cp1140": ["cp037", "cp1026", "cp273", "cp500"],
    "cp1250": ["iso8859_2"],
    "cp1251": ["kz1048", "ptcp154"],
    "cp1252": ["iso8859_15", "iso8859_9", "latin_1"],
    "cp1253": ["iso8859_7"],
    "cp1254": ["iso8859_15", "iso8859_9", "latin_1"],
    "cp1257": ["iso8859_13"],
    "cp273": ["cp037", "cp1026", "cp1140", "cp500"],
    "cp437": ["cp850", "cp858", "cp860", "cp861", "cp862", "cp863", "cp865"],
    "cp500": ["cp037", "cp1026", "cp1140", "cp273"],
    "cp850": ["cp437", "cp857", "cp858", "cp865"],
    "cp857": ["cp850", "cp858", "cp865"],
    "cp858": ["cp437", "cp850", "cp857", "cp865"],
    "cp860": ["cp437", "cp861", "cp862", "cp863", "cp865"],
    "cp861": ["cp437", "cp860", "cp862", "cp863", "cp865"],
    "cp862": ["cp437", "cp860", "cp861", "cp863", "cp865"],
    "cp863": ["cp437", "cp860", "cp861", "cp862", "cp865"],
    "cp865": ["cp437", "cp850", "cp857", "cp858", "cp860", "cp861", "cp862", "cp863"],
    "cp866": ["cp1125"],
    "iso8859_10": ["iso8859_14", "iso8859_15", "iso8859_4", "iso8859_9", "latin_1"],
    "iso8859_11": ["tis_620"],
    "iso8859_13": ["cp1257"],
    "iso8859_14": [
        "iso8859_10",
        "iso8859_15",
        "iso8859_16",
        "iso8859_3",
        "iso8859_9",
        "latin_1",
    ],
    "iso8859_15": [
        "cp1252",
        "cp1254",
        "iso8859_10",
        "iso8859_14",
        "iso8859_16",
        "iso8859_3",
        "iso8859_9",
        "latin_1",
    ],
    "iso8859_16": [
        "iso8859_14",
        "iso8859_15",
        "iso8859_2",
        "iso8859_3",
        "iso8859_9",
        "latin_1",
    ],
    "iso8859_2": ["cp1250", "iso8859_16", "iso8859_4"],
    "iso8859_3": ["iso8859_14", "iso8859_15", "iso8859_16", "iso8859_9", "latin_1"],
    "iso8859_4": ["iso8859_10", "iso8859_2", "iso8859_9", "latin_1"],
    "iso8859_7": ["cp1253"],
    "iso8859_9": [
        "cp1252",
        "cp1254",
        "cp1258",
        "iso8859_10",
        "iso8859_14",
        "iso8859_15",
        "iso8859_16",
        "iso8859_3",
        "iso8859_4",
        "latin_1",
    ],
    "kz1048": ["cp1251", "ptcp154"],
    "latin_1": [
        "cp1252",
        "cp1254",
        "cp1258",
        "iso8859_10",
        "iso8859_14",
        "iso8859_15",
        "iso8859_16",
        "iso8859_3",
        "iso8859_4",
        "iso8859_9",
    ],
    "mac_iceland": ["mac_roman", "mac_turkish"],
    "mac_roman": ["mac_iceland", "mac_turkish"],
    "mac_turkish": ["mac_iceland", "mac_roman"],
    "ptcp154": ["cp1251", "kz1048"],
    "tis_620": ["iso8859_11"],
}  # type: Dict[str, List[str]]


# Mapping from Python codec names (as this package reports them) to the
# spelling chardet historically used, so detect() can stay drop-in compatible.
CHARDET_CORRESPONDENCE = {
    "iso2022_kr": "ISO-2022-KR",
    "iso2022_jp": "ISO-2022-JP",
    "euc_kr": "EUC-KR",
    "tis_620": "TIS-620",
    "utf_32": "UTF-32",
    "euc_jp": "EUC-JP",
    "koi8_r": "KOI8-R",
    "iso8859_1": "ISO-8859-1",
    "iso8859_2": "ISO-8859-2",
    "iso8859_5": "ISO-8859-5",
    "iso8859_6": "ISO-8859-6",
    "iso8859_7": "ISO-8859-7",
    "iso8859_8": "ISO-8859-8",
    "utf_16": "UTF-16",
    "cp855": "IBM855",
    "mac_cyrillic": "MacCyrillic",
    "gb2312": "GB2312",
    "gb18030": "GB18030",
    "cp932": "CP932",
    "cp866": "IBM866",
    "utf_8": "utf-8",
    "utf_8_sig": "UTF-8-SIG",
    "shift_jis": "SHIFT_JIS",
    "big5": "Big5",
    "cp1250": "windows-1250",
    "cp1251": "windows-1251",
    "cp1252": "Windows-1252",
    "cp1253": "windows-1253",
    "cp1255": "windows-1255",
    "cp1256": "windows-1256",
    "cp1254": "Windows-1254",
    "cp949": "CP949",
}  # type: Dict[str, str]


# ASCII punctuation that is common in legitimate text; mess-detection plugins
# treat these characters as safe and do not count them as chaos.
COMMON_SAFE_ASCII_CHARACTERS = {
    "<",
    ">",
    "=",
    ":",
    "/",
    "&",
    ";",
    "{",
    "}",
    "[",
    "]",
    ",",
    "|",
    '"',
    "-",
}  # type: Set[str]


# Codec names associated with Korean content.
KO_NAMES = {"johab", "cp949", "euc_kr"}  # type: Set[str]
# Codec names associated with Chinese content.
ZH_NAMES = {"big5", "cp950", "big5hkscs", "hz"}  # type: Set[str]

# Matches runs of digits, non-word characters and whitespace control chars;
# used to strip non-printable noise before word counting.
NOT_PRINTABLE_PATTERN = re_compile(r"[0-9\W\n\r\t]+")

LANGUAGE_SUPPORTED_COUNT = len(FREQUENCIES)  # type: int

# Logging LEVEL below DEBUG
TRACE = 5  # type: int
import warnings
from typing import Dict, Optional, Union

from .api import from_bytes, from_fp, from_path, normalize
from .constant import CHARDET_CORRESPONDENCE
from .models import CharsetMatch, CharsetMatches


def detect(byte_str: bytes) -> Dict[str, Optional[Union[str, float]]]:
    """
    chardet legacy method
    Detect the encoding of the given byte string. It should be mostly backward-compatible.
    Encoding name will match Chardet own writing whenever possible. (Not on encoding name unsupported by it)
    This function is deprecated and should be used to migrate your project easily, consult the documentation for
    further information. Not planned for removal.

    :param byte_str: The byte sequence to examine.
    :return: Dict with "encoding" (str or None), "language" (str) and "confidence" (float or None) keys.
    :raises TypeError: If byte_str is neither bytes nor bytearray.
    """
    if not isinstance(byte_str, (bytearray, bytes)):
        raise TypeError(  # pragma: nocover
            "Expected object of type bytes or bytearray, got: "
            "{0}".format(type(byte_str))
        )

    # Normalize bytearray input to bytes before analysis.
    if isinstance(byte_str, bytearray):
        byte_str = bytes(byte_str)

    r = from_bytes(byte_str).best()

    # chardet reports "" (not None) when the language is unknown.
    encoding = r.encoding if r is not None else None
    language = r.language if r is not None and r.language != "Unknown" else ""
    # chaos is a mess ratio in [0, 1]; chardet exposes the complement as confidence.
    confidence = 1.0 - r.chaos if r is not None else None

    # Note: CharsetNormalizer does not return 'UTF-8-SIG' as the sig get stripped in the detection/normalization process
    # but chardet does return 'utf-8-sig' and it is a valid codec name.
    if r is not None and encoding == "utf_8" and r.bom:
        encoding += "_sig"

    return {
        "encoding": encoding
        if encoding not in CHARDET_CORRESPONDENCE
        else CHARDET_CORRESPONDENCE[encoding],
        "language": language,
        "confidence": confidence,
    }


class CharsetNormalizerMatch(CharsetMatch):
    """Deprecated alias kept for backward compatibility with early releases."""

    pass


class CharsetNormalizerMatches(CharsetMatches):
    """
    Deprecated facade exposing the module-level from_* helpers as static methods.
    Each call emits a DeprecationWarning and forwards to the real implementation.
    """

    @staticmethod
    def from_fp(*args, **kwargs):  # type: ignore
        warnings.warn(  # pragma: nocover
            "staticmethod from_fp, from_bytes, from_path and normalize are deprecated "
            "and scheduled to be removed in 3.0",
            DeprecationWarning,
        )
        return from_fp(*args, **kwargs)  # pragma: nocover

    @staticmethod
    def from_bytes(*args, **kwargs):  # type: ignore
        warnings.warn(  # pragma: nocover
            "staticmethod from_fp, from_bytes, from_path and normalize are deprecated "
            "and scheduled to be removed in 3.0",
            DeprecationWarning,
        )
        return from_bytes(*args, **kwargs)  # pragma: nocover

    @staticmethod
    def from_path(*args, **kwargs):  # type: ignore
        warnings.warn(  # pragma: nocover
            "staticmethod from_fp, from_bytes, from_path and normalize are deprecated "
            "and scheduled to be removed in 3.0",
            DeprecationWarning,
        )
        return from_path(*args, **kwargs)  # pragma: nocover

    @staticmethod
    def normalize(*args, **kwargs):  # type: ignore
        warnings.warn(  # pragma: nocover
            "staticmethod from_fp, from_bytes, from_path and normalize are deprecated "
            "and scheduled to be removed in 3.0",
            DeprecationWarning,
        )
        return normalize(*args, **kwargs)  # pragma: nocover


class CharsetDetector(CharsetNormalizerMatches):
    """Deprecated alias kept for backward compatibility with early releases."""

    pass


class CharsetDoctor(CharsetNormalizerMatches):
    """Deprecated alias kept for backward compatibility with early releases."""

    pass
from functools import lru_cache
from typing import List, Optional

from .constant import COMMON_SAFE_ASCII_CHARACTERS, UNICODE_SECONDARY_RANGE_KEYWORD
from .utils import (
    is_accentuated,
    is_ascii,
    is_case_variable,
    is_cjk,
    is_emoticon,
    is_hangul,
    is_hiragana,
    is_katakana,
    is_latin,
    is_punctuation,
    is_separator,
    is_symbol,
    is_thai,
    remove_accent,
    unicode_range,
)


class MessDetectorPlugin:
    """
    Base abstract class used for mess detection plugins.
    All detectors MUST extend and implement given methods.
    """

    def eligible(self, character: str) -> bool:
        """
        Determine if given character should be fed in.
        """
        raise NotImplementedError  # pragma: nocover

    def feed(self, character: str) -> None:
        """
        The main routine to be executed upon character.
        Insert the logic in which the text would be considered chaotic.
        """
        raise NotImplementedError  # pragma: nocover

    def reset(self) -> None:  # pragma: no cover
        """
        Permit to reset the plugin to the initial state.
        """
        raise NotImplementedError

    @property
    def ratio(self) -> float:
        """
        Compute the chaos ratio based on what your feed() has seen.
        Must NOT be lower than 0.; No restriction gt 0.
        """
        raise NotImplementedError  # pragma: nocover


class TooManySymbolOrPunctuationPlugin(MessDetectorPlugin):
    """Flag content where punctuation/symbols dominate the printable characters."""

    def __init__(self) -> None:
        self._punctuation_count = 0  # type: int
        self._symbol_count = 0  # type: int
        self._character_count = 0  # type: int

        # Last printable character seen; repeats of the same char are not re-counted.
        self._last_printable_char = None  # type: Optional[str]

    def eligible(self, character: str) -> bool:
        return character.isprintable()

    def feed(self, character: str) -> None:
        self._character_count += 1

        if (
            character != self._last_printable_char
            and character not in COMMON_SAFE_ASCII_CHARACTERS
        ):
            if is_punctuation(character):
                self._punctuation_count += 1
            elif (
                character.isdigit() is False
                and is_symbol(character)
                and is_emoticon(character) is False
            ):
                # Non-emoticon symbols weigh double compared to plain punctuation.
                self._symbol_count += 2

        self._last_printable_char = character

    def reset(self) -> None:  # pragma: no cover
        self._punctuation_count = 0
        self._character_count = 0
        self._symbol_count = 0

    @property
    def ratio(self) -> float:
        if self._character_count == 0:
            return 0.0

        ratio_of_punctuation = (
            self._punctuation_count + self._symbol_count
        ) / self._character_count  # type: float

        # Below 30% the punctuation density is considered normal text.
        return ratio_of_punctuation if ratio_of_punctuation >= 0.3 else 0.0


class TooManyAccentuatedPlugin(MessDetectorPlugin):
    """Flag content where an implausible share of letters carry accents."""

    def __init__(self) -> None:
        self._character_count = 0  # type: int
        self._accentuated_count = 0  # type: int

    def eligible(self, character: str) -> bool:
        return character.isalpha()

    def feed(self, character: str) -> None:
        self._character_count += 1

        if is_accentuated(character):
            self._accentuated_count += 1

    def reset(self) -> None:  # pragma: no cover
        self._character_count = 0
        self._accentuated_count = 0

    @property
    def ratio(self) -> float:
        if self._character_count == 0:
            return 0.0
        ratio_of_accentuation = (
            self._accentuated_count / self._character_count
        )  # type: float
        # 35%+ accented letters is suspicious even for heavily accented languages.
        return ratio_of_accentuation if ratio_of_accentuation >= 0.35 else 0.0


class UnprintablePlugin(MessDetectorPlugin):
    """Flag content containing raw unprintable (non-whitespace) characters."""

    def __init__(self) -> None:
        self._unprintable_count = 0  # type: int
        self._character_count = 0  # type: int

    def eligible(self, character: str) -> bool:
        return True

    def feed(self, character: str) -> None:
        if (
            character.isspace() is False  # includes \n \t \r \v
            and character.isprintable() is False
            and character != "\x1A"  # Why? Its the ASCII substitute character.
        ):
            self._unprintable_count += 1
        self._character_count += 1

    def reset(self) -> None:  # pragma: no cover
        self._unprintable_count = 0
        # Fix: also reset the denominator, as every sibling plugin does;
        # previously a reused instance kept the stale character count.
        self._character_count = 0

    @property
    def ratio(self) -> float:
        if self._character_count == 0:
            return 0.0

        # Each unprintable char weighs 8x: even a few are a strong mess signal.
        return (self._unprintable_count * 8) / self._character_count


class SuspiciousDuplicateAccentPlugin(MessDetectorPlugin):
    """Flag successive accented Latin letters, typical of mojibake."""

    def __init__(self) -> None:
        self._successive_count = 0  # type: int
        self._character_count = 0  # type: int

        self._last_latin_character = None  # type: Optional[str]

    def eligible(self, character: str) -> bool:
        return character.isalpha() and is_latin(character)

    def feed(self, character: str) -> None:
        self._character_count += 1
        if (
            self._last_latin_character is not None
            and is_accentuated(character)
            and is_accentuated(self._last_latin_character)
        ):
            if character.isupper() and self._last_latin_character.isupper():
                self._successive_count += 1
            # Worse if its the same char duplicated with different accent.
            if remove_accent(character) == remove_accent(self._last_latin_character):
                self._successive_count += 1
        self._last_latin_character = character

    def reset(self) -> None:  # pragma: no cover
        self._successive_count = 0
        self._character_count = 0
        self._last_latin_character = None

    @property
    def ratio(self) -> float:
        if self._character_count == 0:
            return 0.0

        return (self._successive_count * 2) / self._character_count


class SuspiciousRange(MessDetectorPlugin):
    """Flag adjacent characters drawn from Unicode ranges that rarely co-occur."""

    def __init__(self) -> None:
        self._suspicious_successive_range_count = 0  # type: int
        self._character_count = 0  # type: int
        self._last_printable_seen = None  # type: Optional[str]

    def eligible(self, character: str) -> bool:
        return character.isprintable()

    def feed(self, character: str) -> None:
        self._character_count += 1

        # Whitespace/punctuation breaks the adjacency chain.
        if (
            character.isspace()
            or is_punctuation(character)
            or character in COMMON_SAFE_ASCII_CHARACTERS
        ):
            self._last_printable_seen = None
            return

        if self._last_printable_seen is None:
            self._last_printable_seen = character
            return

        unicode_range_a = unicode_range(
            self._last_printable_seen
        )  # type: Optional[str]
        unicode_range_b = unicode_range(character)  # type: Optional[str]

        if is_suspiciously_successive_range(unicode_range_a, unicode_range_b):
            self._suspicious_successive_range_count += 1

        self._last_printable_seen = character

    def reset(self) -> None:  # pragma: no cover
        self._character_count = 0
        self._suspicious_successive_range_count = 0
        self._last_printable_seen = None

    @property
    def ratio(self) -> float:
        if self._character_count == 0:
            return 0.0

        ratio_of_suspicious_range_usage = (
            self._suspicious_successive_range_count * 2
        ) / self._character_count  # type: float

        if ratio_of_suspicious_range_usage < 0.1:
            return 0.0

        return ratio_of_suspicious_range_usage


class SuperWeirdWordPlugin(MessDetectorPlugin):
    """Flag words that look implausible: over-accented, overly long foreign runs, or symbol-infested."""

    def __init__(self) -> None:
        self._word_count = 0  # type: int
        self._bad_word_count = 0  # type: int
        self._foreign_long_count = 0  # type: int

        self._is_current_word_bad = False  # type: bool
        # Set when the current word contains non-Latin/accented chars outside
        # the known CJK/Korean/Japanese/Thai scripts — long such words are suspicious.
        self._foreign_long_watch = False  # type: bool

        self._character_count = 0  # type: int
        self._bad_character_count = 0  # type: int

        self._buffer = ""  # type: str
        self._buffer_accent_count = 0  # type: int

    def eligible(self, character: str) -> bool:
        return True

    def feed(self, character: str) -> None:
        if character.isalpha():
            self._buffer = "".join([self._buffer, character])
            if is_accentuated(character):
                self._buffer_accent_count += 1
            if (
                self._foreign_long_watch is False
                and (is_latin(character) is False or is_accentuated(character))
                and is_cjk(character) is False
                and is_hangul(character) is False
                and is_katakana(character) is False
                and is_hiragana(character) is False
                and is_thai(character) is False
            ):
                self._foreign_long_watch = True
            return
        if not self._buffer:
            return
        if (
            character.isspace() or is_punctuation(character) or is_separator(character)
        ) and self._buffer:
            # Word boundary reached: judge the buffered word.
            self._word_count += 1
            buffer_length = len(self._buffer)  # type: int

            self._character_count += buffer_length

            if buffer_length >= 4:
                if self._buffer_accent_count / buffer_length > 0.34:
                    self._is_current_word_bad = True
                # Word/Buffer ending with a upper case accentuated letter are so rare,
                # that we will consider them all as suspicious. Same weight as foreign_long suspicious.
                if is_accentuated(self._buffer[-1]) and self._buffer[-1].isupper():
                    self._foreign_long_count += 1
                    self._is_current_word_bad = True
            if buffer_length >= 24 and self._foreign_long_watch:
                self._foreign_long_count += 1
                self._is_current_word_bad = True

            if self._is_current_word_bad:
                self._bad_word_count += 1
                self._bad_character_count += len(self._buffer)
                self._is_current_word_bad = False

            self._foreign_long_watch = False
            self._buffer = ""
            self._buffer_accent_count = 0
        elif (
            character not in {"<", ">", "-", "=", "~", "|", "_"}
            and character.isdigit() is False
            and is_symbol(character)
        ):
            # A symbol inside a word (not a boundary) poisons the whole word.
            self._is_current_word_bad = True
            self._buffer += character

    def reset(self) -> None:  # pragma: no cover
        self._buffer = ""
        self._is_current_word_bad = False
        self._foreign_long_watch = False
        self._bad_word_count = 0
        self._word_count = 0
        self._character_count = 0
        self._bad_character_count = 0
        self._foreign_long_count = 0

    @property
    def ratio(self) -> float:
        # Too few words to judge, unless a "foreign long" word already fired.
        if self._word_count <= 10 and self._foreign_long_count == 0:
            return 0.0

        return self._bad_character_count / self._character_count


class CjkInvalidStopPlugin(MessDetectorPlugin):
    """
    GB(Chinese) based encoding often render the stop incorrectly when the content does not fit and
    can be easily detected. Searching for the overuse of '丅' and '丄'.
    """

    def __init__(self) -> None:
        self._wrong_stop_count = 0  # type: int
        self._cjk_character_count = 0  # type: int

    def eligible(self, character: str) -> bool:
        return True

    def feed(self, character: str) -> None:
        if character in {"丅", "丄"}:
            self._wrong_stop_count += 1
            return
        if is_cjk(character):
            self._cjk_character_count += 1

    def reset(self) -> None:  # pragma: no cover
        self._wrong_stop_count = 0
        self._cjk_character_count = 0

    @property
    def ratio(self) -> float:
        if self._cjk_character_count < 16:
            return 0.0
        return self._wrong_stop_count / self._cjk_character_count


class ArchaicUpperLowerPlugin(MessDetectorPlugin):
    """Flag unnatural uPpEr/LoWeR case alternation within words."""

    def __init__(self) -> None:
        # True when the previous pair already alternated case once (armed state).
        self._buf = False  # type: bool

        self._character_count_since_last_sep = 0  # type: int

        self._successive_upper_lower_count = 0  # type: int
        self._successive_upper_lower_count_final = 0  # type: int

        self._character_count = 0  # type: int

        self._last_alpha_seen = None  # type: Optional[str]
        self._current_ascii_only = True  # type: bool

    def eligible(self, character: str) -> bool:
        return True

    def feed(self, character: str) -> None:
        is_concerned = character.isalpha() and is_case_variable(character)
        chunk_sep = is_concerned is False

        if chunk_sep and self._character_count_since_last_sep > 0:
            # Only short, non-pure-ASCII chunks contribute to the final tally;
            # plain ASCII alternation (e.g. camelCase) is too common to punish.
            if (
                self._character_count_since_last_sep <= 64
                and character.isdigit() is False
                and self._current_ascii_only is False
            ):
                self._successive_upper_lower_count_final += (
                    self._successive_upper_lower_count
                )

            self._successive_upper_lower_count = 0
            self._character_count_since_last_sep = 0
            self._last_alpha_seen = None
            self._buf = False
            self._character_count += 1
            self._current_ascii_only = True

            return

        if self._current_ascii_only is True and is_ascii(character) is False:
            self._current_ascii_only = False

        if self._last_alpha_seen is not None:
            if (character.isupper() and self._last_alpha_seen.islower()) or (
                character.islower() and self._last_alpha_seen.isupper()
            ):
                if self._buf is True:
                    self._successive_upper_lower_count += 2
                    self._buf = False
                else:
                    self._buf = True
            else:
                self._buf = False

        self._character_count += 1
        self._character_count_since_last_sep += 1
        self._last_alpha_seen = character

    def reset(self) -> None:  # pragma: no cover
        self._character_count = 0
        self._character_count_since_last_sep = 0
        self._successive_upper_lower_count = 0
        self._successive_upper_lower_count_final = 0
        self._last_alpha_seen = None
        self._buf = False
        self._current_ascii_only = True

    @property
    def ratio(self) -> float:
        if self._character_count == 0:
            return 0.0

        return self._successive_upper_lower_count_final / self._character_count


def is_suspiciously_successive_range(
    unicode_range_a: Optional[str], unicode_range_b: Optional[str]
) -> bool:
    """
    Determine if two Unicode range seen next to each other can be considered as suspicious.

    :param unicode_range_a: Unicode range name of the first character (or None if unknown).
    :param unicode_range_b: Unicode range name of the second character (or None if unknown).
    :return: True when the pairing is unlikely in legitimate text.
    """
    if unicode_range_a is None or unicode_range_b is None:
        return True

    if unicode_range_a == unicode_range_b:
        return False

    if "Latin" in unicode_range_a and "Latin" in unicode_range_b:
        return False

    if "Emoticons" in unicode_range_a or "Emoticons" in unicode_range_b:
        return False

    # Latin characters can be accompanied with a combining diacritical mark
    # eg. Vietnamese.
    if ("Latin" in unicode_range_a or "Latin" in unicode_range_b) and (
        "Combining" in unicode_range_a or "Combining" in unicode_range_b
    ):
        return False

    keywords_range_a, keywords_range_b = unicode_range_a.split(
        " "
    ), unicode_range_b.split(" ")

    # Ranges sharing a non-generic keyword (e.g. "Cyrillic") are compatible.
    for el in keywords_range_a:
        if el in UNICODE_SECONDARY_RANGE_KEYWORD:
            continue
        if el in keywords_range_b:
            return False

    # Japanese Exception
    range_a_jp_chars, range_b_jp_chars = (
        unicode_range_a
        in (
            "Hiragana",
            "Katakana",
        ),
        unicode_range_b in ("Hiragana", "Katakana"),
    )
    if (range_a_jp_chars or range_b_jp_chars) and (
        "CJK" in unicode_range_a or "CJK" in unicode_range_b
    ):
        return False
    if range_a_jp_chars and range_b_jp_chars:
        return False

    if "Hangul" in unicode_range_a or "Hangul" in unicode_range_b:
        if "CJK" in unicode_range_a or "CJK" in unicode_range_b:
            return False
        if unicode_range_a == "Basic Latin" or unicode_range_b == "Basic Latin":
            return False

    # Chinese/Japanese use dedicated range for punctuation and/or separators.
    if ("CJK" in unicode_range_a or "CJK" in unicode_range_b) or (
        unicode_range_a in ["Katakana", "Hiragana"]
        and unicode_range_b in ["Katakana", "Hiragana"]
    ):
        if "Punctuation" in unicode_range_a or "Punctuation" in unicode_range_b:
            return False
        if "Forms" in unicode_range_a or "Forms" in unicode_range_b:
            return False

    return True


@lru_cache(maxsize=2048)
def mess_ratio(
    decoded_sequence: str, maximum_threshold: float = 0.2, debug: bool = False
) -> float:
    """
    Compute a mess ratio given a decoded bytes sequence. The maximum threshold does stop the computation earlier.

    :param decoded_sequence: Text to analyse.
    :param maximum_threshold: Early-exit bound; computation stops once the running ratio reaches it.
    :param debug: When True, print each detector's ratio for inspection.
    :return: Aggregated mess ratio, rounded to 3 decimals.
    """
    # One fresh instance of every registered plugin (all subclasses of the base).
    detectors = [
        md_class() for md_class in MessDetectorPlugin.__subclasses__()
    ]  # type: List[MessDetectorPlugin]

    # +1 accounts for the trailing "\n" sentinel appended below to flush word buffers.
    length = len(decoded_sequence) + 1  # type: int

    mean_mess_ratio = 0.0  # type: float

    # Re-evaluate the aggregate ratio more often on short inputs.
    if length < 512:
        intermediary_mean_mess_ratio_calc = 32  # type: int
    elif length <= 1024:
        intermediary_mean_mess_ratio_calc = 64
    else:
        intermediary_mean_mess_ratio_calc = 128

    for character, index in zip(decoded_sequence + "\n", range(length)):
        for detector in detectors:
            if detector.eligible(character):
                detector.feed(character)

        if (
            index > 0 and index % intermediary_mean_mess_ratio_calc == 0
        ) or index == length - 1:
            mean_mess_ratio = sum(dt.ratio for dt in detectors)

            if mean_mess_ratio >= maximum_threshold:
                break

    if debug:
        for dt in detectors:  # pragma: nocover
            print(dt.__class__, dt.ratio)

    return round(mean_mess_ratio, 3)
CoherenceMatches + self._has_sig_or_bom = has_sig_or_bom # type: bool + self._unicode_ranges = None # type: Optional[List[str]] + + self._leaves = [] # type: List[CharsetMatch] + self._mean_coherence_ratio = 0.0 # type: float + + self._output_payload = None # type: Optional[bytes] + self._output_encoding = None # type: Optional[str] + + self._string = decoded_payload # type: Optional[str] + + def __eq__(self, other: object) -> bool: + if not isinstance(other, CharsetMatch): + raise TypeError( + "__eq__ cannot be invoked on {} and {}.".format( + str(other.__class__), str(self.__class__) + ) + ) + return self.encoding == other.encoding and self.fingerprint == other.fingerprint + + def __lt__(self, other: object) -> bool: + """ + Implemented to make sorted available upon CharsetMatches items. + """ + if not isinstance(other, CharsetMatch): + raise ValueError + + chaos_difference = abs(self.chaos - other.chaos) # type: float + coherence_difference = abs(self.coherence - other.coherence) # type: float + + # Bellow 1% difference --> Use Coherence + if chaos_difference < 0.01 and coherence_difference > 0.02: + # When having a tough decision, use the result that decoded as many multi-byte as possible. + if chaos_difference == 0.0 and self.coherence == other.coherence: + return self.multi_byte_usage > other.multi_byte_usage + return self.coherence > other.coherence + + return self.chaos < other.chaos + + @property + def multi_byte_usage(self) -> float: + return 1.0 - len(str(self)) / len(self.raw) + + @property + def chaos_secondary_pass(self) -> float: + """ + Check once again chaos in decoded text, except this time, with full content. + Use with caution, this can be very slow. 
+ Notice: Will be removed in 3.0 + """ + warnings.warn( + "chaos_secondary_pass is deprecated and will be removed in 3.0", + DeprecationWarning, + ) + return mess_ratio(str(self), 1.0) + + @property + def coherence_non_latin(self) -> float: + """ + Coherence ratio on the first non-latin language detected if ANY. + Notice: Will be removed in 3.0 + """ + warnings.warn( + "coherence_non_latin is deprecated and will be removed in 3.0", + DeprecationWarning, + ) + return 0.0 + + @property + def w_counter(self) -> Counter: + """ + Word counter instance on decoded text. + Notice: Will be removed in 3.0 + """ + warnings.warn( + "w_counter is deprecated and will be removed in 3.0", DeprecationWarning + ) + + string_printable_only = sub(NOT_PRINTABLE_PATTERN, " ", str(self).lower()) + + return Counter(string_printable_only.split()) + + def __str__(self) -> str: + # Lazy Str Loading + if self._string is None: + self._string = str(self._payload, self._encoding, "strict") + return self._string + + def __repr__(self) -> str: + return "".format(self.encoding, self.fingerprint) + + def add_submatch(self, other: "CharsetMatch") -> None: + if not isinstance(other, CharsetMatch) or other == self: + raise ValueError( + "Unable to add instance <{}> as a submatch of a CharsetMatch".format( + other.__class__ + ) + ) + + other._string = None # Unload RAM usage; dirty trick. + self._leaves.append(other) + + @property + def encoding(self) -> str: + return self._encoding + + @property + def encoding_aliases(self) -> List[str]: + """ + Encoding name are known by many name, using this could help when searching for IBM855 when it's listed as CP855. 
+ """ + also_known_as = [] # type: List[str] + for u, p in aliases.items(): + if self.encoding == u: + also_known_as.append(p) + elif self.encoding == p: + also_known_as.append(u) + return also_known_as + + @property + def bom(self) -> bool: + return self._has_sig_or_bom + + @property + def byte_order_mark(self) -> bool: + return self._has_sig_or_bom + + @property + def languages(self) -> List[str]: + """ + Return the complete list of possible languages found in decoded sequence. + Usually not really useful. Returned list may be empty even if 'language' property return something != 'Unknown'. + """ + return [e[0] for e in self._languages] + + @property + def language(self) -> str: + """ + Most probable language found in decoded sequence. If none were detected or inferred, the property will return + "Unknown". + """ + if not self._languages: + # Trying to infer the language based on the given encoding + # Its either English or we should not pronounce ourselves in certain cases. + if "ascii" in self.could_be_from_charset: + return "English" + + # doing it there to avoid circular import + from charset_normalizer.cd import encoding_languages, mb_encoding_languages + + languages = ( + mb_encoding_languages(self.encoding) + if is_multi_byte_encoding(self.encoding) + else encoding_languages(self.encoding) + ) + + if len(languages) == 0 or "Latin Based" in languages: + return "Unknown" + + return languages[0] + + return self._languages[0][0] + + @property + def chaos(self) -> float: + return self._mean_mess_ratio + + @property + def coherence(self) -> float: + if not self._languages: + return 0.0 + return self._languages[0][1] + + @property + def percent_chaos(self) -> float: + return round(self.chaos * 100, ndigits=3) + + @property + def percent_coherence(self) -> float: + return round(self.coherence * 100, ndigits=3) + + @property + def raw(self) -> bytes: + """ + Original untouched bytes. 
+ """ + return self._payload + + @property + def submatch(self) -> List["CharsetMatch"]: + return self._leaves + + @property + def has_submatch(self) -> bool: + return len(self._leaves) > 0 + + @property + def alphabets(self) -> List[str]: + if self._unicode_ranges is not None: + return self._unicode_ranges + # list detected ranges + detected_ranges = [ + unicode_range(char) for char in str(self) + ] # type: List[Optional[str]] + # filter and sort + self._unicode_ranges = sorted(list({r for r in detected_ranges if r})) + return self._unicode_ranges + + @property + def could_be_from_charset(self) -> List[str]: + """ + The complete list of encoding that output the exact SAME str result and therefore could be the originating + encoding. + This list does include the encoding available in property 'encoding'. + """ + return [self._encoding] + [m.encoding for m in self._leaves] + + def first(self) -> "CharsetMatch": + """ + Kept for BC reasons. Will be removed in 3.0. + """ + return self + + def best(self) -> "CharsetMatch": + """ + Kept for BC reasons. Will be removed in 3.0. + """ + return self + + def output(self, encoding: str = "utf_8") -> bytes: + """ + Method to get re-encoded bytes payload using given target encoding. Default to UTF-8. + Any errors will be simply ignored by the encoder NOT replaced. + """ + if self._output_encoding is None or self._output_encoding != encoding: + self._output_encoding = encoding + self._output_payload = str(self).encode(encoding, "replace") + + return self._output_payload # type: ignore + + @property + def fingerprint(self) -> str: + """ + Retrieve the unique SHA256 computed using the transformed (re-encoded) payload. Not the original one. + """ + return sha256(self.output()).hexdigest() + + +class CharsetMatches: + """ + Container with every CharsetMatch items ordered by default from most probable to the less one. + Act like a list(iterable) but does not implements all related methods. 
+ """ + + def __init__(self, results: List[CharsetMatch] = None): + self._results = sorted(results) if results else [] # type: List[CharsetMatch] + + def __iter__(self) -> Iterator[CharsetMatch]: + yield from self._results + + def __getitem__(self, item: Union[int, str]) -> CharsetMatch: + """ + Retrieve a single item either by its position or encoding name (alias may be used here). + Raise KeyError upon invalid index or encoding not present in results. + """ + if isinstance(item, int): + return self._results[item] + if isinstance(item, str): + item = iana_name(item, False) + for result in self._results: + if item in result.could_be_from_charset: + return result + raise KeyError + + def __len__(self) -> int: + return len(self._results) + + def __bool__(self) -> bool: + return len(self._results) > 0 + + def append(self, item: CharsetMatch) -> None: + """ + Insert a single match. Will be inserted accordingly to preserve sort. + Can be inserted as a submatch. + """ + if not isinstance(item, CharsetMatch): + raise ValueError( + "Cannot append instance '{}' to CharsetMatches".format( + str(item.__class__) + ) + ) + # We should disable the submatch factoring when the input file is too heavy (conserve RAM usage) + if len(item.raw) <= TOO_BIG_SEQUENCE: + for match in self._results: + if match.fingerprint == item.fingerprint and match.chaos == item.chaos: + match.add_submatch(item) + return + self._results.append(item) + self._results = sorted(self._results) + + def best(self) -> Optional["CharsetMatch"]: + """ + Simply return the first match. Strict equivalent to matches[0]. + """ + if not self._results: + return None + return self._results[0] + + def first(self) -> Optional["CharsetMatch"]: + """ + Redundant method, call the method best(). Kept for BC reasons. 
+ """ + return self.best() + + +CoherenceMatch = Tuple[str, float] +CoherenceMatches = List[CoherenceMatch] + + +class CliDetectionResult: + def __init__( + self, + path: str, + encoding: Optional[str], + encoding_aliases: List[str], + alternative_encodings: List[str], + language: str, + alphabets: List[str], + has_sig_or_bom: bool, + chaos: float, + coherence: float, + unicode_path: Optional[str], + is_preferred: bool, + ): + self.path = path # type: str + self.unicode_path = unicode_path # type: Optional[str] + self.encoding = encoding # type: Optional[str] + self.encoding_aliases = encoding_aliases # type: List[str] + self.alternative_encodings = alternative_encodings # type: List[str] + self.language = language # type: str + self.alphabets = alphabets # type: List[str] + self.has_sig_or_bom = has_sig_or_bom # type: bool + self.chaos = chaos # type: float + self.coherence = coherence # type: float + self.is_preferred = is_preferred # type: bool + + @property + def __dict__(self) -> Dict[str, Any]: # type: ignore + return { + "path": self.path, + "encoding": self.encoding, + "encoding_aliases": self.encoding_aliases, + "alternative_encodings": self.alternative_encodings, + "language": self.language, + "alphabets": self.alphabets, + "has_sig_or_bom": self.has_sig_or_bom, + "chaos": self.chaos, + "coherence": self.coherence, + "unicode_path": self.unicode_path, + "is_preferred": self.is_preferred, + } + + def to_json(self) -> str: + return dumps(self.__dict__, ensure_ascii=True, indent=4) diff --git a/.venv/lib/python3.9/site-packages/charset_normalizer/py.typed b/.venv/lib/python3.9/site-packages/charset_normalizer/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.9/site-packages/charset_normalizer/utils.py b/.venv/lib/python3.9/site-packages/charset_normalizer/utils.py new file mode 100644 index 0000000..dcb14df --- /dev/null +++ b/.venv/lib/python3.9/site-packages/charset_normalizer/utils.py @@ -0,0 +1,342 @@ +try: + import 
unicodedata2 as unicodedata +except ImportError: + import unicodedata # type: ignore[no-redef] + +import importlib +import logging +from codecs import IncrementalDecoder +from encodings.aliases import aliases +from functools import lru_cache +from re import findall +from typing import List, Optional, Set, Tuple, Union + +from _multibytecodec import MultibyteIncrementalDecoder # type: ignore + +from .constant import ( + ENCODING_MARKS, + IANA_SUPPORTED_SIMILAR, + RE_POSSIBLE_ENCODING_INDICATION, + UNICODE_RANGES_COMBINED, + UNICODE_SECONDARY_RANGE_KEYWORD, + UTF8_MAXIMAL_ALLOCATION, +) + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_accentuated(character: str) -> bool: + try: + description = unicodedata.name(character) # type: str + except ValueError: + return False + return ( + "WITH GRAVE" in description + or "WITH ACUTE" in description + or "WITH CEDILLA" in description + or "WITH DIAERESIS" in description + or "WITH CIRCUMFLEX" in description + or "WITH TILDE" in description + ) + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def remove_accent(character: str) -> str: + decomposed = unicodedata.decomposition(character) # type: str + if not decomposed: + return character + + codes = decomposed.split(" ") # type: List[str] + + return chr(int(codes[0], 16)) + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def unicode_range(character: str) -> Optional[str]: + """ + Retrieve the Unicode range official name from a single character. 
+ """ + character_ord = ord(character) # type: int + + for range_name, ord_range in UNICODE_RANGES_COMBINED.items(): + if character_ord in ord_range: + return range_name + + return None + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_latin(character: str) -> bool: + try: + description = unicodedata.name(character) # type: str + except ValueError: + return False + return "LATIN" in description + + +def is_ascii(character: str) -> bool: + try: + character.encode("ascii") + except UnicodeEncodeError: + return False + return True + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_punctuation(character: str) -> bool: + character_category = unicodedata.category(character) # type: str + + if "P" in character_category: + return True + + character_range = unicode_range(character) # type: Optional[str] + + if character_range is None: + return False + + return "Punctuation" in character_range + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_symbol(character: str) -> bool: + character_category = unicodedata.category(character) # type: str + + if "S" in character_category or "N" in character_category: + return True + + character_range = unicode_range(character) # type: Optional[str] + + if character_range is None: + return False + + return "Forms" in character_range + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_emoticon(character: str) -> bool: + character_range = unicode_range(character) # type: Optional[str] + + if character_range is None: + return False + + return "Emoticons" in character_range + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_separator(character: str) -> bool: + if character.isspace() or character in {"|", "+", ",", ";", "<", ">"}: + return True + + character_category = unicodedata.category(character) # type: str + + return "Z" in character_category + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_case_variable(character: str) -> bool: + return character.islower() != character.isupper() + + +def 
is_private_use_only(character: str) -> bool: + character_category = unicodedata.category(character) # type: str + + return character_category == "Co" + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_cjk(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: + return False + + return "CJK" in character_name + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_hiragana(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: + return False + + return "HIRAGANA" in character_name + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_katakana(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: + return False + + return "KATAKANA" in character_name + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_hangul(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: + return False + + return "HANGUL" in character_name + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_thai(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: + return False + + return "THAI" in character_name + + +@lru_cache(maxsize=len(UNICODE_RANGES_COMBINED)) +def is_unicode_range_secondary(range_name: str) -> bool: + return any(keyword in range_name for keyword in UNICODE_SECONDARY_RANGE_KEYWORD) + + +def any_specified_encoding(sequence: bytes, search_zone: int = 4096) -> Optional[str]: + """ + Extract using ASCII-only decoder any specified encoding in the first n-bytes. 
+ """ + if not isinstance(sequence, bytes): + raise TypeError + + seq_len = len(sequence) # type: int + + results = findall( + RE_POSSIBLE_ENCODING_INDICATION, + sequence[: min(seq_len, search_zone)].decode("ascii", errors="ignore"), + ) # type: List[str] + + if len(results) == 0: + return None + + for specified_encoding in results: + specified_encoding = specified_encoding.lower().replace("-", "_") + + for encoding_alias, encoding_iana in aliases.items(): + if encoding_alias == specified_encoding: + return encoding_iana + if encoding_iana == specified_encoding: + return encoding_iana + + return None + + +@lru_cache(maxsize=128) +def is_multi_byte_encoding(name: str) -> bool: + """ + Verify is a specific encoding is a multi byte one based on it IANA name + """ + return name in { + "utf_8", + "utf_8_sig", + "utf_16", + "utf_16_be", + "utf_16_le", + "utf_32", + "utf_32_le", + "utf_32_be", + "utf_7", + } or issubclass( + importlib.import_module("encodings.{}".format(name)).IncrementalDecoder, # type: ignore + MultibyteIncrementalDecoder, + ) + + +def identify_sig_or_bom(sequence: bytes) -> Tuple[Optional[str], bytes]: + """ + Identify and extract SIG/BOM in given sequence. 
+ """ + + for iana_encoding in ENCODING_MARKS: + marks = ENCODING_MARKS[iana_encoding] # type: Union[bytes, List[bytes]] + + if isinstance(marks, bytes): + marks = [marks] + + for mark in marks: + if sequence.startswith(mark): + return iana_encoding, mark + + return None, b"" + + +def should_strip_sig_or_bom(iana_encoding: str) -> bool: + return iana_encoding not in {"utf_16", "utf_32"} + + +def iana_name(cp_name: str, strict: bool = True) -> str: + cp_name = cp_name.lower().replace("-", "_") + + for encoding_alias, encoding_iana in aliases.items(): + if cp_name in [encoding_alias, encoding_iana]: + return encoding_iana + + if strict: + raise ValueError("Unable to retrieve IANA for '{}'".format(cp_name)) + + return cp_name + + +def range_scan(decoded_sequence: str) -> List[str]: + ranges = set() # type: Set[str] + + for character in decoded_sequence: + character_range = unicode_range(character) # type: Optional[str] + + if character_range is None: + continue + + ranges.add(character_range) + + return list(ranges) + + +def cp_similarity(iana_name_a: str, iana_name_b: str) -> float: + + if is_multi_byte_encoding(iana_name_a) or is_multi_byte_encoding(iana_name_b): + return 0.0 + + decoder_a = importlib.import_module("encodings.{}".format(iana_name_a)).IncrementalDecoder # type: ignore + decoder_b = importlib.import_module("encodings.{}".format(iana_name_b)).IncrementalDecoder # type: ignore + + id_a = decoder_a(errors="ignore") # type: IncrementalDecoder + id_b = decoder_b(errors="ignore") # type: IncrementalDecoder + + character_match_count = 0 # type: int + + for i in range(255): + to_be_decoded = bytes([i]) # type: bytes + if id_a.decode(to_be_decoded) == id_b.decode(to_be_decoded): + character_match_count += 1 + + return character_match_count / 254 + + +def is_cp_similar(iana_name_a: str, iana_name_b: str) -> bool: + """ + Determine if two code page are at least 80% similar. IANA_SUPPORTED_SIMILAR dict was generated using + the function cp_similarity. 
+ """ + return ( + iana_name_a in IANA_SUPPORTED_SIMILAR + and iana_name_b in IANA_SUPPORTED_SIMILAR[iana_name_a] + ) + + +def set_logging_handler( + name: str = "charset_normalizer", + level: int = logging.INFO, + format_string: str = "%(asctime)s | %(levelname)s | %(message)s", +) -> None: + + logger = logging.getLogger(name) + logger.setLevel(level) + + handler = logging.StreamHandler() + handler.setFormatter(logging.Formatter(format_string)) + logger.addHandler(handler) diff --git a/.venv/lib/python3.9/site-packages/charset_normalizer/version.py b/.venv/lib/python3.9/site-packages/charset_normalizer/version.py new file mode 100644 index 0000000..77cfff2 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/charset_normalizer/version.py @@ -0,0 +1,6 @@ +""" +Expose version +""" + +__version__ = "2.0.12" +VERSION = __version__.split(".") diff --git a/.venv/lib/python3.9/site-packages/construct-2.10.67.dist-info/INSTALLER b/.venv/lib/python3.9/site-packages/construct-2.10.67.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/construct-2.10.67.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.9/site-packages/construct-2.10.67.dist-info/LICENSE b/.venv/lib/python3.9/site-packages/construct-2.10.67.dist-info/LICENSE new file mode 100644 index 0000000..f053641 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/construct-2.10.67.dist-info/LICENSE @@ -0,0 +1,22 @@ +Copyright (C) 2006-2020 + Arkadiusz Bulski (arek.bulski@gmail.com) + Tomer Filiba (tomerfiliba@gmail.com) + Corbin Simpson (MostAwesomeDude@gmail.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the 
Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/.venv/lib/python3.9/site-packages/construct-2.10.67.dist-info/METADATA b/.venv/lib/python3.9/site-packages/construct-2.10.67.dist-info/METADATA new file mode 100644 index 0000000..9745422 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/construct-2.10.67.dist-info/METADATA @@ -0,0 +1,80 @@ +Metadata-Version: 2.1 +Name: construct +Version: 2.10.67 +Summary: A powerful declarative symmetric parser/builder for binary data +Home-page: http://construct.readthedocs.org +Author: Arkadiusz Bulski, Tomer Filiba, Corbin Simpson +Author-email: arek.bulski@gmail.com, tomerfiliba@gmail.com, MostAwesomeDude@gmail.com +License: MIT +Keywords: construct,kaitai,declarative,data structure,struct,binary,symmetric,parser,builder,parsing,building,pack,unpack,packer,unpacker +Platform: POSIX +Platform: Windows +Classifier: Development Status :: 5 - Production/Stable +Classifier: License :: OSI Approved :: MIT License +Classifier: Intended Audience :: Developers +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Software Development :: Build Tools +Classifier: Topic :: Software Development :: Code Generators +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 
+Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Requires-Python: >=3.6 +License-File: LICENSE +Provides-Extra: extras +Requires-Dist: enum34 ; extra == 'extras' +Requires-Dist: numpy ; extra == 'extras' +Requires-Dist: arrow ; extra == 'extras' +Requires-Dist: ruamel.yaml ; extra == 'extras' +Requires-Dist: cloudpickle ; extra == 'extras' +Requires-Dist: lz4 ; extra == 'extras' + +Construct 2.10 +=================== + +Construct is a powerful **declarative** and **symmetrical** parser and builder for binary data. + +Instead of writing *imperative code* to parse a piece of data, you declaratively define a *data structure* that describes your data. As this data structure is not code, you can use it in one direction to *parse* data into Pythonic objects, and in the other direction, to *build* objects into binary data. + +The library provides both simple, atomic constructs (such as integers of various sizes), as well as composite ones which allow you form hierarchical and sequential structures of increasing complexity. 
Construct features **bit and byte granularity**, easy debugging and testing, an **easy-to-extend subclass system**, and lots of primitive constructs to make your work easier: + +* Fields: raw bytes or numerical types +* Structs and Sequences: combine simpler constructs into more complex ones +* Bitwise: splitting bytes into bit-grained fields +* Adapters: change how data is represented +* Arrays/Ranges: duplicate constructs +* Meta-constructs: use the context (history) to compute the size of data +* If/Switch: branch the computational path based on the context +* On-demand (lazy) parsing: read and parse only what you require +* Pointers: jump from here to there in the data stream +* Tunneling: prefix data with a byte count or compress it + + +Example +--------- + +A ``Struct`` is a collection of ordered, named fields:: + + >>> format = Struct( + ... "signature" / Const(b"BMP"), + ... "width" / Int8ub, + ... "height" / Int8ub, + ... "pixels" / Array(this.width * this.height, Byte), + ... 
) + >>> format.build(dict(width=3,height=2,pixels=[7,8,9,11,12,13])) + b'BMP\x03\x02\x07\x08\t\x0b\x0c\r' + >>> format.parse(b'BMP\x03\x02\x07\x08\t\x0b\x0c\r') + Container(signature=b'BMP')(width=3)(height=2)(pixels=[7, 8, 9, 11, 12, 13]) + +A ``Sequence`` is a collection of ordered fields, and differs from ``Array`` and ``GreedyRange`` in that those two are homogenous:: + + >>> format = Sequence(PascalString(Byte, "utf8"), GreedyRange(Byte)) + >>> format.build([u"lalaland", [255,1,2]]) + b'\nlalaland\xff\x01\x02' + >>> format.parse(b"\x004361789432197") + ['', [52, 51, 54, 49, 55, 56, 57, 52, 51, 50, 49, 57, 55]] + + diff --git a/.venv/lib/python3.9/site-packages/construct-2.10.67.dist-info/RECORD b/.venv/lib/python3.9/site-packages/construct-2.10.67.dist-info/RECORD new file mode 100644 index 0000000..d066730 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/construct-2.10.67.dist-info/RECORD @@ -0,0 +1,28 @@ +construct-2.10.67.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +construct-2.10.67.dist-info/LICENSE,sha256=FVLXCs_Q0_5GTOE9MBE93G_kusIeUiEqzJhQnjzBqPQ,1181 +construct-2.10.67.dist-info/METADATA,sha256=bS1cDPaKSHRSO8a9O17NCS7juohWZbMBIH2xO1HppnE,3847 +construct-2.10.67.dist-info/RECORD,, +construct-2.10.67.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 +construct-2.10.67.dist-info/top_level.txt,sha256=U6RMh-ZeTF_f_3gvWEebpB1sa2OgGwKtNgCYmud_eyo,10 +construct/__init__.py,sha256=djKxTqHClhMIZ3nu1dau9_uAXjULB3WPj7wEPQDCio8,4260 +construct/__pycache__/__init__.cpython-39.pyc,, +construct/__pycache__/core.cpython-39.pyc,, +construct/__pycache__/debug.cpython-39.pyc,, +construct/__pycache__/expr.cpython-39.pyc,, +construct/__pycache__/version.cpython-39.pyc,, +construct/core.py,sha256=zUwDPLHMkA-H3ykWmKcWYnFhJ3Y3N2wQNg6c8vTSagA,241111 +construct/debug.py,sha256=mEuhe0AiIEtinU6Ji5vPgZQOgFZDi7RbsbH1TceqGcg,5561 +construct/expr.py,sha256=aFryazbrfOr7cfSMuhbQ7rZuDDkHjd1jZyLyCg_UABM,7439 
+construct/lib/__init__.py,sha256=KGbZEE-eY722Xs6H_vQwgFwmo4-UqvoB3NjXPF02wA4,1116 +construct/lib/__pycache__/__init__.cpython-39.pyc,, +construct/lib/__pycache__/binary.cpython-39.pyc,, +construct/lib/__pycache__/bitstream.cpython-39.pyc,, +construct/lib/__pycache__/containers.cpython-39.pyc,, +construct/lib/__pycache__/hex.cpython-39.pyc,, +construct/lib/__pycache__/py3compat.cpython-39.pyc,, +construct/lib/binary.py,sha256=WnEUm2deHovyAi6f6b_E2wBkM98EbJH6SFku3v4B8IM,4388 +construct/lib/bitstream.py,sha256=skmGamgfxczqBU1X0vcXv3GAe1eGd9ksRVgjoffjnF8,5007 +construct/lib/containers.py,sha256=6XixGhw651lbnm0kUrrF_OtkN524TUcoo8q0YS6oh6U,9896 +construct/lib/hex.py,sha256=wSJYwV00Zk_a_Q0tOLHvXU3K7c57zhGffPq3mh0wOfI,3174 +construct/lib/py3compat.py,sha256=T8YjYDnfHliTg10PKJ7qztJa0DudCdHods6y5H-tr40,1523 +construct/version.py,sha256=bSWQqTDO2NR8WxYZNgMNJotezKV4dVWRqxA4B5KvFaM,75 diff --git a/.venv/lib/python3.9/site-packages/construct-2.10.67.dist-info/WHEEL b/.venv/lib/python3.9/site-packages/construct-2.10.67.dist-info/WHEEL new file mode 100644 index 0000000..becc9a6 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/construct-2.10.67.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/.venv/lib/python3.9/site-packages/construct-2.10.67.dist-info/top_level.txt b/.venv/lib/python3.9/site-packages/construct-2.10.67.dist-info/top_level.txt new file mode 100644 index 0000000..8ed9413 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/construct-2.10.67.dist-info/top_level.txt @@ -0,0 +1 @@ +construct diff --git a/.venv/lib/python3.9/site-packages/construct-stubs/__init__.pyi b/.venv/lib/python3.9/site-packages/construct-stubs/__init__.pyi new file mode 100644 index 0000000..858384d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/construct-stubs/__init__.pyi @@ -0,0 +1,220 @@ +from construct.core import * +from construct.debug import * +from construct.expr import * +from 
construct.lib import * +from construct.version import * + +#=============================================================================== +# exposed names +#=============================================================================== +__all__ = [ + '__author__', + '__version__', + 'abs_', + 'AdaptationError', + 'Adapter', + 'Aligned', + 'AlignedStruct', + 'Array', + 'Bit', + 'BitsInteger', + 'BitsSwapped', + 'BitStruct', + 'BitwisableString', + 'Bitwise', + 'Byte', + 'Bytes', + 'BytesInteger', + 'ByteSwapped', + 'Bytewise', + 'CancelParsing', + 'Check', + 'CheckError', + 'Checksum', + 'ChecksumError', + 'Compiled', + 'Compressed', + 'CompressedLZ4', + 'Computed', + 'Const', + 'ConstError', + 'Construct', + 'ConstructError', + 'Container', + 'CString', + 'Debugger', + 'Default', + 'Double', + 'Enum', + 'EnumInteger', + 'EnumIntegerString', + 'Error', + 'ExplicitError', + 'ExprAdapter', + 'ExprSymmetricAdapter', + 'ExprValidator', + 'Filter', + 'FixedSized', + 'Flag', + 'FlagsEnum', + "Float16b", + "Float16l", + "Float16n", + "Float32b", + "Float32l", + "Float32n", + "Float64b", + "Float64l", + "Float64n", + 'FocusedSeq', + 'FormatField', + 'FormatFieldError', + 'FuncPath', + 'globalPrintFalseFlags', + 'globalPrintFullStrings', + 'GreedyBytes', + 'GreedyRange', + 'GreedyString', + 'Half', + 'Hex', + 'HexDump', + 'If', + 'IfThenElse', + 'Index', + 'IndexFieldError', + 'Indexing', + 'Int', + "Int8sb", + "Int8sl", + "Int8sn", + "Int8ub", + "Int8ul", + "Int8un", + "Int16sb", + "Int16sl", + "Int16sn", + "Int16ub", + "Int16ul", + "Int16un", + "Int24sb", + "Int24sl", + "Int24sn", + "Int24ub", + "Int24ul", + "Int24un", + "Int32sb", + "Int32sl", + "Int32sn", + "Int32ub", + "Int32ul", + "Int32un", + "Int64sb", + "Int64sl", + "Int64sn", + "Int64ub", + "Int64ul", + "Int64un", + 'IntegerError', + 'Lazy', + 'LazyArray', + 'LazyBound', + 'LazyContainer', + 'LazyListContainer', + 'LazyStruct', + 'len_', + 'lib', + 'list_', + 'ListContainer', + 'Long', + 'Mapping', + 
'MappingError', + 'max_', + 'min_', + 'NamedTuple', + 'NamedTupleError', + 'Nibble', + 'NoneOf', + 'NullStripped', + 'NullTerminated', + 'Numpy', + 'obj_', + 'Octet', + 'OneOf', + 'Optional', + 'Padded', + 'PaddedString', + 'Padding', + 'PaddingError', + 'PascalString', + 'Pass', + 'Path', + 'Path2', + 'Peek', + 'Pickled', + 'Pointer', + 'possiblestringencodings', + 'Prefixed', + 'PrefixedArray', + 'Probe', + 'ProcessRotateLeft', + 'ProcessXor', + 'RangeError', + 'RawCopy', + 'Rebuffered', + 'RebufferedBytesIO', + 'Rebuild', + 'release_date', + 'Renamed', + 'RepeatError', + 'RepeatUntil', + 'RestreamData', + 'Restreamed', + 'RestreamedBytesIO', + 'RotationError', + 'Seek', + 'Select', + 'SelectError', + 'Sequence', + 'setGlobalPrintFalseFlags', + 'setGlobalPrintFullStrings', + 'setGlobalPrintPrivateEntries', + 'Short', + 'Single', + 'SizeofError', + 'Slicing', + 'StopFieldError', + 'StopIf', + 'stream_iseof', + 'stream_read', + 'stream_read_entire', + 'stream_seek', + 'stream_size', + 'stream_tell', + 'stream_write', + 'StreamError', + 'StringEncoded', + 'StringError', + 'Struct', + 'Subconstruct', + 'sum_', + 'Switch', + 'SwitchError', + 'SymmetricAdapter', + 'Tell', + 'Terminated', + 'TerminatedError', + 'this', + 'Timestamp', + 'TimestampAdapter', + 'TimestampError', + 'Transformed', + 'Tunnel', + 'Union', + 'UnionError', + 'ValidationError', + 'Validator', + 'VarInt', + 'version', + 'version_string', + 'ZigZag', +] diff --git a/.venv/lib/python3.9/site-packages/construct-stubs/core.pyi b/.venv/lib/python3.9/site-packages/construct-stubs/core.pyi new file mode 100644 index 0000000..73a1f88 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/construct-stubs/core.pyi @@ -0,0 +1,1199 @@ +import enum +import io +import sys +import typing as t + +import arrow +from construct.lib import ( + Container, + ContainerType, + HexDisplayedBytes, + HexDisplayedDict, + HexDisplayedInteger, + HexDumpDisplayedBytes, + HexDumpDisplayedDict, + ListContainer, + ListType, + 
RebufferedBytesIO, +) + +# unfortunately, there are a few duplications with "typing", e.g. Union and Optional, which is why the t. prefix must be used everywhere + +# Some of the Constructs can be optimised when the following typing optimisations are available: +# - Variadic Generics: https://mail.python.org/archives/list/typing-sig@python.org/thread/SQVTQYWIOI4TIO7NNBTFFWFMSMS2TA4J/ +# - Higher Kinded Types: https://github.com/python/typing/issues/548 +# - Higher Kinded Types: https://sobolevn.me/2020/10/higher-kinded-types-in-python + +StreamType = t.BinaryIO +PathType = str +ContextKWType = t.Any + +# =============================================================================== +# exceptions +# =============================================================================== +class ConstructError(Exception): + path: t.Optional[PathType] + def __init__( + self, message: str = ..., path: t.Optional[PathType] = ... + ) -> None: ... + +class SizeofError(ConstructError): ... +class AdaptationError(ConstructError): ... +class ValidationError(ConstructError): ... +class StreamError(ConstructError): ... +class FormatFieldError(ConstructError): ... +class IntegerError(ConstructError): ... +class StringError(ConstructError): ... +class MappingError(ConstructError): ... +class RangeError(ConstructError): ... +class RepeatError(ConstructError): ... +class ConstError(ConstructError): ... +class IndexFieldError(ConstructError): ... +class CheckError(ConstructError): ... +class ExplicitError(ConstructError): ... +class NamedTupleError(ConstructError): ... +class TimestampError(ConstructError): ... +class UnionError(ConstructError): ... +class SelectError(ConstructError): ... +class SwitchError(ConstructError): ... +class StopFieldError(ConstructError): ... +class PaddingError(ConstructError): ... +class TerminatedError(ConstructError): ... +class RawCopyError(ConstructError): ... +class RotationError(ConstructError): ... +class ChecksumError(ConstructError): ... 
+class CancelParsing(ConstructError): ... + +# =============================================================================== +# used internally +# =============================================================================== +def stream_read( + stream: t.BinaryIO, length: int, path: t.Optional[PathType] +) -> bytes: ... +def stream_read_entire(stream: t.BinaryIO, path: t.Optional[PathType]) -> bytes: ... +def stream_write( + stream: t.BinaryIO, data: bytes, length: int, path: t.Optional[PathType] +) -> None: ... +def stream_seek( + stream: t.BinaryIO, offset: int, whence: int, path: t.Optional[PathType] +) -> int: ... +def stream_tell(stream: t.BinaryIO, path: t.Optional[PathType]) -> int: ... +def stream_size(stream: t.BinaryIO) -> int: ... +def stream_iseof(stream: t.BinaryIO) -> bool: ... +def evaluate(param: ConstantOrContextLambda2[T], context: Context) -> T: ... + +# =============================================================================== +# abstract constructs +# =============================================================================== +ParsedType = t.TypeVar("ParsedType", covariant=True) +BuildTypes = t.TypeVar("BuildTypes", contravariant=True) + +class Construct(t.Generic[ParsedType, BuildTypes]): + name: t.Optional[str] + docs: str + flagbuildnone: bool + parsed: t.Optional[t.Callable[[ParsedType, Context], None]] + def parse(self, data: bytes, **contextkw: ContextKWType) -> ParsedType: ... + def parse_stream( + self, stream: StreamType, **contextkw: ContextKWType + ) -> ParsedType: ... + def parse_file(self, filename: str, **contextkw: ContextKWType) -> ParsedType: ... + def build(self, obj: BuildTypes, **contextkw: ContextKWType) -> bytes: ... + def build_stream( + self, obj: BuildTypes, stream: StreamType, **contextkw: ContextKWType + ) -> bytes: ... + def build_file( + self, obj: BuildTypes, filename: str, **contextkw: ContextKWType + ) -> bytes: ... + def sizeof(self, **contextkw: ContextKWType) -> int: ... 
+ def compile(self, filename: str = ...) -> Construct[ParsedType, BuildTypes]: ... + def benchmark(self, sampledata: bytes, filename: str = ...) -> str: ... + def export_ksy(self, schemaname: str = ..., filename: str = ...) -> str: ... + def __rtruediv__( + self, name: t.Optional[t.AnyStr] + ) -> Renamed[ParsedType, BuildTypes]: ... + __rdiv__: t.Callable[[str], Construct[ParsedType, BuildTypes]] + def __mul__( + self, + other: t.Union[str, bytes, t.Callable[[ParsedType, Context], None]], + ) -> Renamed[ParsedType, BuildTypes]: ... + def __rmul__( + self, + other: t.Union[str, bytes, t.Callable[[ParsedType, Context], None]], + ) -> Renamed[ParsedType, BuildTypes]: ... + def __add__( + self, other: Construct[t.Any, t.Any] + ) -> Struct[Container[t.Any], t.Optional[t.Dict[str, t.Any]]]: ... + def __rshift__( + self, other: Construct[t.Any, t.Any] + ) -> Sequence[ListContainer[t.Any], t.Optional[t.List[t.Any]]]: ... + def __getitem__( + self, count: t.Union[int, t.Callable[[Context], int]] + ) -> Array[ + ParsedType, + BuildTypes, + ListContainer[ParsedType], + t.List[BuildTypes], + ]: ... 
+ +@t.type_check_only +class Context(Container[t.Any]): + _: Context # optional field + _params: Context # optional field + _root: Context # optional field + _parsing: bool + _building: bool + _sizing: bool + _subcons: Container[Construct[t.Any, t.Any]] + _io: StreamType # optional field + _index: int # optional field + +ValueType = t.TypeVar("ValueType") +ConstantOrContextLambda = t.Union[ValueType, t.Callable[[Context], t.Any]] +ConstantOrContextLambda2 = t.Union[ValueType, t.Callable[[Context], ValueType]] + +SubconParsedType = t.TypeVar("SubconParsedType", covariant=True) +SubconBuildTypes = t.TypeVar("SubconBuildTypes", contravariant=True) + +class Subconstruct( + t.Generic[SubconParsedType, SubconBuildTypes, ParsedType, BuildTypes], + Construct[ParsedType, BuildTypes], +): + subcon: Construct[SubconParsedType, SubconBuildTypes] + @t.overload + def __new__( + cls, subcon: Construct[SubconParsedType, SubconBuildTypes] + ) -> Subconstruct[ + SubconParsedType, SubconBuildTypes, SubconParsedType, SubconBuildTypes + ]: ... + @t.overload + def __new__( + cls, *args: t.Any, **kwargs: t.Any + ) -> Subconstruct[SubconParsedType, SubconBuildTypes, ParsedType, BuildTypes]: ... + +class Adapter( + Subconstruct[SubconParsedType, SubconBuildTypes, ParsedType, BuildTypes], +): + def __new__( + cls, subcon: Construct[SubconParsedType, SubconBuildTypes] + ) -> Adapter[SubconParsedType, SubconBuildTypes, ParsedType, BuildTypes]: ... + def _decode( + self, obj: SubconBuildTypes, context: Context, path: PathType + ) -> ParsedType: ... + def _encode( + self, obj: BuildTypes, context: Context, path: PathType + ) -> SubconBuildTypes: ... + +class SymmetricAdapter( + Adapter[SubconParsedType, SubconBuildTypes, ParsedType, BuildTypes] +): ... + +class Validator( + SymmetricAdapter[ + SubconParsedType, SubconBuildTypes, SubconParsedType, SubconBuildTypes + ] +): + def _validate( + self, obj: SubconBuildTypes, context: Context, path: PathType + ) -> bool: ... 
+ +class Tunnel( + Subconstruct[SubconParsedType, SubconBuildTypes, SubconParsedType, SubconBuildTypes] +): + def _decode(self, data: bytes, context: Context, path: PathType) -> bytes: ... + def _encode(self, data: bytes, context: Context, path: PathType) -> bytes: ... + +# TODO: Compiled + +# =============================================================================== +# bytes and bits +# =============================================================================== +class Bytes(Construct[ParsedType, BuildTypes]): + length: ConstantOrContextLambda[int] + def __new__( + cls, length: ConstantOrContextLambda[int] + ) -> Bytes[bytes, t.Union[bytes, int]]: ... + +GreedyBytes: Construct[bytes, bytes] + +def Bitwise( + subcon: Construct[SubconParsedType, SubconBuildTypes] +) -> t.Union[ + Transformed[SubconParsedType, SubconBuildTypes], + Restreamed[SubconParsedType, SubconBuildTypes], +]: ... +def Bytewise( + subcon: Construct[SubconParsedType, SubconBuildTypes] +) -> t.Union[ + Transformed[SubconParsedType, SubconBuildTypes], + Restreamed[SubconParsedType, SubconBuildTypes], +]: ... + +# =============================================================================== +# integers and floats +# =============================================================================== +class FormatField(Construct[ParsedType, BuildTypes]): + fmtstr: str + length: int + if sys.version_info >= (3, 8): + ENDIANITY = t.Union[t.Literal["=", "<", ">"], str] + FORMAT_INT = t.Literal["B", "H", "L", "Q", "b", "h", "l", "q"] + FORMAT_FLOAT = t.Literal["f", "d", "e"] + FORMAT_BOOL = t.Literal["?"] + @t.overload + def __new__( + cls, endianity: str, format: FORMAT_INT + ) -> FormatField[int, int]: ... + @t.overload + def __new__( + cls, endianity: str, format: FORMAT_FLOAT + ) -> FormatField[float, float]: ... + @t.overload + def __new__( + cls, endianity: str, format: FORMAT_BOOL + ) -> FormatField[bool, bool]: ... 
+ @t.overload + def __new__(cls, endianity: str, format: str) -> FormatField[t.Any, t.Any]: ... + else: + def __new__(cls, endianity: str, format: str) -> FormatField[t.Any, t.Any]: ... + +class BytesInteger(Construct[ParsedType, BuildTypes]): + length: ConstantOrContextLambda[int] + signed: bool + swapped: ConstantOrContextLambda[bool] + def __new__( + cls, + length: ConstantOrContextLambda[int], + signed: bool = ..., + swapped: ConstantOrContextLambda[bool] = ..., + ) -> BytesInteger[int, int]: ... + +class BitsInteger(Construct[ParsedType, BuildTypes]): + length: ConstantOrContextLambda[int] + signed: bool + swapped: ConstantOrContextLambda[bool] + def __new__( + cls, + length: ConstantOrContextLambda[int], + signed: bool = ..., + swapped: ConstantOrContextLambda[bool] = ..., + ) -> BitsInteger[int, int]: ... + +Bit: BitsInteger[int, int] +Nibble: BitsInteger[int, int] +Octet: BitsInteger[int, int] + +Int8ub: FormatField[int, int] +Int16ub: FormatField[int, int] +Int32ub: FormatField[int, int] +Int64ub: FormatField[int, int] +Int8sb: FormatField[int, int] +Int16sb: FormatField[int, int] +Int32sb: FormatField[int, int] +Int64sb: FormatField[int, int] +Int8ul: FormatField[int, int] +Int16ul: FormatField[int, int] +Int32ul: FormatField[int, int] +Int64ul: FormatField[int, int] +Int8sl: FormatField[int, int] +Int16sl: FormatField[int, int] +Int32sl: FormatField[int, int] +Int64sl: FormatField[int, int] +Int8un: FormatField[int, int] +Int16un: FormatField[int, int] +Int32un: FormatField[int, int] +Int64un: FormatField[int, int] +Int8sn: FormatField[int, int] +Int16sn: FormatField[int, int] +Int32sn: FormatField[int, int] +Int64sn: FormatField[int, int] + +Byte: FormatField[int, int] +Short: FormatField[int, int] +Int: FormatField[int, int] +Long: FormatField[int, int] + +Float16b: FormatField[float, float] +Float16l: FormatField[float, float] +Float16n: FormatField[float, float] +Float32b: FormatField[float, float] +Float32l: FormatField[float, float] +Float32n: 
FormatField[float, float] +Float64b: FormatField[float, float] +Float64l: FormatField[float, float] +Float64n: FormatField[float, float] + +Half: FormatField[float, float] +Single: FormatField[float, float] +Double: FormatField[float, float] + +Int24ub: BytesInteger[int, int] +Int24ul: BytesInteger[int, int] +Int24un: BytesInteger[int, int] +Int24sb: BytesInteger[int, int] +Int24sl: BytesInteger[int, int] +Int24sn: BytesInteger[int, int] + +VarInt: Construct[int, int] +ZigZag: Construct[int, int] + +# =============================================================================== +# strings +# =============================================================================== +class StringEncoded(Construct[ParsedType, BuildTypes]): + if sys.version_info >= (3, 8): + ENCODING_1 = t.Literal["ascii", "utf8", "utf_8", "u8"] + ENCODING_2 = t.Literal["utf16", "utf_16", "u16", "utf_16_be", "utf_16_le"] + ENCODING_4 = t.Literal["utf32", "utf_32", "u32", "utf_32_be", "utf_32_le"] + ENCODING = t.Union[str, ENCODING_1, ENCODING_2, ENCODING_4] + else: + ENCODING = str + encoding: ENCODING + def __new__( + cls, subcon: Construct[ParsedType, BuildTypes], encoding: ENCODING + ) -> StringEncoded[str, str]: ... + +def PaddedString( + length: ConstantOrContextLambda[int], encoding: StringEncoded.ENCODING +) -> StringEncoded[str, str]: ... +def PascalString( + lengthfield: Construct[int, int], encoding: StringEncoded.ENCODING +) -> StringEncoded[str, str]: ... +def CString(encoding: StringEncoded.ENCODING) -> StringEncoded[str, str]: ... +def GreedyString(encoding: StringEncoded.ENCODING) -> StringEncoded[str, str]: ... + +# =============================================================================== +# mappings +# =============================================================================== +Flag: Construct[bool, bool] + +class EnumInteger(int): ... + +class EnumIntegerString(str): + @staticmethod + def new(intvalue: int, stringvalue: str) -> EnumIntegerString: ... 
+ +class Enum(Adapter[int, int, ParsedType, BuildTypes]): + encmapping: t.Dict[str, int] + decmapping: t.Dict[int, EnumIntegerString] + ksymapping: t.Dict[int, str] + def __new__( + cls, + subcon: Construct[int, int], + *merge: t.Union[t.Type[enum.IntEnum], t.Type[enum.IntFlag]], + **mapping: int + ) -> Enum[t.Union[EnumInteger, EnumIntegerString], t.Union[int, str]]: ... + def __getattr__(self, name: str) -> EnumIntegerString: ... + +class BitwisableString(str): + def __or__(self, other: BitwisableString) -> BitwisableString: ... + +class FlagsEnum(Adapter[int, int, ParsedType, BuildTypes]): + flags: t.Dict[str, int] + reverseflags: t.Dict[int, str] + def __new__( + cls, + subcon: Construct[int, int], + *merge: t.Union[t.Type[enum.IntEnum], t.Type[enum.IntFlag]], + **flags: int + ) -> FlagsEnum[Container[bool], t.Union[int, str, t.Dict[str, bool]]]: ... + def __getattr__(self, name: str) -> BitwisableString: ... + +class Mapping(Adapter[SubconParsedType, SubconBuildTypes, t.Any, t.Any]): + decmapping: t.Dict[int, str] + encmapping: t.Dict[str, int] + def __new__( + cls, + subcon: Construct[SubconParsedType, SubconBuildTypes], + mapping: t.Dict[t.Any, t.Any], + ) -> Mapping[t.Any, t.Any]: ... + +# =============================================================================== +# structures and sequences +# =============================================================================== +# this can maybe made better when variadic generics are available +class Struct(Construct[ParsedType, BuildTypes]): + subcons: t.List[Construct[t.Any, t.Any]] + def __new__( + cls, *subcons: Construct[t.Any, t.Any], **subconskw: Construct[t.Any, t.Any] + ) -> Struct[Container[t.Any], t.Optional[t.Dict[str, t.Any]]]: ... + def __getattr__(self, name: str) -> t.Any: ... 
+ +# this can maybe made better when variadic generics are available +class Sequence(Construct[ParsedType, BuildTypes]): + subcons: t.List[Construct[t.Any, t.Any]] + def __new__( + cls, *subcons: Construct[t.Any, t.Any], **subconskw: Construct[t.Any, t.Any] + ) -> Sequence[ListContainer[t.Any], t.Optional[t.List[t.Any]]]: ... + def __getattr__(self, name: str) -> t.Any: ... + +# =============================================================================== +# arrays ranges and repeaters +# =============================================================================== +class Array( + Subconstruct[ + SubconParsedType, + SubconBuildTypes, + ParsedType, + BuildTypes, + ] +): + count: ConstantOrContextLambda[int] + discard: bool + def __new__( + cls, + count: ConstantOrContextLambda[int], + subcon: Construct[SubconParsedType, SubconBuildTypes], + discard: bool = ..., + ) -> Array[ + SubconParsedType, + SubconBuildTypes, + ListContainer[SubconParsedType], + t.List[SubconBuildTypes], + ]: ... + +class GreedyRange( + Subconstruct[ + SubconParsedType, + SubconBuildTypes, + ParsedType, + BuildTypes, + ] +): + discard: bool + def __new__( + cls, subcon: Construct[SubconParsedType, SubconBuildTypes], discard: bool = ... + ) -> GreedyRange[ + SubconParsedType, + SubconBuildTypes, + ListContainer[SubconParsedType], + t.List[SubconBuildTypes], + ]: ... + +class RepeatUntil( + Subconstruct[ + SubconParsedType, + SubconBuildTypes, + ListContainer[SubconParsedType], + t.List[SubconBuildTypes], + ] +): + predicate: t.Union[ + bool, + t.Callable[[SubconParsedType, ListContainer[SubconParsedType], Context], bool], + ] + discard: bool + def __init__( + self, + predicate: t.Union[ + bool, + t.Callable[ + [SubconParsedType, ListContainer[SubconParsedType], Context], bool + ], + ], + subcon: Construct[SubconParsedType, SubconBuildTypes], + discard: bool = ..., + ) -> None: ... 
+ +# =============================================================================== +# specials +# =============================================================================== +class Renamed( + Subconstruct[SubconParsedType, SubconBuildTypes, SubconParsedType, SubconBuildTypes] +): + def __init__( + self, + subcon: Construct[SubconParsedType, SubconBuildTypes], + newname: t.Optional[str] = ..., + newdocs: t.Optional[str] = ..., + newparsed: t.Optional[t.Callable[[t.Any, Context], None]] = ..., + ) -> None: ... + +# =============================================================================== +# miscellaneous +# =============================================================================== +class Const(Subconstruct[SubconParsedType, SubconBuildTypes, ParsedType, BuildTypes]): + value: SubconBuildTypes + @t.overload + def __new__( + cls, + value: bytes, + ) -> Const[None, None, bytes, Bytes[bytes, int]]: ... + @t.overload + def __new__( + cls, + value: SubconBuildTypes, + subcon: Construct[SubconParsedType, SubconBuildTypes], + ) -> Const[None, None, SubconParsedType, t.Optional[SubconBuildTypes]]: ... + +class Computed(Construct[ParsedType, BuildTypes]): + func: ConstantOrContextLambda2[ParsedType] + @t.overload + def __new__( + cls, func: ConstantOrContextLambda2[ParsedType] + ) -> Computed[ParsedType, None]: ... + @t.overload + def __new__( + cls, func: ConstantOrContextLambda2[t.Any] + ) -> Computed[t.Any, None]: ... + +Index: Construct[int, t.Any] + +class Rebuild(Subconstruct[SubconParsedType, SubconBuildTypes, ParsedType, BuildTypes]): + func: ConstantOrContextLambda[SubconBuildTypes] + def __new__( + cls, + subcon: Construct[SubconParsedType, SubconBuildTypes], + func: ConstantOrContextLambda[SubconBuildTypes], + ) -> Rebuild[SubconParsedType, SubconBuildTypes, SubconParsedType, None]: ... 
+ +class Default(Subconstruct[SubconParsedType, SubconBuildTypes, ParsedType, BuildTypes]): + value: ConstantOrContextLambda[SubconBuildTypes] + def __new__( + cls, + subcon: Construct[SubconParsedType, SubconBuildTypes], + value: ConstantOrContextLambda[SubconBuildTypes], + ) -> Default[ + SubconParsedType, + SubconBuildTypes, + SubconParsedType, + t.Optional[SubconBuildTypes], + ]: ... + +class Check(Construct[ParsedType, BuildTypes]): + func: ConstantOrContextLambda[bool] + def __new__(cls, func: ConstantOrContextLambda[bool]) -> Check[None, None]: ... + +Error: Construct[None, None] + +class FocusedSeq(Construct[t.Any, t.Any]): + subcons: t.List[Construct[t.Any, t.Any]] + def __init__( + self, + parsebuildfrom: ConstantOrContextLambda[str], + *subcons: Construct[t.Any, t.Any], + **subconskw: Construct[t.Any, t.Any] + ) -> None: ... + def __getattr__(self, name: str) -> t.Any: ... + +Pickled: Construct[t.Any, t.Any] + +Numpy: Construct[t.Any, t.Any] + +class NamedTuple( + Adapter[ + SubconParsedType, + SubconBuildTypes, + ParsedType, + BuildTypes, + ] +): + tuplename: str + tuplefields: str + factory: Construct[SubconParsedType, SubconBuildTypes] + def __new__( + cls, + tuplename: str, + tuplefields: str, + subcon: Construct[SubconParsedType, SubconBuildTypes], + ) -> NamedTuple[ + SubconParsedType, + SubconBuildTypes, + t.Tuple[t.Any, ...], + t.Union[t.Tuple[t.Any, ...], t.List[t.Any], t.Dict[str, t.Any]], + ]: ... + +if sys.version_info >= (3, 8): + MSDOS = t.Literal["msdos"] +else: + MSDOS = str + +class TimestampAdapter( + Adapter[SubconParsedType, SubconBuildTypes, arrow.Arrow, arrow.Arrow] +): ... + +@t.overload +def Timestamp( + subcon: Construct[int, int], unit: MSDOS, epoch: MSDOS +) -> TimestampAdapter[int, int]: ... +@t.overload +def Timestamp( + subcon: Construct[int, int], + unit: t.Union[int, float], + epoch: t.Union[int, arrow.Arrow], +) -> TimestampAdapter[int, int]: ... 
+@t.overload +def Timestamp( + subcon: Construct[float, float], + unit: t.Union[int, float], + epoch: t.Union[int, arrow.Arrow], +) -> TimestampAdapter[float, float]: ... + +K = t.TypeVar("K") +V = t.TypeVar("V") + +class Hex(Adapter[SubconParsedType, SubconBuildTypes, ParsedType, BuildTypes]): + @t.overload + def __new__( + cls, subcon: Construct[int, BuildTypes] + ) -> Hex[int, BuildTypes, HexDisplayedInteger, BuildTypes]: ... + @t.overload + def __new__( + cls, subcon: Construct[bytes, BuildTypes] + ) -> Hex[bytes, BuildTypes, HexDisplayedBytes, BuildTypes]: ... + @t.overload + def __new__( + cls, subcon: Construct[RawCopyObj[SubconParsedType], BuildTypes] + ) -> Hex[ + RawCopyObj[SubconParsedType], + BuildTypes, + HexDisplayedDict[str, t.Union[int, bytes, SubconParsedType]], + BuildTypes, + ]: ... + @t.overload + def __new__( + cls, subcon: Construct[Container[t.Any], BuildTypes] + ) -> Hex[ + Container[t.Any], BuildTypes, HexDisplayedDict[str, t.Any], BuildTypes + ]: ... + @t.overload + def __new__( + cls, subcon: Construct[SubconParsedType, SubconBuildTypes] + ) -> Hex[ + SubconParsedType, SubconBuildTypes, SubconParsedType, SubconBuildTypes + ]: ... + +class HexDump(Adapter[SubconParsedType, SubconBuildTypes, ParsedType, BuildTypes]): + @t.overload + def __new__( + cls, subcon: Construct[bytes, BuildTypes] + ) -> HexDump[bytes, BuildTypes, HexDumpDisplayedBytes, BuildTypes]: ... + @t.overload + def __new__( + cls, subcon: Construct[RawCopyObj[SubconParsedType], BuildTypes] + ) -> HexDump[ + RawCopyObj[SubconParsedType], + BuildTypes, + HexDumpDisplayedDict[str, t.Union[int, bytes, SubconParsedType]], + BuildTypes, + ]: ... + @t.overload + def __new__( + cls, subcon: Construct[Container[t.Any], BuildTypes] + ) -> HexDump[ + Container[t.Any], BuildTypes, HexDumpDisplayedDict[str, t.Any], BuildTypes + ]: ... 
+ @t.overload + def __new__( + cls, subcon: Construct[SubconParsedType, SubconBuildTypes] + ) -> HexDump[ + SubconParsedType, SubconBuildTypes, SubconParsedType, SubconBuildTypes + ]: ... + +# =============================================================================== +# conditional +# =============================================================================== +# this can maybe made better when variadic generics are available +class Union(Construct[Container[t.Any], t.Dict[str, t.Any]]): + parsefrom: t.Optional[ConstantOrContextLambda[t.Union[int, str]]] + subcons: t.List[Construct[t.Any, t.Any]] + def __init__( + self, + parsefrom: t.Optional[ConstantOrContextLambda[t.Union[int, str]]], + *subcons: Construct[t.Any, t.Any], + **subconskw: Construct[t.Any, t.Any] + ) -> None: ... + def __getattr__(self, name: str) -> t.Any: ... + +# this can maybe made better when variadic generics are available +class Select(Construct[ParsedType, BuildTypes]): + subcons: t.List[Construct[t.Any, t.Any]] + def __new__( + cls, *subcons: Construct[t.Any, t.Any], **subconskw: Construct[t.Any, t.Any] + ) -> Select[t.Any, t.Any]: ... + +def Optional( + subcon: Construct[SubconParsedType, SubconBuildTypes] +) -> Select[t.Union[SubconParsedType, None], t.Union[SubconBuildTypes, None]]: ... 
+ +ThenParsedType = t.TypeVar("ThenParsedType") +ThenBuildTypes = t.TypeVar("ThenBuildTypes") +ElseParsedType = t.TypeVar("ElseParsedType") +ElseBuildTypes = t.TypeVar("ElseBuildTypes") + +# This does not represent the original code, but it is the only solution that works good with pyright +class _IfThenElse(Construct[ParsedType, BuildTypes]): + condfunc: ConstantOrContextLambda[bool] + thensubcon: Construct[ParsedType, BuildTypes] + elsesubcon: Construct[ParsedType, BuildTypes] + +def IfThenElse( + condfunc: ConstantOrContextLambda[bool], + thensubcon: Construct[ThenParsedType, ThenBuildTypes], + elsesubcon: Construct[ElseParsedType, ElseBuildTypes], +) -> _IfThenElse[ + t.Union[ThenParsedType, ElseParsedType], t.Union[ThenBuildTypes, ElseBuildTypes] +]: ... +def If( + condfunc: ConstantOrContextLambda[bool], + subcon: Construct[ThenParsedType, ThenBuildTypes], +) -> _IfThenElse[t.Union[ThenParsedType, None], t.Union[ThenBuildTypes, None]]: ... + +SwitchType = t.TypeVar("SwitchType") + +class Switch(Construct[ParsedType, BuildTypes]): + keyfunc: ConstantOrContextLambda[t.Any] + cases: t.Dict[t.Any, Construct[t.Any, t.Any]] + default: Construct[t.Any, t.Any] + @t.overload + def __new__( + cls, + keyfunc: ConstantOrContextLambda[SwitchType], + cases: t.Dict[SwitchType, Construct[int, int]], + default: t.Optional[Construct[int, int]] = ..., + ) -> Switch[int, t.Optional[int]]: ... + @t.overload + def __new__( + cls, + keyfunc: ConstantOrContextLambda[t.Any], + cases: t.Dict[t.Any, Construct[t.Any, t.Any]], + default: t.Optional[Construct[t.Any, t.Any]] = ..., + ) -> Switch[t.Any, t.Any]: ... + +class StopIf(Construct[ParsedType, BuildTypes]): + condfunc: ConstantOrContextLambda[bool] + def __new__(cls, condfunc: ConstantOrContextLambda[bool]) -> StopIf[None, None]: ... 
+ +# =============================================================================== +# alignment and padding +# =============================================================================== +def Padding( + length: ConstantOrContextLambda[int], pattern: bytes = ... +) -> Padded[None, None]: ... + +class Padded( + Subconstruct[SubconParsedType, SubconBuildTypes, SubconParsedType, SubconBuildTypes] +): + length: ConstantOrContextLambda[int] + pattern: bytes + def __init__( + self, + length: ConstantOrContextLambda[int], + subcon: Construct[SubconParsedType, SubconBuildTypes], + pattern: bytes = ..., + ) -> None: ... + +class Aligned( + Subconstruct[SubconParsedType, SubconBuildTypes, SubconParsedType, SubconBuildTypes] +): + modulus: ConstantOrContextLambda[int] + pattern: bytes + def __init__( + self, + modulus: ConstantOrContextLambda[int], + subcon: Construct[SubconParsedType, SubconBuildTypes], + pattern: bytes = ..., + ) -> None: ... + +def AlignedStruct( + modulus: ConstantOrContextLambda[int], + *subcons: Construct[t.Any, t.Any], + **subconskw: Construct[t.Any, t.Any] +) -> Struct[Container[t.Any], t.Optional[t.Dict[str, t.Any]]]: ... +def BitStruct( + *subcons: Construct[t.Any, t.Any], **subconskw: Construct[t.Any, t.Any] +) -> t.Union[ + Transformed[Container[t.Any], t.Dict[str, t.Any]], + Restreamed[Container[t.Any], t.Dict[str, t.Any]], +]: ... + +# =============================================================================== +# stream manipulation +# =============================================================================== +class Pointer( + Subconstruct[SubconParsedType, SubconBuildTypes, SubconParsedType, SubconBuildTypes] +): + offset: ConstantOrContextLambda[int] + stream: t.Optional[t.Callable[[Context], StreamType]] + def __init__( + self, + offset: ConstantOrContextLambda[int], + subcon: Construct[SubconParsedType, SubconBuildTypes], + stream: t.Optional[t.Callable[[Context], StreamType]] = ..., + ) -> None: ... 
+ +class Peek(Subconstruct[SubconParsedType, SubconBuildTypes, ParsedType, BuildTypes]): + def __new__( + cls, + subcon: Construct[SubconParsedType, SubconBuildTypes], + ) -> Peek[ + SubconParsedType, + SubconBuildTypes, + SubconParsedType, + t.Union[SubconBuildTypes, None], + ]: ... + +class Seek(Construct[int, None]): + at: ConstantOrContextLambda[int] + if sys.version_info >= (3, 8): + WHENCE = t.Literal[0, 1, 2] + else: + WHENCE = int + whence: ConstantOrContextLambda[WHENCE] + def __init__( + self, + at: ConstantOrContextLambda[int], + whence: ConstantOrContextLambda[WHENCE] = ..., + ) -> None: ... + +Tell: Construct[int, None] +Pass: Construct[None, None] +Terminated: Construct[None, None] + +# =============================================================================== +# tunneling and byte/bit swapping +# =============================================================================== +@t.type_check_only +class RawCopyObj(t.Generic[ParsedType], Container[t.Any]): + data: bytes + value: ParsedType + offset1: int + offset2: int + length: int + +class RawCopy(Subconstruct[SubconParsedType, SubconBuildTypes, ParsedType, BuildTypes]): + def __new__( + cls, subcon: Construct[SubconParsedType, SubconBuildTypes] + ) -> RawCopy[ + SubconParsedType, + SubconBuildTypes, + RawCopyObj[SubconParsedType], + t.Optional[t.Dict[str, t.Union[SubconBuildTypes, bytes]]], + ]: ... + +def ByteSwapped( + subcon: Construct[SubconParsedType, SubconBuildTypes] +) -> Transformed[SubconParsedType, SubconBuildTypes]: ... +def BitsSwapped( + subcon: Construct[SubconParsedType, SubconBuildTypes] +) -> t.Union[ + Transformed[SubconParsedType, SubconBuildTypes], + Restreamed[SubconParsedType, SubconBuildTypes], +]: ... 
+ +class Prefixed( + Subconstruct[SubconParsedType, SubconBuildTypes, SubconParsedType, SubconBuildTypes] +): + lengthfield: Construct[SubconParsedType, SubconBuildTypes] + includelength: t.Optional[bool] + def __init__( + self, + lengthfield: Construct[int, int], + subcon: Construct[SubconParsedType, SubconBuildTypes], + includelength: t.Optional[bool] = ..., + ) -> None: ... + +def PrefixedArray( + countfield: Construct[int, int], + subcon: Construct[SubconParsedType, SubconBuildTypes], +) -> Array[ + SubconParsedType, + SubconBuildTypes, + ListContainer[SubconParsedType], + t.List[SubconBuildTypes], +]: ... + +class FixedSized( + Subconstruct[SubconParsedType, SubconBuildTypes, SubconParsedType, SubconBuildTypes] +): + length: ConstantOrContextLambda[int] + def __init__( + self, + length: ConstantOrContextLambda[int], + subcon: Construct[SubconParsedType, SubconBuildTypes], + ) -> None: ... + +class NullTerminated( + Subconstruct[SubconParsedType, SubconBuildTypes, SubconParsedType, SubconBuildTypes] +): + term: bytes + include: t.Optional[bool] + consume: t.Optional[bool] + require: t.Optional[bool] + def __init__( + self, + subcon: Construct[SubconParsedType, SubconBuildTypes], + term: bytes = ..., + include: t.Optional[bool] = ..., + consume: t.Optional[bool] = ..., + require: t.Optional[bool] = ..., + ) -> None: ... + +class NullStripped( + Subconstruct[SubconParsedType, SubconBuildTypes, SubconParsedType, SubconBuildTypes] +): + pad: bytes + def __init__( + self, subcon: Construct[SubconParsedType, SubconBuildTypes], pad: bytes = ... + ) -> None: ... + +class RestreamData( + Subconstruct[SubconParsedType, SubconBuildTypes, SubconParsedType, None] +): + datafunc: t.Union[ + bytes, io.BytesIO, Construct[bytes, t.Any], t.Callable[[Context], bytes] + ] + def __init__( + self, + datafunc: t.Union[ + bytes, io.BytesIO, Construct[bytes, t.Any], t.Callable[[Context], bytes] + ], + subcon: Construct[SubconParsedType, SubconBuildTypes], + ) -> None: ... 
+ +class Transformed( + Subconstruct[SubconParsedType, SubconBuildTypes, SubconParsedType, SubconBuildTypes] +): + decodefunc: t.Callable[[bytes], bytes] + decodeamount: t.Optional[int] + encodefunc: t.Callable[[bytes], bytes] + encodeamount: t.Optional[int] + def __init__( + self, + subcon: Construct[SubconParsedType, SubconBuildTypes], + decodefunc: t.Callable[[bytes], bytes], + decodeamount: t.Optional[int], + encodefunc: t.Callable[[bytes], bytes], + encodeamount: t.Optional[int], + ) -> None: ... + +class Restreamed( + Subconstruct[SubconParsedType, SubconBuildTypes, SubconParsedType, SubconBuildTypes] +): + decoder: t.Callable[[bytes], bytes] + decoderunit: int + encoder: t.Callable[[bytes], bytes] + encoderunit: int + sizecomputer: t.Callable[[int], int] + def __init__( + self, + subcon: Construct[SubconParsedType, SubconBuildTypes], + decoder: t.Callable[[bytes], bytes], + decoderunit: int, + encoder: t.Callable[[bytes], bytes], + encoderunit: int, + sizecomputer: t.Callable[[int], int], + ) -> None: ... + +class ProcessXor( + Subconstruct[SubconParsedType, SubconBuildTypes, SubconParsedType, SubconParsedType] +): + padfunc: ConstantOrContextLambda2[t.Union[int, bytes]] + def __new__( + cls, + padfunc: ConstantOrContextLambda2[t.Union[int, bytes]], + subcon: Construct[SubconParsedType, SubconBuildTypes], + ) -> ProcessXor[SubconParsedType, SubconBuildTypes]: ... + +class ProcessRotateLeft( + Subconstruct[SubconParsedType, SubconBuildTypes, SubconParsedType, SubconParsedType] +): + amount: ConstantOrContextLambda2[int] + group: ConstantOrContextLambda2[int] + def __new__( + cls, + amount: ConstantOrContextLambda2[int], + group: ConstantOrContextLambda2[int], + subcon: Construct[SubconParsedType, SubconBuildTypes], + ) -> ProcessRotateLeft[SubconParsedType, SubconBuildTypes]: ... 
+ +T = t.TypeVar("T") + +class Checksum(t.Generic[T, ParsedType, BuildTypes], Construct[ParsedType, BuildTypes]): + checksumfield: Construct[ParsedType, BuildTypes] + hashfunc: t.Callable[[T], BuildTypes] + bytesfunc: t.Callable[[Context], T] + def __init__( + self, + checksumfield: Construct[ParsedType, BuildTypes], + hashfunc: t.Callable[[T], BuildTypes], + bytesfunc: t.Callable[[Context], T], + ) -> None: ... + +class Compressed(Tunnel[SubconParsedType, SubconBuildTypes]): + encoding: str + level: t.Optional[int] + lib: t.Any + def __init__( + self, + subcon: Construct[SubconParsedType, SubconBuildTypes], + encoding: str, + level: t.Optional[int] = ..., + ) -> None: ... + +class CompressedLZ4(Tunnel[SubconParsedType, SubconBuildTypes]): + lib: t.Any + def __init__( + self, + subcon: Construct[SubconParsedType, SubconBuildTypes], + ) -> None: ... + +class Rebuffered( + Subconstruct[SubconParsedType, SubconBuildTypes, SubconParsedType, SubconBuildTypes] +): + stream2: RebufferedBytesIO + def __init__( + self, + subcon: Construct[SubconParsedType, SubconBuildTypes], + tailcutoff: t.Optional[int] = ..., + ) -> None: ... + +# =============================================================================== +# lazy equivalents +# =============================================================================== +class Lazy(Subconstruct[SubconParsedType, SubconBuildTypes, ParsedType, BuildTypes]): + def __new__( + cls, + subcon: Construct[SubconParsedType, SubconBuildTypes], + ) -> Lazy[ + SubconParsedType, + SubconBuildTypes, + t.Callable[[], SubconParsedType], + t.Union[t.Callable[[], SubconParsedType], SubconParsedType], + ]: ... + +class LazyContainer(t.Generic[ContainerType], t.Dict[str, ContainerType]): + def __getattr__(self, name: str) -> ContainerType: ... + def __getitem__(self, index: t.Union[str, int]) -> ContainerType: ... + def keys(self) -> t.Iterator[str]: ... + def values(self) -> t.List[ContainerType]: ... 
+ def items(self) -> t.List[t.Tuple[str, ContainerType]]: ... + +class LazyStruct(Construct[ParsedType, BuildTypes]): + subcons: t.List[Construct[t.Any, t.Any]] + def __new__( + cls, *subcons: Construct[t.Any, t.Any], **subconskw: Construct[t.Any, t.Any] + ) -> LazyStruct[LazyContainer[t.Any], t.Optional[t.Dict[str, t.Any]]]: ... + def __getattr__(self, name: str) -> t.Any: ... + +class LazyListContainer(t.List[ListType]): ... + +class LazyArray( + Subconstruct[ + SubconParsedType, + SubconBuildTypes, + ParsedType, + BuildTypes, + ] +): + count: ConstantOrContextLambda[int] + def __new__( + cls, + count: ConstantOrContextLambda[int], + subcon: Construct[SubconParsedType, SubconBuildTypes], + ) -> LazyArray[ + SubconParsedType, + SubconBuildTypes, + ListContainer[SubconParsedType], + t.List[SubconBuildTypes], + ]: ... + +class LazyBound(Construct[ParsedType, BuildTypes]): + subconfunc: t.Callable[[], Construct[ParsedType, BuildTypes]] + def __new__( + cls, subconfunc: t.Callable[[], Construct[ParsedType, BuildTypes]] + ) -> LazyBound[ParsedType, BuildTypes]: ... + +# =============================================================================== +# adapters and validators +# =============================================================================== +class ExprAdapter(Adapter[SubconParsedType, SubconBuildTypes, ParsedType, BuildTypes]): + def __new__( + cls, + subcon: Construct[SubconParsedType, SubconBuildTypes], + decoder: t.Callable[[SubconParsedType, Context], ParsedType], + encoder: t.Callable[[BuildTypes, Context], SubconBuildTypes], + ) -> ExprAdapter[SubconParsedType, SubconBuildTypes, ParsedType, BuildTypes]: ... 
+ +class ExprSymmetricAdapter( + ExprAdapter[SubconParsedType, SubconBuildTypes, ParsedType, BuildTypes] +): + def __new__( + cls, + subcon: Construct[SubconParsedType, SubconBuildTypes], + encoder: t.Callable[[BuildTypes, Context], SubconBuildTypes], + ) -> ExprSymmetricAdapter[ + SubconParsedType, SubconBuildTypes, ParsedType, BuildTypes + ]: ... + +class ExprValidator(Validator[SubconParsedType, SubconBuildTypes]): + def __new__( + cls, + subcon: Construct[SubconParsedType, SubconBuildTypes], + validator: t.Callable[[SubconParsedType, Context], bool], + ) -> ExprValidator[SubconParsedType, SubconBuildTypes]: ... + +def OneOf( + subcon: Construct[SubconParsedType, SubconBuildTypes], + valids: t.Container[SubconParsedType], +) -> ExprValidator[SubconParsedType, SubconBuildTypes]: ... +def NoneOf( + subcon: Construct[SubconParsedType, SubconBuildTypes], + invalids: t.Container[SubconParsedType], +) -> ExprValidator[SubconParsedType, SubconBuildTypes]: ... +def Filter( + predicate: t.Callable[[SubconParsedType, Context], bool], + subcon: Construct[SubconParsedType, SubconBuildTypes], +) -> ExprSymmetricAdapter[ + SubconParsedType, SubconBuildTypes, SubconParsedType, SubconBuildTypes +]: ... + +class Slicing( + Adapter[SubconParsedType, SubconBuildTypes, SubconParsedType, SubconBuildTypes] +): + def __new__( + cls, + subcon: t.Union[ + Array[ + SubconParsedType, + SubconBuildTypes, + ListContainer[SubconParsedType], + t.List[SubconBuildTypes], + ], + GreedyRange[ + SubconParsedType, + SubconBuildTypes, + ListContainer[SubconParsedType], + t.List[SubconBuildTypes], + ], + ], + count: int, + start: t.Optional[int], + stop: t.Optional[int], + step: int = ..., + empty: t.Optional[SubconParsedType] = ..., + ) -> Slicing[ListContainer[SubconParsedType], t.List[SubconBuildTypes]]: ... 
+ +class Indexing( + Adapter[SubconParsedType, SubconBuildTypes, SubconParsedType, SubconBuildTypes] +): + def __new__( + cls, + subcon: t.Union[ + Array[ + SubconParsedType, + SubconBuildTypes, + ListContainer[SubconParsedType], + t.List[SubconBuildTypes], + ], + GreedyRange[ + SubconParsedType, + SubconBuildTypes, + ListContainer[SubconParsedType], + t.List[SubconBuildTypes], + ], + ], + count: int, + index: int, + empty: t.Optional[SubconParsedType] = ..., + ) -> Indexing[SubconParsedType, SubconBuildTypes]: ... diff --git a/.venv/lib/python3.9/site-packages/construct-stubs/debug.pyi b/.venv/lib/python3.9/site-packages/construct-stubs/debug.pyi new file mode 100644 index 0000000..1234663 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/construct-stubs/debug.pyi @@ -0,0 +1,12 @@ +import typing as t + +from construct.core import Construct, Context, Subconstruct + +ContextLambda = t.Callable[[Context], t.Any] + +class Probe(Construct[None, None]): + def __init__( + self, into: t.Optional[ContextLambda] = ..., lookahead: int = ... + ) -> None: ... + +class Debugger(Subconstruct[None, None, None, None]): ... 
diff --git a/.venv/lib/python3.9/site-packages/construct-stubs/expr.pyi b/.venv/lib/python3.9/site-packages/construct-stubs/expr.pyi new file mode 100644 index 0000000..8450a44 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/construct-stubs/expr.pyi @@ -0,0 +1,558 @@ +import operator +import typing as t + +from construct.core import * + +UniOperator = t.Callable[[t.Any], t.Any] +BinOperator = t.Callable[[t.Any, t.Any], t.Any] + +ReturnType = t.TypeVar("ReturnType") +LhsReturnType = t.TypeVar("LhsReturnType") +RhsReturnType = t.TypeVar("RhsReturnType") + +ConstOrCallable = t.Union[ReturnType, t.Callable[[ReturnType], ReturnType]] + + +class ExprMixin(t.Generic[ReturnType], object): + # __add__ ########################################################################################################## + @t.overload + def __add__(self: ExprMixin[int], other: ConstOrCallable[int]) -> BinExpr[int]: ... + @t.overload + def __add__(self: ExprMixin[int], other: ConstOrCallable[bool]) -> BinExpr[int]: ... + @t.overload + def __add__(self: ExprMixin[bool], other: ConstOrCallable[int]) -> BinExpr[int]: ... + @t.overload + def __add__(self: ExprMixin[bool], other: ConstOrCallable[bool]) -> BinExpr[int]: ... + @t.overload + def __add__(self: ExprMixin[float], other: ConstOrCallable[int]) -> BinExpr[float]: ... + @t.overload + def __add__(self: ExprMixin[float], other: ConstOrCallable[bool]) -> BinExpr[float]: ... + @t.overload + def __add__(self: ExprMixin[float], other: ConstOrCallable[float]) -> BinExpr[float]: ... + @t.overload + + # __sub__ ########################################################################################################## + @t.overload + def __sub__(self: ExprMixin[int], other: ConstOrCallable[int]) -> BinExpr[int]: ... + @t.overload + def __sub__(self: ExprMixin[int], other: ConstOrCallable[bool]) -> BinExpr[int]: ... + @t.overload + def __sub__(self: ExprMixin[bool], other: ConstOrCallable[int]) -> BinExpr[int]: ... 
+ @t.overload + def __sub__(self: ExprMixin[bool], other: ConstOrCallable[bool]) -> BinExpr[int]: ... + @t.overload + def __sub__(self: ExprMixin[float], other: ConstOrCallable[int]) -> BinExpr[float]: ... + @t.overload + def __sub__(self: ExprMixin[float], other: ConstOrCallable[bool]) -> BinExpr[float]: ... + @t.overload + def __sub__(self: ExprMixin[float], other: ConstOrCallable[float]) -> BinExpr[float]: ... + @t.overload + def __sub__(self, other: t.Any) -> BinExpr[t.Any]: ... + + # __mul__ ########################################################################################################## + @t.overload + def __mul__(self: ExprMixin[int], other: ConstOrCallable[int]) -> BinExpr[int]: ... + @t.overload + def __mul__(self: ExprMixin[int], other: ConstOrCallable[bool]) -> BinExpr[int]: ... + @t.overload + def __mul__(self: ExprMixin[bool], other: ConstOrCallable[int]) -> BinExpr[int]: ... + @t.overload + def __mul__(self: ExprMixin[bool], other: ConstOrCallable[bool]) -> BinExpr[int]: ... + @t.overload + def __mul__(self: ExprMixin[float], other: ConstOrCallable[int]) -> BinExpr[float]: ... + @t.overload + def __mul__(self: ExprMixin[float], other: ConstOrCallable[bool]) -> BinExpr[float]: ... + @t.overload + def __mul__(self: ExprMixin[float], other: ConstOrCallable[float]) -> BinExpr[float]: ... + @t.overload + def __mul__(self, other: t.Any) -> BinExpr[t.Any]: ... + + # __floordiv__ ##################################################################################################### + @t.overload + def __floordiv__(self: ExprMixin[int], other: ConstOrCallable[int]) -> BinExpr[int]: ... + @t.overload + def __floordiv__(self: ExprMixin[int], other: ConstOrCallable[bool]) -> BinExpr[int]: ... + @t.overload + def __floordiv__(self: ExprMixin[bool], other: ConstOrCallable[int]) -> BinExpr[int]: ... + @t.overload + def __floordiv__(self: ExprMixin[bool], other: ConstOrCallable[bool]) -> BinExpr[int]: ... 
+ @t.overload + def __floordiv__(self: ExprMixin[float], other: ConstOrCallable[int]) -> BinExpr[float]: ... + @t.overload + def __floordiv__(self: ExprMixin[float], other: ConstOrCallable[bool]) -> BinExpr[float]: ... + @t.overload + def __floordiv__(self: ExprMixin[float], other: ConstOrCallable[float]) -> BinExpr[float]: ... + @t.overload + def __floordiv__(self, other: t.Any) -> BinExpr[t.Any]: ... + + # __truediv__ ###################################################################################################### + @t.overload + def __truediv__(self: ExprMixin[int], other: ConstOrCallable[int]) -> BinExpr[float]: ... + @t.overload + def __truediv__(self: ExprMixin[int], other: ConstOrCallable[bool]) -> BinExpr[float]: ... + @t.overload + def __truediv__(self: ExprMixin[bool], other: ConstOrCallable[int]) -> BinExpr[float]: ... + @t.overload + def __truediv__(self: ExprMixin[bool], other: ConstOrCallable[bool]) -> BinExpr[float]: ... + @t.overload + def __truediv__(self: ExprMixin[float], other: ConstOrCallable[int]) -> BinExpr[float]: ... + @t.overload + def __truediv__(self: ExprMixin[float], other: ConstOrCallable[bool]) -> BinExpr[float]: ... + @t.overload + def __truediv__(self: ExprMixin[float], other: ConstOrCallable[float]) -> BinExpr[float]: ... + @t.overload + def __truediv__(self, other: t.Any) -> BinExpr[t.Any]: ... + + # __div__ ########################################################################################################## + def __div__(self, other: t.Any) -> BinExpr[t.Any]: ... + + # __mod__ ########################################################################################################## + @t.overload + def __mod__(self: ExprMixin[int], other: ConstOrCallable[int]) -> BinExpr[int]: ... + @t.overload + def __mod__(self: ExprMixin[int], other: ConstOrCallable[bool]) -> BinExpr[int]: ... + @t.overload + def __mod__(self: ExprMixin[bool], other: ConstOrCallable[int]) -> BinExpr[int]: ... 
+ @t.overload + def __mod__(self: ExprMixin[bool], other: ConstOrCallable[bool]) -> BinExpr[int]: ... + @t.overload + def __mod__(self: ExprMixin[float], other: ConstOrCallable[int]) -> BinExpr[float]: ... + @t.overload + def __mod__(self: ExprMixin[float], other: ConstOrCallable[bool]) -> BinExpr[float]: ... + @t.overload + def __mod__(self: ExprMixin[float], other: ConstOrCallable[float]) -> BinExpr[float]: ... + @t.overload + def __mod__(self, other: t.Any) -> BinExpr[t.Any]: ... + + # __pow__ ########################################################################################################## + @t.overload + def __pow__(self: ExprMixin[int], other: ConstOrCallable[int]) -> BinExpr[int]: ... + @t.overload + def __pow__(self: ExprMixin[int], other: ConstOrCallable[bool]) -> BinExpr[int]: ... + @t.overload + def __pow__(self: ExprMixin[bool], other: ConstOrCallable[int]) -> BinExpr[int]: ... + @t.overload + def __pow__(self: ExprMixin[bool], other: ConstOrCallable[bool]) -> BinExpr[int]: ... + @t.overload + def __pow__(self: ExprMixin[float], other: ConstOrCallable[int]) -> BinExpr[float]: ... + @t.overload + def __pow__(self: ExprMixin[float], other: ConstOrCallable[bool]) -> BinExpr[float]: ... + @t.overload + def __pow__(self: ExprMixin[float], other: ConstOrCallable[float]) -> BinExpr[float]: ... + @t.overload + def __pow__(self, other: t.Any) -> BinExpr[t.Any]: ... + + # __xor__ ########################################################################################################## + @t.overload + def __xor__(self: ExprMixin[int], other: ConstOrCallable[int]) -> BinExpr[int]: ... + @t.overload + def __xor__(self: ExprMixin[int], other: ConstOrCallable[bool]) -> BinExpr[int]: ... + @t.overload + def __xor__(self: ExprMixin[bool], other: ConstOrCallable[int]) -> BinExpr[int]: ... + @t.overload + def __xor__(self, other: t.Any) -> BinExpr[t.Any]: ... 
+ + # __rshift__ ####################################################################################################### + @t.overload + def __rshift__(self: ExprMixin[int], other: ConstOrCallable[int]) -> BinExpr[int]: ... + @t.overload + def __rshift__(self: ExprMixin[int], other: ConstOrCallable[bool]) -> BinExpr[int]: ... + @t.overload + def __rshift__(self: ExprMixin[bool], other: ConstOrCallable[int]) -> BinExpr[int]: ... + @t.overload + def __rshift__(self: ExprMixin[bool], other: ConstOrCallable[bool]) -> BinExpr[int]: ... + @t.overload + def __rshift__(self, other: t.Any) -> BinExpr[t.Any]: ... + + # __lshift__ ####################################################################################################### + @t.overload + def __lshift__(self: ExprMixin[int], other: ConstOrCallable[int]) -> BinExpr[int]: ... + @t.overload + def __lshift__(self: ExprMixin[int], other: ConstOrCallable[bool]) -> BinExpr[int]: ... + @t.overload + def __lshift__(self: ExprMixin[bool], other: ConstOrCallable[int]) -> BinExpr[int]: ... + @t.overload + def __lshift__(self: ExprMixin[bool], other: ConstOrCallable[bool]) -> BinExpr[int]: ... + @t.overload + def __lshift__(self, other: t.Any) -> BinExpr[t.Any]: ... + + # __and__ ########################################################################################################## + @t.overload + def __and__(self: ExprMixin[int], other: ConstOrCallable[int]) -> BinExpr[int]: ... + @t.overload + def __and__(self: ExprMixin[int], other: ConstOrCallable[bool]) -> BinExpr[int]: ... + @t.overload + def __and__(self: ExprMixin[bool], other: ConstOrCallable[int]) -> BinExpr[int]: ... + @t.overload + def __and__(self, other: t.Any) -> BinExpr[t.Any]: ... + + # __or__ ########################################################################################################### + @t.overload + def __or__(self: ExprMixin[int], other: ConstOrCallable[int]) -> BinExpr[int]: ... 
+ @t.overload + def __or__(self: ExprMixin[int], other: ConstOrCallable[bool]) -> BinExpr[int]: ... + @t.overload + def __or__(self: ExprMixin[bool], other: ConstOrCallable[int]) -> BinExpr[int]: ... + @t.overload + def __or__(self, other: t.Any) -> BinExpr[t.Any]: ... + + # __radd__ ######################################################################################################### + @t.overload + def __radd__(self: ExprMixin[int], other: ConstOrCallable[int]) -> BinExpr[int]: ... + @t.overload + def __radd__(self: ExprMixin[int], other: ConstOrCallable[bool]) -> BinExpr[int]: ... + @t.overload + def __radd__(self: ExprMixin[bool], other: ConstOrCallable[int]) -> BinExpr[int]: ... + @t.overload + def __radd__(self: ExprMixin[bool], other: ConstOrCallable[bool]) -> BinExpr[int]: ... + @t.overload + def __radd__(self: ExprMixin[float], other: ConstOrCallable[int]) -> BinExpr[float]: ... + @t.overload + def __radd__(self: ExprMixin[float], other: ConstOrCallable[bool]) -> BinExpr[float]: ... + @t.overload + def __radd__(self: ExprMixin[float], other: ConstOrCallable[float]) -> BinExpr[float]: ... + @t.overload + def __radd__(self, other: t.Any) -> BinExpr[t.Any]: ... + + # __rsub__ ######################################################################################################### + @t.overload + def __rsub__(self: ExprMixin[int], other: ConstOrCallable[int]) -> BinExpr[int]: ... + @t.overload + def __rsub__(self: ExprMixin[int], other: ConstOrCallable[bool]) -> BinExpr[int]: ... + @t.overload + def __rsub__(self: ExprMixin[bool], other: ConstOrCallable[int]) -> BinExpr[int]: ... + @t.overload + def __rsub__(self: ExprMixin[bool], other: ConstOrCallable[bool]) -> BinExpr[int]: ... + @t.overload + def __rsub__(self: ExprMixin[float], other: ConstOrCallable[int]) -> BinExpr[float]: ... + @t.overload + def __rsub__(self: ExprMixin[float], other: ConstOrCallable[bool]) -> BinExpr[float]: ... 
+ @t.overload + def __rsub__(self: ExprMixin[float], other: ConstOrCallable[float]) -> BinExpr[float]: ... + @t.overload + def __rsub__(self, other: t.Any) -> BinExpr[t.Any]: ... + + # __rmul__ ######################################################################################################### + @t.overload + def __rmul__(self: ExprMixin[int], other: ConstOrCallable[int]) -> BinExpr[int]: ... + @t.overload + def __rmul__(self: ExprMixin[int], other: ConstOrCallable[bool]) -> BinExpr[int]: ... + @t.overload + def __rmul__(self: ExprMixin[bool], other: ConstOrCallable[int]) -> BinExpr[int]: ... + @t.overload + def __rmul__(self: ExprMixin[bool], other: ConstOrCallable[bool]) -> BinExpr[int]: ... + @t.overload + def __rmul__(self: ExprMixin[float], other: ConstOrCallable[int]) -> BinExpr[float]: ... + @t.overload + def __rmul__(self: ExprMixin[float], other: ConstOrCallable[bool]) -> BinExpr[float]: ... + @t.overload + def __rmul__(self: ExprMixin[float], other: ConstOrCallable[float]) -> BinExpr[float]: ... + @t.overload + def __rmul__(self, other: t.Any) -> BinExpr[t.Any]: ... + + # __rfloordiv__ #################################################################################################### + @t.overload + def __rfloordiv__(self: ExprMixin[int], other: ConstOrCallable[int]) -> BinExpr[int]: ... + @t.overload + def __rfloordiv__(self: ExprMixin[int], other: ConstOrCallable[bool]) -> BinExpr[int]: ... + @t.overload + def __rfloordiv__(self: ExprMixin[bool], other: ConstOrCallable[int]) -> BinExpr[int]: ... + @t.overload + def __rfloordiv__(self: ExprMixin[bool], other: ConstOrCallable[bool]) -> BinExpr[int]: ... + @t.overload + def __rfloordiv__(self: ExprMixin[float], other: ConstOrCallable[int]) -> BinExpr[float]: ... + @t.overload + def __rfloordiv__(self: ExprMixin[float], other: ConstOrCallable[bool]) -> BinExpr[float]: ... + @t.overload + def __rfloordiv__(self: ExprMixin[float], other: ConstOrCallable[float]) -> BinExpr[float]: ... 
+ @t.overload + def __rfloordiv__(self, other: t.Any) -> BinExpr[t.Any]: ... + + # __rtruediv__ ##################################################################################################### + @t.overload + def __rtruediv__(self: ExprMixin[int], other: ConstOrCallable[int]) -> BinExpr[float]: ... + @t.overload + def __rtruediv__(self: ExprMixin[int], other: ConstOrCallable[bool]) -> BinExpr[float]: ... + @t.overload + def __rtruediv__(self: ExprMixin[bool], other: ConstOrCallable[int]) -> BinExpr[float]: ... + @t.overload + def __rtruediv__(self: ExprMixin[bool], other: ConstOrCallable[bool]) -> BinExpr[float]: ... + @t.overload + def __rtruediv__(self: ExprMixin[float], other: ConstOrCallable[int]) -> BinExpr[float]: ... + @t.overload + def __rtruediv__(self: ExprMixin[float], other: ConstOrCallable[bool]) -> BinExpr[float]: ... + @t.overload + def __rtruediv__(self: ExprMixin[float], other: ConstOrCallable[float]) -> BinExpr[float]: ... + @t.overload + def __rtruediv__(self, other: t.Any) -> BinExpr[t.Any]: ... + + # __rdiv__ ######################################################################################################### + def __rdiv__(self, other: t.Any) -> BinExpr[t.Any]: ... + + # __rmod__ ######################################################################################################### + @t.overload + def __rmod__(self: ExprMixin[int], other: ConstOrCallable[int]) -> BinExpr[int]: ... + @t.overload + def __rmod__(self: ExprMixin[int], other: ConstOrCallable[bool]) -> BinExpr[int]: ... + @t.overload + def __rmod__(self: ExprMixin[bool], other: ConstOrCallable[int]) -> BinExpr[int]: ... + @t.overload + def __rmod__(self: ExprMixin[bool], other: ConstOrCallable[bool]) -> BinExpr[int]: ... + @t.overload + def __rmod__(self: ExprMixin[float], other: ConstOrCallable[int]) -> BinExpr[float]: ... + @t.overload + def __rmod__(self: ExprMixin[float], other: ConstOrCallable[bool]) -> BinExpr[float]: ... 
+ @t.overload + def __rmod__(self: ExprMixin[float], other: ConstOrCallable[float]) -> BinExpr[float]: ... + @t.overload + def __rmod__(self, other: t.Any) -> BinExpr[t.Any]: ... + + # __rpow__ ######################################################################################################### + @t.overload + def __rpow__(self: ExprMixin[int], other: ConstOrCallable[int]) -> BinExpr[int]: ... + @t.overload + def __rpow__(self: ExprMixin[int], other: ConstOrCallable[bool]) -> BinExpr[int]: ... + @t.overload + def __rpow__(self: ExprMixin[bool], other: ConstOrCallable[int]) -> BinExpr[int]: ... + @t.overload + def __rpow__(self: ExprMixin[bool], other: ConstOrCallable[bool]) -> BinExpr[int]: ... + @t.overload + def __rpow__(self: ExprMixin[float], other: ConstOrCallable[int]) -> BinExpr[float]: ... + @t.overload + def __rpow__(self: ExprMixin[float], other: ConstOrCallable[bool]) -> BinExpr[float]: ... + @t.overload + def __rpow__(self: ExprMixin[float], other: ConstOrCallable[float]) -> BinExpr[float]: ... + @t.overload + def __rpow__(self, other: t.Any) -> BinExpr[t.Any]: ... + + # __rxor__ ######################################################################################################### + @t.overload + def __rxor__(self: ExprMixin[int], other: ConstOrCallable[int]) -> BinExpr[int]: ... + @t.overload + def __rxor__(self: ExprMixin[int], other: ConstOrCallable[bool]) -> BinExpr[int]: ... + @t.overload + def __rxor__(self: ExprMixin[bool], other: ConstOrCallable[int]) -> BinExpr[int]: ... + @t.overload + def __rxor__(self, other: t.Any) -> BinExpr[t.Any]: ... + + # __rrshift__ ###################################################################################################### + @t.overload + def __rrshift__(self: ExprMixin[int], other: ConstOrCallable[int]) -> BinExpr[int]: ... + @t.overload + def __rrshift__(self: ExprMixin[int], other: ConstOrCallable[bool]) -> BinExpr[int]: ... 
+ @t.overload + def __rrshift__(self: ExprMixin[bool], other: ConstOrCallable[int]) -> BinExpr[int]: ... + @t.overload + def __rrshift__(self: ExprMixin[bool], other: ConstOrCallable[bool]) -> BinExpr[int]: ... + @t.overload + def __rrshift__(self, other: t.Any) -> BinExpr[t.Any]: ... + + # __rlshift__ ###################################################################################################### + @t.overload + def __rlshift__(self: ExprMixin[int], other: ConstOrCallable[int]) -> BinExpr[int]: ... + @t.overload + def __rlshift__(self: ExprMixin[int], other: ConstOrCallable[bool]) -> BinExpr[int]: ... + @t.overload + def __rlshift__(self: ExprMixin[bool], other: ConstOrCallable[int]) -> BinExpr[int]: ... + @t.overload + def __rlshift__(self: ExprMixin[bool], other: ConstOrCallable[bool]) -> BinExpr[int]: ... + @t.overload + def __rlshift__(self, other: t.Any) -> BinExpr[t.Any]: ... + + # __rand__ ######################################################################################################### + @t.overload + def __rand__(self: ExprMixin[int], other: ConstOrCallable[int]) -> BinExpr[int]: ... + @t.overload + def __rand__(self: ExprMixin[int], other: ConstOrCallable[bool]) -> BinExpr[int]: ... + @t.overload + def __rand__(self: ExprMixin[bool], other: ConstOrCallable[int]) -> BinExpr[int]: ... + @t.overload + def __rand__(self, other: t.Any) -> BinExpr[t.Any]: ... + + # __ror__ ########################################################################################################## + @t.overload + def __ror__(self: ExprMixin[int], other: ConstOrCallable[int]) -> BinExpr[int]: ... + @t.overload + def __ror__(self: ExprMixin[int], other: ConstOrCallable[bool]) -> BinExpr[int]: ... + @t.overload + def __ror__(self: ExprMixin[bool], other: ConstOrCallable[int]) -> BinExpr[int]: ... + @t.overload + def __ror__(self, other: t.Any) -> BinExpr[t.Any]: ... 
+ + # __contains__ ##################################################################################################### + def __contains__(self, other: t.Any) -> BinExpr[t.Any]: ... + + # __gt__ ########################################################################################################### + @t.overload + def __gt__(self: ExprMixin[int], other: ConstOrCallable[int]) -> BinExpr[bool]: ... + @t.overload + def __gt__(self: ExprMixin[int], other: ConstOrCallable[bool]) -> BinExpr[bool]: ... + @t.overload + def __gt__(self: ExprMixin[bool], other: ConstOrCallable[int]) -> BinExpr[bool]: ... + @t.overload + def __gt__(self: ExprMixin[bool], other: ConstOrCallable[bool]) -> BinExpr[bool]: ... + @t.overload + def __gt__(self: ExprMixin[float], other: ConstOrCallable[int]) -> BinExpr[bool]: ... + @t.overload + def __gt__(self: ExprMixin[float], other: ConstOrCallable[bool]) -> BinExpr[bool]: ... + @t.overload + def __gt__(self: ExprMixin[float], other: ConstOrCallable[float]) -> BinExpr[bool]: ... + @t.overload + def __gt__(self, other: t.Any) -> BinExpr[t.Any]: ... + + # __ge__ ########################################################################################################### + @t.overload + def __ge__(self: ExprMixin[int], other: ConstOrCallable[int]) -> BinExpr[bool]: ... + @t.overload + def __ge__(self: ExprMixin[int], other: ConstOrCallable[bool]) -> BinExpr[bool]: ... + @t.overload + def __ge__(self: ExprMixin[bool], other: ConstOrCallable[int]) -> BinExpr[bool]: ... + @t.overload + def __ge__(self: ExprMixin[bool], other: ConstOrCallable[bool]) -> BinExpr[bool]: ... + @t.overload + def __ge__(self: ExprMixin[float], other: ConstOrCallable[int]) -> BinExpr[bool]: ... + @t.overload + def __ge__(self: ExprMixin[float], other: ConstOrCallable[bool]) -> BinExpr[bool]: ... + @t.overload + def __ge__(self: ExprMixin[float], other: ConstOrCallable[float]) -> BinExpr[bool]: ... + @t.overload + def __ge__(self, other: t.Any) -> BinExpr[t.Any]: ... 
+ + # __lt__ ########################################################################################################### + @t.overload + def __lt__(self: ExprMixin[int], other: ConstOrCallable[int]) -> BinExpr[bool]: ... + @t.overload + def __lt__(self: ExprMixin[int], other: ConstOrCallable[bool]) -> BinExpr[bool]: ... + @t.overload + def __lt__(self: ExprMixin[bool], other: ConstOrCallable[int]) -> BinExpr[bool]: ... + @t.overload + def __lt__(self: ExprMixin[bool], other: ConstOrCallable[bool]) -> BinExpr[bool]: ... + @t.overload + def __lt__(self: ExprMixin[float], other: ConstOrCallable[int]) -> BinExpr[bool]: ... + @t.overload + def __lt__(self: ExprMixin[float], other: ConstOrCallable[bool]) -> BinExpr[bool]: ... + @t.overload + def __lt__(self: ExprMixin[float], other: ConstOrCallable[float]) -> BinExpr[bool]: ... + @t.overload + def __lt__(self, other: t.Any) -> BinExpr[t.Any]: ... + + # __le__ ########################################################################################################### + @t.overload + def __le__(self: ExprMixin[int], other: ConstOrCallable[int]) -> BinExpr[bool]: ... + @t.overload + def __le__(self: ExprMixin[int], other: ConstOrCallable[bool]) -> BinExpr[bool]: ... + @t.overload + def __le__(self: ExprMixin[bool], other: ConstOrCallable[int]) -> BinExpr[bool]: ... + @t.overload + def __le__(self: ExprMixin[bool], other: ConstOrCallable[bool]) -> BinExpr[bool]: ... + @t.overload + def __le__(self: ExprMixin[float], other: ConstOrCallable[int]) -> BinExpr[bool]: ... + @t.overload + def __le__(self: ExprMixin[float], other: ConstOrCallable[bool]) -> BinExpr[bool]: ... + @t.overload + def __le__(self: ExprMixin[float], other: ConstOrCallable[float]) -> BinExpr[bool]: ... + @t.overload + def __le__(self, other: t.Any) -> BinExpr[t.Any]: ... 
+ + # __eq__ ########################################################################################################### + @t.overload + def __eq__(self: ExprMixin[int], other: ConstOrCallable[int]) -> BinExpr[bool]: ... + @t.overload + def __eq__(self: ExprMixin[int], other: ConstOrCallable[bool]) -> BinExpr[bool]: ... + @t.overload + def __eq__(self: ExprMixin[bool], other: ConstOrCallable[int]) -> BinExpr[bool]: ... + @t.overload + def __eq__(self: ExprMixin[bool], other: ConstOrCallable[bool]) -> BinExpr[bool]: ... + @t.overload + def __eq__(self: ExprMixin[float], other: ConstOrCallable[int]) -> BinExpr[bool]: ... + @t.overload + def __eq__(self: ExprMixin[float], other: ConstOrCallable[bool]) -> BinExpr[bool]: ... + @t.overload + def __eq__(self: ExprMixin[float], other: ConstOrCallable[float]) -> BinExpr[bool]: ... + @t.overload + def __eq__(self, other: t.Any) -> BinExpr[t.Any]: ... + + # __ne__ ########################################################################################################### + @t.overload + def __ne__(self: ExprMixin[int], other: ConstOrCallable[int]) -> BinExpr[bool]: ... + @t.overload + def __ne__(self: ExprMixin[int], other: ConstOrCallable[bool]) -> BinExpr[bool]: ... + @t.overload + def __ne__(self: ExprMixin[bool], other: ConstOrCallable[int]) -> BinExpr[bool]: ... + @t.overload + def __ne__(self: ExprMixin[bool], other: ConstOrCallable[bool]) -> BinExpr[bool]: ... + @t.overload + def __ne__(self: ExprMixin[float], other: ConstOrCallable[int]) -> BinExpr[bool]: ... + @t.overload + def __ne__(self: ExprMixin[float], other: ConstOrCallable[bool]) -> BinExpr[bool]: ... + @t.overload + def __ne__(self: ExprMixin[float], other: ConstOrCallable[float]) -> BinExpr[bool]: ... + @t.overload + def __ne__(self, other: t.Any) -> BinExpr[t.Any]: ... + + # __neg__ ########################################################################################################## + @t.overload + def __neg__(self: ExprMixin[int]) -> BinExpr[int]: ... 
+ @t.overload + def __neg__(self: ExprMixin[bool]) -> BinExpr[int]: ... + @t.overload + def __neg__(self: ExprMixin[float]) -> BinExpr[float]: ... + @t.overload + def __neg__(self) -> UniExpr[t.Any]: ... + + # __pos__ ########################################################################################################## + @t.overload + def __pos__(self: ExprMixin[int]) -> BinExpr[int]: ... + @t.overload + def __pos__(self: ExprMixin[bool]) -> BinExpr[int]: ... + @t.overload + def __pos__(self: ExprMixin[float]) -> BinExpr[float]: ... + @t.overload + def __pos__(self) -> UniExpr[t.Any]: ... + + # __invert__ ####################################################################################################### + @t.overload + def __invert__(self: ExprMixin[int]) -> BinExpr[int]: ... + @t.overload + def __invert__(self: ExprMixin[bool]) -> BinExpr[int]: ... + @t.overload + def __invert__(self) -> UniExpr[t.Any]: ... + + # __inv__ ########################################################################################################## + def __inv__(self) -> UniExpr[t.Any]: ... + +class UniExpr(ExprMixin[ReturnType]): + def __init__(self, op: UniOperator, operand: t.Any) -> None: ... + def __call__(self, obj: t.Union[Context, dict[str, t.Any], t.Any], *args: t.Any) -> ReturnType: ... + +class BinExpr(ExprMixin[ReturnType]): + def __init__(self, op: BinOperator, lhs: t.Any, rhs: t.Any) -> None: ... + def __call__(self, obj: t.Union[Context, dict[str, t.Any], t.Any], *args: t.Any) -> ReturnType: ... + +class Path(ExprMixin[ReturnType]): + def __init__(self, name: str, field: t.Optional[str] = ..., parent: t.Optional[Path[t.Any]] = ...) -> None: ... + def __call__(self, obj: t.Union[Context, dict[str, t.Any], t.Any], *args: t.Any) -> ReturnType: ... + def __getattr__(self, name: str) -> Path[t.Any]: ... + def __getitem__(self, name: str) -> Path[t.Any]: ... 
+ + +class Path2(ExprMixin[ReturnType]): + def __init__(self, name: str, index: t.Optional[int] = ..., parent: t.Optional[Path2[t.Any]] = ...) -> None: ... + def __call__(self, *args: t.Any) -> ReturnType: ... + def __getitem__(self, index: int) -> Path2[t.Any]: ... + + +class FuncPath(ExprMixin[ReturnType]): + def __init__(self, func: t.Callable[[t.Any], t.Any], operand: t.Optional[t.Any] = ...) -> None: ... + def __call__(self, operand: t.Any, *args: t.Any) -> ReturnType: ... + + +this: Path[t.Any] +obj_: Path[t.Any] +list_: Path2[t.Any] + +len_: FuncPath[int] +sum_: FuncPath[int] +min_: FuncPath[int] +max_: FuncPath[int] +abs_: FuncPath[int] diff --git a/.venv/lib/python3.9/site-packages/construct-stubs/lib/__init__.pyi b/.venv/lib/python3.9/site-packages/construct-stubs/lib/__init__.pyi new file mode 100644 index 0000000..24c6562 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/construct-stubs/lib/__init__.pyi @@ -0,0 +1,52 @@ +from construct.lib.binary import * +from construct.lib.bitstream import * +from construct.lib.containers import * +from construct.lib.hex import * +from construct.lib.py3compat import * + +__all__ = [ + 'bits2bytes', + 'bits2integer', + 'byte2int', + 'bytes2bits', + 'bytes2integer', + 'bytes2integers', + 'bytes2str', + 'bytestringtype', + 'Container', + 'globalPrintFalseFlags', + 'globalPrintFullStrings', + 'HexDisplayedBytes', + 'HexDisplayedDict', + 'HexDisplayedInteger', + 'hexdump', + 'HexDumpDisplayedBytes', + 'HexDumpDisplayedDict', + 'hexlify', + 'hexundump', + 'int2byte', + 'integer2bits', + 'integer2bytes', + 'integers2bytes', + 'integertypes', + 'ListContainer', + 'ONWINDOWS', + 'PY', + 'PY2', + 'PY3', + 'PYPY', + 'RebufferedBytesIO', + 'reprstring', + 'RestreamedBytesIO', + 'setGlobalPrintFalseFlags', + 'setGlobalPrintFullStrings', + 'setGlobalPrintPrivateEntries', + 'str2bytes', + 'stringtypes', + 'swapbitsinbytes', + 'swapbytes', + 'swapbytesinbits', + 'trimstring', + 'unhexlify', + 'unicodestringtype', +] diff --git 
a/.venv/lib/python3.9/site-packages/construct-stubs/lib/binary.pyi b/.venv/lib/python3.9/site-packages/construct-stubs/lib/binary.pyi new file mode 100644 index 0000000..69ef57a --- /dev/null +++ b/.venv/lib/python3.9/site-packages/construct-stubs/lib/binary.pyi @@ -0,0 +1,11 @@ +def integer2bits(number: int, width: int, signed: bool = ...) -> bytes: ... +def integer2bytes(number: int, width: int, signed: bool = ...) -> bytes: ... +def bits2integer(data: bytes, signed: bool = ...) -> int: ... +def bytes2integer(data: bytes, signed: bool = ...) -> int: ... +def bytes2bits(data: bytes) -> bytes: ... +def bits2bytes(data: bytes) -> bytes: ... +def swapbytes(data: bytes) -> bytes: ... +def swapbytesinbits(data: bytes) -> bytes: ... +def swapbitsinbytes(data: bytes) -> bytes: ... +def hexlify(data: bytes) -> bytes: ... +def unhexlify(data: bytes) -> bytes: ... diff --git a/.venv/lib/python3.9/site-packages/construct-stubs/lib/bitstream.pyi b/.venv/lib/python3.9/site-packages/construct-stubs/lib/bitstream.pyi new file mode 100644 index 0000000..6785ea5 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/construct-stubs/lib/bitstream.pyi @@ -0,0 +1,45 @@ +import io +import typing as t + +class RestreamedBytesIO(object): + substream: t.Optional[io.BytesIO] + encoder: t.Callable[[bytes], bytes] + encoderunit: int + decoder: t.Callable[[bytes], bytes] + decoderunit: int + rbuffer: bytes + wbuffer: bytes + sincereadwritten: int + def __init__( + self, + substream: t.Optional[io.BytesIO], + decoder: t.Callable[[bytes], bytes], + decoderunit: int, + encoder: t.Callable[[bytes], bytes], + encoderunit: int, + ) -> None: ... + def read(self, count: t.Optional[int] = ...) -> bytes: ... + def write(self, data: t.Union[bytes, bytearray, memoryview]) -> int: ... + def close(self) -> None: ... + def seek(self, at: int, whence: int = ...) -> int: ... + def seekable(self) -> bool: ... + def tell(self) -> int: ... + def tellable(self) -> bool: ... 
+ +class RebufferedBytesIO(object): + substream: t.Optional[io.BytesIO] + offset: int + rwbuffer: bytes + moved: int + tailcutoff: t.Optional[int] + def __init__( + self, substream: t.Optional[io.BytesIO], tailcutoff: t.Optional[int] = ... + ) -> None: ... + def read(self, count: t.Optional[int] = ...) -> bytes: ... + def write(self, data: t.Union[bytes, bytearray, memoryview]) -> int: ... + def seek(self, at: int, whence: int = ...) -> int: ... + def seekable(self) -> bool: ... + def tell(self) -> int: ... + def tellable(self) -> bool: ... + def cachedfrom(self) -> int: ... + def cachedto(self) -> int: ... diff --git a/.venv/lib/python3.9/site-packages/construct-stubs/lib/containers.pyi b/.venv/lib/python3.9/site-packages/construct-stubs/lib/containers.pyi new file mode 100644 index 0000000..a50033a --- /dev/null +++ b/.venv/lib/python3.9/site-packages/construct-stubs/lib/containers.pyi @@ -0,0 +1,31 @@ +import re +import typing as t + +ContainerType = t.TypeVar("ContainerType") +ListType = t.TypeVar("ListType") + +SearchPattern = t.Union[t.AnyStr, re.Pattern[t.AnyStr]] + +globalPrintFullStrings: bool +globalPrintFalseFlags: bool +globalPrintPrivateEntries: bool + +def setGlobalPrintFullStrings(enabled: bool = ...) -> None: ... +def setGlobalPrintFalseFlags(enabled: bool = ...) -> None: ... +def setGlobalPrintPrivateEntries(enabled: bool = ...) -> None: ... +def recursion_lock( + retval: str = ..., lock_name: str = ... +) -> t.Callable[[t.Callable[..., str]], t.Callable[..., str]]: ... + +class Container(t.Generic[ContainerType], t.Dict[str, ContainerType]): + def __getattr__(self, name: str) -> ContainerType: ... + def update( + self, + seqordict: t.Union[t.Dict[str, ContainerType], t.Tuple[str, ContainerType]], + ) -> None: ... + def search(self, pattern: SearchPattern[t.Any]) -> t.Any: ... + def search_all(self, pattern: SearchPattern[t.Any]) -> t.Any: ... + +class ListContainer(t.List[ListType]): + def search(self, pattern: SearchPattern[t.Any]) -> t.Any: ... 
+ def search_all(self, pattern: SearchPattern[t.Any]) -> t.Any: ... diff --git a/.venv/lib/python3.9/site-packages/construct-stubs/lib/hex.pyi b/.venv/lib/python3.9/site-packages/construct-stubs/lib/hex.pyi new file mode 100644 index 0000000..afa985f --- /dev/null +++ b/.venv/lib/python3.9/site-packages/construct-stubs/lib/hex.pyi @@ -0,0 +1,12 @@ +import typing as t + + +class HexDisplayedInteger(int): ... +class HexDisplayedBytes(bytes): ... + +K = t.TypeVar("K") +V = t.TypeVar("V") + +class HexDisplayedDict(t.Dict[K, V]): ... +class HexDumpDisplayedBytes(bytes): ... +class HexDumpDisplayedDict(t.Dict[K, V]): ... diff --git a/.venv/lib/python3.9/site-packages/construct-stubs/lib/py3compat.pyi b/.venv/lib/python3.9/site-packages/construct-stubs/lib/py3compat.pyi new file mode 100644 index 0000000..f105096 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/construct-stubs/lib/py3compat.pyi @@ -0,0 +1,20 @@ +import typing as t + +PY2: bool +PY3: bool +PYPY: bool +ONWINDOWS: bool + +stringtypes: t.Tuple[t.Type[bytes], t.Type[str]] +integertypes: t.Tuple[t.Type[int]] +unicodestringtype: t.Type[str] +bytestringtype: t.Type[bytes] + +def int2byte(character: int) -> bytes: ... +def byte2int(character: bytes) -> int: ... +def str2bytes(string: str) -> bytes: ... +def bytes2str(string: bytes) -> str: ... +def reprstring(data: t.Union[bytes, str]) -> str: ... +def trimstring(data: t.Union[bytes, str]) -> str: ... +def integers2bytes(ints: t.Iterable[int]) -> bytes: ... +def bytes2integers(data: bytes) -> list[int]: ... 
diff --git a/.venv/lib/python3.9/site-packages/construct-stubs/version.pyi b/.venv/lib/python3.9/site-packages/construct-stubs/version.pyi new file mode 100644 index 0000000..79441c7 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/construct-stubs/version.pyi @@ -0,0 +1,5 @@ +import typing as t + +version: t.Tuple[int, int, int] +version_string: str +release_date: str diff --git a/.venv/lib/python3.9/site-packages/construct/__init__.py b/.venv/lib/python3.9/site-packages/construct/__init__.py new file mode 100644 index 0000000..fdc718b --- /dev/null +++ b/.venv/lib/python3.9/site-packages/construct/__init__.py @@ -0,0 +1,211 @@ +r""" +Construct 2 -- Parsing Made Fun + +Homepage: + https://github.com/construct/construct + http://construct.readthedocs.org + +Hands-on example: + >>> from construct import * + >>> s = Struct( + ... "a" / Byte, + ... "b" / Short, + ... ) + >>> print s.parse(b"\x01\x02\x03") + Container: + a = 1 + b = 515 + >>> s.build(Container(a=1, b=0x0203)) + b"\x01\x02\x03" +""" + +from construct.core import * +from construct.expr import * +from construct.debug import * +from construct.version import * +from construct import lib + + +#=============================================================================== +# metadata +#=============================================================================== +__author__ = "Arkadiusz Bulski , Tomer Filiba , Corbin Simpson " +__version__ = version_string + +#=============================================================================== +# exposed names +#=============================================================================== +__all__ = [ + '__author__', + '__version__', + 'abs_', + 'AdaptationError', + 'Adapter', + 'Aligned', + 'AlignedStruct', + 'Array', + 'Bit', + 'BitsInteger', + 'BitsSwapped', + 'BitStruct', + 'BitwisableString', + 'Bitwise', + 'Byte', + 'Bytes', + 'BytesInteger', + 'ByteSwapped', + 'Bytewise', + 'CancelParsing', + 'Check', + 'CheckError', + 'Checksum', + 
'ChecksumError', + 'Compiled', + 'Compressed', + 'CompressedLZ4', + 'Computed', + 'Const', + 'ConstError', + 'Construct', + 'ConstructError', + 'Container', + 'CString', + 'Debugger', + 'Default', + 'Double', + 'Enum', + 'EnumInteger', + 'EnumIntegerString', + 'Error', + 'ExplicitError', + 'ExprAdapter', + 'ExprSymmetricAdapter', + 'ExprValidator', + 'Filter', + 'FixedSized', + 'Flag', + 'FlagsEnum', + 'FocusedSeq', + 'FormatField', + 'FormatFieldError', + 'FuncPath', + 'globalPrintFalseFlags', + 'globalPrintFullStrings', + 'GreedyBytes', + 'GreedyRange', + 'GreedyString', + 'Half', + 'Hex', + 'HexDump', + 'If', + 'IfThenElse', + 'Index', + 'IndexFieldError', + 'Indexing', + 'Int', + 'IntegerError', + 'Lazy', + 'LazyArray', + 'LazyBound', + 'LazyContainer', + 'LazyListContainer', + 'LazyStruct', + 'len_', + 'lib', + 'list_', + 'ListContainer', + 'Long', + 'Mapping', + 'MappingError', + 'max_', + 'min_', + 'NamedTuple', + 'NamedTupleError', + 'Nibble', + 'NoneOf', + 'NullStripped', + 'NullTerminated', + 'Numpy', + 'obj_', + 'Octet', + 'OneOf', + 'Optional', + 'Padded', + 'PaddedString', + 'Padding', + 'PaddingError', + 'PascalString', + 'Pass', + 'Path', + 'Path2', + 'Peek', + 'Pickled', + 'Pointer', + 'possiblestringencodings', + 'Prefixed', + 'PrefixedArray', + 'Probe', + 'ProcessRotateLeft', + 'ProcessXor', + 'RangeError', + 'RawCopy', + 'Rebuffered', + 'RebufferedBytesIO', + 'Rebuild', + 'release_date', + 'Renamed', + 'RepeatError', + 'RepeatUntil', + 'RestreamData', + 'Restreamed', + 'RestreamedBytesIO', + 'RotationError', + 'Seek', + 'Select', + 'SelectError', + 'Sequence', + 'setGlobalPrintFalseFlags', + 'setGlobalPrintFullStrings', + 'setGlobalPrintPrivateEntries', + 'Short', + 'Single', + 'SizeofError', + 'Slicing', + 'StopFieldError', + 'StopIf', + 'stream_iseof', + 'stream_read', + 'stream_read_entire', + 'stream_seek', + 'stream_size', + 'stream_tell', + 'stream_write', + 'StreamError', + 'StringEncoded', + 'StringError', + 'Struct', + 'Subconstruct', + 
'sum_', + 'Switch', + 'SwitchError', + 'SymmetricAdapter', + 'Tell', + 'Terminated', + 'TerminatedError', + 'this', + 'Timestamp', + 'TimestampAdapter', + 'TimestampError', + 'Transformed', + 'Tunnel', + 'Union', + 'UnionError', + 'ValidationError', + 'Validator', + 'VarInt', + 'version', + 'version_string', + 'ZigZag', +] +__all__ += ["Int%s%s%s" % (n,us,bln) for n in (8,16,24,32,64) for us in "us" for bln in "bln"] +__all__ += ["Float%s%s" % (n,bln) for n in (16,32,64) for bln in "bln"] diff --git a/.venv/lib/python3.9/site-packages/construct/core.py b/.venv/lib/python3.9/site-packages/construct/core.py new file mode 100644 index 0000000..ca39377 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/construct/core.py @@ -0,0 +1,6111 @@ +# -*- coding: utf-8 -*- + +import struct, io, binascii, itertools, collections, pickle, sys, os, hashlib, importlib + +from construct.lib import * +from construct.expr import * +from construct.version import * + + +#=============================================================================== +# exceptions +#=============================================================================== +class ConstructError(Exception): + def __init__(self, message='', path=None): + self.path = path + if path is None: + super().__init__(message) + else: + message = "Error in path {}\n".format(path) + message + super().__init__(message) +class SizeofError(ConstructError): + pass +class AdaptationError(ConstructError): + pass +class ValidationError(ConstructError): + pass +class StreamError(ConstructError): + pass +class FormatFieldError(ConstructError): + pass +class IntegerError(ConstructError): + pass +class StringError(ConstructError): + pass +class MappingError(ConstructError): + pass +class RangeError(ConstructError): + pass +class RepeatError(ConstructError): + pass +class ConstError(ConstructError): + pass +class IndexFieldError(ConstructError): + pass +class CheckError(ConstructError): + pass +class ExplicitError(ConstructError): + pass 
+class NamedTupleError(ConstructError): + pass +class TimestampError(ConstructError): + pass +class UnionError(ConstructError): + pass +class SelectError(ConstructError): + pass +class SwitchError(ConstructError): + pass +class StopFieldError(ConstructError): + pass +class PaddingError(ConstructError): + pass +class TerminatedError(ConstructError): + pass +class RawCopyError(ConstructError): + pass +class RotationError(ConstructError): + pass +class ChecksumError(ConstructError): + pass +class CancelParsing(ConstructError): + pass + + +#=============================================================================== +# used internally +#=============================================================================== +def singleton(arg): + x = arg() + return x + + +def stream_read(stream, length, path): + if length < 0: + raise StreamError("length must be non-negative, found %s" % length, path=path) + try: + data = stream.read(length) + except Exception: + raise StreamError("stream.read() failed, requested %s bytes" % (length,), path=path) + if len(data) != length: + raise StreamError("stream read less than specified amount, expected %d, found %d" % (length, len(data)), path=path) + return data + + +def stream_read_entire(stream, path): + try: + return stream.read() + except Exception: + raise StreamError("stream.read() failed when reading until EOF", path=path) + + +def stream_write(stream, data, length, path): + if not isinstance(data, bytestringtype): + raise StringError("given non-bytes value, perhaps unicode? 
%r" % (data,), path=path) + if length < 0: + raise StreamError("length must be non-negative, found %s" % length, path=path) + if len(data) != length: + raise StreamError("bytes object of wrong length, expected %d, found %d" % (length, len(data)), path=path) + try: + written = stream.write(data) + except Exception: + raise StreamError("stream.write() failed, given %r" % (data,), path=path) + if written != length: + raise StreamError("stream written less than specified, expected %d, written %d" % (length, written), path=path) + + +def stream_seek(stream, offset, whence, path): + try: + return stream.seek(offset, whence) + except Exception: + raise StreamError("stream.seek() failed, offset %s, whence %s" % (offset, whence), path=path) + + +def stream_tell(stream, path): + try: + return stream.tell() + except Exception: + raise StreamError("stream.tell() failed", path=path) + + +def stream_size(stream): + fallback = stream.tell() + end = stream.seek(0, 2) + stream.seek(fallback) + return end + + +def stream_iseof(stream): + fallback = stream.tell() + data = stream.read(1) + stream.seek(fallback) + return not data + + +class CodeGen: + def __init__(self): + self.blocks = [] + self.nextid = 0 + self.parsercache = {} + self.buildercache = {} + self.linkedinstances = {} + self.linkedparsers = {} + self.linkedbuilders = {} + + def allocateId(self): + self.nextid += 1 + return self.nextid + + def append(self, block): + block = [s for s in block.splitlines() if s.strip()] + firstline = block[0] + trim = len(firstline) - len(firstline.lstrip()) + block = "\n".join(s[trim:] for s in block) + if block not in self.blocks: + self.blocks.append(block) + + def toString(self): + return "\n".join(self.blocks + [""]) + + +class KsyGen: + def __init__(self): + self.instances = {} + self.enums = {} + self.types = {} + self.nextid = 0 + + def allocateId(self): + self.nextid += 1 + return self.nextid + + +def hyphenatedict(d): + return {k.replace("_","-").rstrip("-"):v for k,v in 
d.items()} + + +def hyphenatelist(l): + return [hyphenatedict(d) for d in l] + + +def extractfield(sc): + if isinstance(sc, Renamed): + return extractfield(sc.subcon) + return sc + + +def evaluate(param, context): + return param(context) if callable(param) else param + + +#=============================================================================== +# abstract constructs +#=============================================================================== +class Construct(object): + r""" + The mother of all constructs. + + This object is generally not directly instantiated, and it does not directly implement parsing and building, so it is largely only of interest to subclass implementors. There are also other abstract classes sitting on top of this one. + + The external user API: + + * `parse` + * `parse_stream` + * `parse_file` + * `build` + * `build_stream` + * `build_file` + * `sizeof` + * `compile` + * `benchmark` + + Subclass authors should not override the external methods. Instead, another API is available: + + * `_parse` + * `_build` + * `_sizeof` + * `_actualsize` + * `_emitparse` + * `_emitbuild` + * `_emitseq` + * `_emitprimitivetype` + * `_emitfulltype` + * `__getstate__` + * `__setstate__` + + Attributes and Inheritance: + + All constructs have a name and flags. The name is used for naming struct members and context dictionaries. Note that the name can be a string, or None by default. A single underscore "_" is a reserved name, used as up-level in nested containers. The name should be descriptive, short, and valid as a Python identifier, although these rules are not enforced. The flags specify additional behavioral information about this construct. Flags are used by enclosing constructs to determine a proper course of action. Flags are often inherited from inner subconstructs but that depends on each class. 
+ """ + + def __init__(self): + self.name = None + self.docs = "" + self.flagbuildnone = False + self.parsed = None + + def __repr__(self): + return "<%s%s%s%s>" % (self.__class__.__name__, " "+self.name if self.name else "", " +nonbuild" if self.flagbuildnone else "", " +docs" if self.docs else "", ) + + def __getstate__(self): + attrs = {} + if hasattr(self, "__dict__"): + attrs.update(self.__dict__) + slots = [] + c = self.__class__ + while c is not None: + if hasattr(c, "__slots__"): + slots.extend(c.__slots__) + c = c.__base__ + for name in slots: + if hasattr(self, name): + attrs[name] = getattr(self, name) + return attrs + + def __setstate__(self, attrs): + for name, value in attrs.items(): + setattr(self, name, value) + + def __copy__(self): + self2 = object.__new__(self.__class__) + self2.__setstate__(self.__getstate__()) + return self2 + + def parse(self, data, **contextkw): + r""" + Parse an in-memory buffer (often bytes object). Strings, buffers, memoryviews, and other complete buffers can be parsed with this method. + + Whenever data cannot be read, ConstructError or its derivative is raised. This method is NOT ALLOWED to raise any other exceptions although (1) user-defined lambdas can raise arbitrary exceptions which are propagated (2) external libraries like numpy can raise arbitrary exceptions which are propagated (3) some list and dict lookups can raise IndexError and KeyError which are propagated. + + Context entries are passed only as keyword parameters \*\*contextkw. + + :param \*\*contextkw: context entries, usually empty + + :returns: some value, usually based on bytes read from the stream but sometimes it is computed from nothing or from the context dictionary, sometimes its non-deterministic + + :raises ConstructError: raised for any reason + """ + return self.parse_stream(io.BytesIO(data), **contextkw) + + def parse_stream(self, stream, **contextkw): + r""" + Parse a stream. 
Files, pipes, sockets, and other streaming sources of data are handled by this method. See parse(). + """ + context = Container(**contextkw) + context._parsing = True + context._building = False + context._sizing = False + context._params = context + try: + return self._parsereport(stream, context, "(parsing)") + except CancelParsing: + pass + + def parse_file(self, filename, **contextkw): + r""" + Parse a closed binary file. See parse(). + """ + with open(filename, 'rb') as f: + return self.parse_stream(f, **contextkw) + + def _parsereport(self, stream, context, path): + obj = self._parse(stream, context, path) + if self.parsed is not None: + self.parsed(obj, context) + return obj + + def _parse(self, stream, context, path): + """Override in your subclass.""" + raise NotImplementedError + + def build(self, obj, **contextkw): + r""" + Build an object in memory (a bytes object). + + Whenever data cannot be written, ConstructError or its derivative is raised. This method is NOT ALLOWED to raise any other exceptions although (1) user-defined lambdas can raise arbitrary exceptions which are propagated (2) external libraries like numpy can raise arbitrary exceptions which are propagated (3) some list and dict lookups can raise IndexError and KeyError which are propagated. + + Context entries are passed only as keyword parameters \*\*contextkw. + + :param \*\*contextkw: context entries, usually empty + + :returns: bytes + + :raises ConstructError: raised for any reason + """ + stream = io.BytesIO() + self.build_stream(obj, stream, **contextkw) + return stream.getvalue() + + def build_stream(self, obj, stream, **contextkw): + r""" + Build an object directly into a stream. See build(). 
+ """ + context = Container(**contextkw) + context._parsing = False + context._building = True + context._sizing = False + context._params = context + self._build(obj, stream, context, "(building)") + + def build_file(self, obj, filename, **contextkw): + r""" + Build an object into a closed binary file. See build(). + """ + # Open the file for reading as well as writing. This allows builders to + # read back the stream just written. For example. RawCopy does this. + # See issue #888. + with open(filename, 'w+b') as f: + self.build_stream(obj, f, **contextkw) + + def _build(self, obj, stream, context, path): + """Override in your subclass.""" + raise NotImplementedError + + def sizeof(self, **contextkw): + r""" + Calculate the size of this object, optionally using a context. + + Some constructs have fixed size (like FormatField), some have variable-size and can determine their size given a context entry (like Bytes(this.otherfield1)), and some cannot determine their size (like VarInt). + + Whenever size cannot be determined, SizeofError is raised. This method is NOT ALLOWED to raise any other exception, even if eg. context dictionary is missing a key, or subcon propagates ConstructError-derivative exception. + + Context entries are passed only as keyword parameters \*\*contextkw. 
+ + :param \*\*contextkw: context entries, usually empty + + :returns: integer if computable, SizeofError otherwise + + :raises SizeofError: size could not be determined in actual context, or is impossible to be determined + """ + context = Container(**contextkw) + context._parsing = False + context._building = False + context._sizing = True + context._params = context + return self._sizeof(context, "(sizeof)") + + def _sizeof(self, context, path): + """Override in your subclass.""" + raise SizeofError(path=path) + + def _actualsize(self, stream, context, path): + return self._sizeof(context, path) + + def compile(self, filename=None): + """ + Transforms a construct into another construct that does same thing (has same parsing and building semantics) but is much faster when parsing. Already compiled instances just compile into itself. + + Optionally, partial source code can be saved to a text file. This is meant only to inspect the generated code, not to import it from external scripts. + + :returns: Compiled instance + """ + + code = CodeGen() + code.append(""" + # generated by Construct, this source is for inspection only! do not import! 
+ + from construct import * + from construct.lib import * + from io import BytesIO + import struct + import collections + import itertools + + def restream(data, func): + return func(BytesIO(data)) + def reuse(obj, func): + return func(obj) + + linkedinstances = {} + linkedparsers = {} + linkedbuilders = {} + + len_ = len + sum_ = sum + min_ = min + max_ = max + abs_ = abs + """) + code.append(f""" + def parseall(io, this): + return {self._compileparse(code)} + def buildall(obj, io, this): + return {self._compilebuild(code)} + compiled = Compiled(parseall, buildall) + """) + source = code.toString() + + if filename: + with open(filename, "wt") as f: + f.write(source) + + modulename = hexlify(hashlib.sha1(source.encode()).digest()).decode() + module_spec = importlib.machinery.ModuleSpec(modulename, None) + module = importlib.util.module_from_spec(module_spec) + c = compile(source, '', 'exec') + exec(c, module.__dict__) + + module.linkedinstances = code.linkedinstances + module.linkedparsers = code.linkedparsers + module.linkedbuilders = code.linkedbuilders + compiled = module.compiled + compiled.source = source + compiled.module = module + compiled.modulename = modulename + compiled.defersubcon = self + return compiled + + def _compileinstance(self, code): + """Used internally.""" + if id(self) in code.linkedinstances: + return + code.append(f""" + # linkedinstances[{id(self)}] is {self} + """) + field = extractfield(self) + code.linkedinstances[id(self)] = field + code.linkedparsers[id(self)] = field._parse + code.linkedbuilders[id(self)] = field._build + + def _compileparse(self, code): + """Used internally.""" + try: + if id(self) in code.parsercache: + return code.parsercache[id(self)] + emitted = self._emitparse(code) + code.parsercache[id(self)] = emitted + return emitted + except NotImplementedError: + self._compileinstance(code) + return f"linkedparsers[{id(self)}](io, this, '(???)')" + + def _compilebuild(self, code): + """Used internally.""" + try: + if 
id(self) in code.buildercache: + return code.buildercache[id(self)] + emitted = self._emitbuild(code) + code.buildercache[id(self)] = emitted + return emitted + except NotImplementedError: + self._compileinstance(code) + return f"linkedbuilders[{id(self)}](obj, io, this, '(???)')" + + def _emitparse(self, code): + """Override in your subclass.""" + raise NotImplementedError + + def _emitbuild(self, code): + """Override in your subclass.""" + raise NotImplementedError + + def benchmark(self, sampledata, filename=None): + """ + Measures performance of your construct (its parsing and building runtime), both for the original instance and the compiled instance. Uses timeit module, over at min 1 loop, and at max over 100 millisecond time. + + Optionally, results are saved to a text file for later inspection. Otherwise you can print the resulting string to terminal. + + :param sampledata: bytes, a valid blob parsable by this construct + :param filename: optional, string, results are saved to that file + + :returns: string containing measurements + """ + from timeit import timeit + + sampleobj = self.parse(sampledata) + parsetime = timeit(lambda: self.parse(sampledata), number=1) + runs = int(0.1/parsetime) + if runs > 1: + parsetime = timeit(lambda: self.parse(sampledata), number=runs)/runs + parsetime = "{:.10f} sec/call".format(parsetime) + + self.build(sampleobj) + buildtime = timeit(lambda: self.build(sampleobj), number=1) + runs = int(0.1/buildtime) + if runs > 1: + buildtime = timeit(lambda: self.build(sampleobj), number=runs)/runs + buildtime = "{:.10f} sec/call".format(buildtime) + + compiled = self.compile() + compiled.parse(sampledata) + parsetime2 = timeit(lambda: compiled.parse(sampledata), number=1) + runs = int(0.1/parsetime2) + if runs > 1: + parsetime2 = timeit(lambda: compiled.parse(sampledata), number=runs)/runs + parsetime2 = "{:.10f} sec/call".format(parsetime2) + + compiled.build(sampleobj) + buildtime2 = timeit(lambda: compiled.build(sampleobj), 
number=1) + runs = int(0.1/buildtime2) + if runs > 1: + buildtime2 = timeit(lambda: compiled.build(sampleobj), number=runs)/runs + buildtime2 = "{:.10f} sec/call".format(buildtime2) + + lines = [ + "Compiled instance performance:", + "parsing: {}", + "parsing compiled: {}", + "building: {}", + "building compiled: {}", + "" + ] + results = "\n".join(lines).format(parsetime, parsetime2, buildtime, buildtime2) + + if filename: + with open(filename, "wt") as f: + f.write(results) + + return results + + def export_ksy(self, schemaname="unnamed_schema", filename=None): + from ruamel.yaml import YAML + yaml = YAML() + yaml.default_flow_style = False + output = io.StringIO() + gen = KsyGen() + main = dict(meta=dict(id=schemaname), seq=self._compileseq(gen), instances=gen.instances, enums=gen.enums, types=gen.types) + yaml.dump(main, output) + source = output.getvalue() + + if filename: + with open(filename, "wt") as f: + f.write(source) + return source + + def _compileseq(self, ksy, bitwise=False, recursion=0): + if recursion >= 3: + raise ConstructError("construct does not implement KSY export") + try: + return hyphenatelist(self._emitseq(ksy, bitwise)) + except NotImplementedError: + return [dict(id="x", **self._compilefulltype(ksy, bitwise, recursion+1))] + + def _compileprimitivetype(self, ksy, bitwise=False, recursion=0): + if recursion >= 3: + raise ConstructError("construct does not implement KSY export") + try: + return self._emitprimitivetype(ksy, bitwise) + except NotImplementedError: + name = "type_%s" % ksy.allocateId() + ksy.types[name] = dict(seq=self._compileseq(ksy, bitwise, recursion+1)) + return name + + def _compilefulltype(self, ksy, bitwise=False, recursion=0): + if recursion >= 3: + raise ConstructError("construct does not implement KSY export") + try: + return hyphenatedict(self._emitfulltype(ksy, bitwise)) + except NotImplementedError: + return dict(type=self._compileprimitivetype(ksy, bitwise, recursion+1)) + + def _emitseq(self, ksy, bitwise): + 
"""Override in your subclass.""" + raise NotImplementedError + + def _emitprimitivetype(self, ksy, bitwise): + """Override in your subclass.""" + raise NotImplementedError + + def _emitfulltype(self, ksy, bitwise): + """Override in your subclass.""" + raise NotImplementedError + + def __rtruediv__(self, name): + """ + Used for renaming subcons, usually part of a Struct, like Struct("index" / Byte). + """ + return Renamed(self, newname=name) + + __rdiv__ = __rtruediv__ + + def __mul__(self, other): + """ + Used for adding docstrings and parsed hooks to subcons, like "field" / Byte * "docstring" * processfunc. + """ + if isinstance(other, stringtypes): + return Renamed(self, newdocs=other) + if callable(other): + return Renamed(self, newparsed=other) + raise ConstructError("operator * can only be used with string or lambda") + + def __rmul__(self, other): + """ + Used for adding docstrings and parsed hooks to subcons, like "field" / Byte * "docstring" * processfunc. + """ + if isinstance(other, stringtypes): + return Renamed(self, newdocs=other) + if callable(other): + return Renamed(self, newparsed=other) + raise ConstructError("operator * can only be used with string or lambda") + + def __add__(self, other): + """ + Used for making Struct like ("index"/Byte + "prefix"/Byte). + """ + lhs = self.subcons if isinstance(self, Struct) else [self] + rhs = other.subcons if isinstance(other, Struct) else [other] + return Struct(*(lhs + rhs)) + + def __rshift__(self, other): + """ + Used for making Sequences like (Byte >> Short). + """ + lhs = self.subcons if isinstance(self, Sequence) else [self] + rhs = other.subcons if isinstance(other, Sequence) else [other] + return Sequence(*(lhs + rhs)) + + def __getitem__(self, count): + """ + Used for making Arrays like Byte[5] and Byte[this.count]. 
+ """ + if isinstance(count, slice): + raise ConstructError("subcon[N] syntax can only be used for Arrays, use GreedyRange(subcon) instead?") + if isinstance(count, int) or callable(count): + return Array(count, self) + raise ConstructError("subcon[N] syntax expects integer or context lambda") + + +class Subconstruct(Construct): + r""" + Abstract subconstruct (wraps an inner construct, inheriting its name and flags). Parsing and building is by default deferred to subcon, same as sizeof. + + :param subcon: Construct instance + """ + def __init__(self, subcon): + if not isinstance(subcon, Construct): + raise TypeError("subcon should be a Construct field") + super().__init__() + self.subcon = subcon + self.flagbuildnone = subcon.flagbuildnone + + def __repr__(self): + return "<%s%s%s%s %s>" % (self.__class__.__name__, " "+self.name if self.name else "", " +nonbuild" if self.flagbuildnone else "", " +docs" if self.docs else "", repr(self.subcon), ) + + def _parse(self, stream, context, path): + return self.subcon._parsereport(stream, context, path) + + def _build(self, obj, stream, context, path): + return self.subcon._build(obj, stream, context, path) + + def _sizeof(self, context, path): + return self.subcon._sizeof(context, path) + + +class Adapter(Subconstruct): + r""" + Abstract adapter class. + + Needs to implement `_decode()` for parsing and `_encode()` for building. + + :param subcon: Construct instance + """ + def _parse(self, stream, context, path): + obj = self.subcon._parsereport(stream, context, path) + return self._decode(obj, context, path) + + def _build(self, obj, stream, context, path): + obj2 = self._encode(obj, context, path) + buildret = self.subcon._build(obj2, stream, context, path) + return obj + + def _decode(self, obj, context, path): + raise NotImplementedError + + def _encode(self, obj, context, path): + raise NotImplementedError + + +class SymmetricAdapter(Adapter): + r""" + Abstract adapter class. 
+ + Needs to implement `_decode()` only, for both parsing and building. + + :param subcon: Construct instance + """ + def _encode(self, obj, context, path): + return self._decode(obj, context, path) + + +class Validator(SymmetricAdapter): + r""" + Abstract class that validates a condition on the encoded/decoded object. + + Needs to implement `_validate()` that returns a bool (or a truthy value) + + :param subcon: Construct instance + """ + def _decode(self, obj, context, path): + if not self._validate(obj, context, path): + raise ValidationError("object failed validation: %s" % (obj,), path=path) + return obj + + def _validate(self, obj, context, path): + raise NotImplementedError + + +class Tunnel(Subconstruct): + r""" + Abstract class that allows other constructs to read part of the stream as if they were reading the entire stream. See Prefixed for example. + + Needs to implement `_decode()` for parsing and `_encode()` for building. + """ + def _parse(self, stream, context, path): + data = stream_read_entire(stream, path) # reads entire stream + data = self._decode(data, context, path) + return self.subcon.parse(data, **context) + + def _build(self, obj, stream, context, path): + stream2 = io.BytesIO() + buildret = self.subcon._build(obj, stream2, context, path) + data = stream2.getvalue() + data = self._encode(data, context, path) + stream_write(stream, data, len(data), path) + return obj + + def _sizeof(self, context, path): + raise SizeofError(path=path) + + def _decode(self, data, context, path): + raise NotImplementedError + + def _encode(self, data, context, path): + raise NotImplementedError + + +class Compiled(Construct): + """Used internally.""" + + def __init__(self, parsefunc, buildfunc): + super().__init__() + self.source = None + self.defersubcon = None + self.parsefunc = parsefunc + self.buildfunc = buildfunc + + def _parse(self, stream, context, path): + return self.parsefunc(stream, context) + + def _build(self, obj, stream, context, path): + 
return self.buildfunc(obj, stream, context) + + def _sizeof(self, context, path): + return self.defersubcon._sizeof(context, path) + + def compile(self, filename=None): + return self + + def benchmark(self, sampledata, filename=None): + return self.defersubcon.benchmark(sampledata, filename) + + +#=============================================================================== +# bytes and bits +#=============================================================================== +class Bytes(Construct): + r""" + Field consisting of a specified number of bytes. + + Parses into a bytes (of given length). Builds into the stream directly (but checks that given object matches specified length). Can also build from an integer for convenience (although BytesInteger should be used instead). Size is the specified length. + + Can also build from a bytearray. + + :param length: integer or context lambda + + :raises StreamError: requested reading negative amount, could not read enough bytes, requested writing different amount than actual data, or could not write all bytes + :raises StringError: building from non-bytes value, perhaps unicode + + Can propagate any exception from the lambda, possibly non-ConstructError. + + Example:: + + >>> d = Bytes(4) + >>> d.parse(b'beef') + b'beef' + >>> d.build(b'beef') + b'beef' + >>> d.build(0) + b'\x00\x00\x00\x00' + >>> d.sizeof() + 4 + + >>> d = Struct( + ... "length" / Int8ub, + ... "data" / Bytes(this.length), + ... 
) + >>> d.parse(b"\x04beef") + Container(length=4, data=b'beef') + >>> d.sizeof() + construct.core.SizeofError: cannot calculate size, key not found in context + """ + + def __init__(self, length): + super().__init__() + self.length = length + + def _parse(self, stream, context, path): + length = self.length(context) if callable(self.length) else self.length + return stream_read(stream, length, path) + + def _build(self, obj, stream, context, path): + length = self.length(context) if callable(self.length) else self.length + data = integer2bytes(obj, length) if isinstance(obj, int) else obj + data = bytes(data) if type(data) is bytearray else data + stream_write(stream, data, length, path) + return data + + def _sizeof(self, context, path): + try: + return self.length(context) if callable(self.length) else self.length + except (KeyError, AttributeError): + raise SizeofError("cannot calculate size, key not found in context", path=path) + + def _emitparse(self, code): + return f"io.read({self.length})" + + def _emitbuild(self, code): + return f"(io.write(obj), obj)[1]" + + def _emitfulltype(self, ksy, bitwise): + return dict(size=self.length) + + +@singleton +class GreedyBytes(Construct): + r""" + Field consisting of unknown number of bytes. + + Parses the stream to the end. Builds into the stream directly (without checks). Size is undefined. + + Can also build from a bytearray. 
+ + :raises StreamError: stream failed when reading until EOF + :raises StringError: building from non-bytes value, perhaps unicode + + Example:: + + >>> GreedyBytes.parse(b"asislight") + b'asislight' + >>> GreedyBytes.build(b"asislight") + b'asislight' + """ + + def _parse(self, stream, context, path): + return stream_read_entire(stream, path) + + def _build(self, obj, stream, context, path): + data = bytes(obj) if type(obj) is bytearray else obj + stream_write(stream, data, len(data), path) + return data + + def _emitparse(self, code): + return f"io.read()" + + def _emitbuild(self, code): + return f"(io.write(obj), obj)[1]" + + def _emitfulltype(self, ksy, bitwise): + return dict(size_eos=True) + + +def Bitwise(subcon): + r""" + Converts the stream from bytes to bits, and passes the bitstream to underlying subcon. Bitstream is a stream that contains 8 times as many bytes, and each byte is either \\x00 or \\x01 (in documentation those bytes are called bits). + + Parsing building and size are deferred to subcon, although size gets divided by 8 (therefore the subcon's size must be a multiple of 8). + + Note that by default the bit ordering is from MSB to LSB for every byte (ie. bit-level big-endian). If you need it reversed, wrap this subcon with :class:`construct.core.BitsSwapped`. + + :param subcon: Construct instance, any field that works with bits (like BitsInteger) or is bit-byte agnostic (like Struct or Flag) + + See :class:`~construct.core.Transformed` and :class:`~construct.core.Restreamed` for raisable exceptions. + + Example:: + + >>> d = Bitwise(Struct( + ... 'a' / Nibble, + ... 'b' / Bytewise(Float32b), + ... 'c' / Padding(4), + ... 
)) + >>> d.parse(bytes(5)) + Container(a=0, b=0.0, c=None) + >>> d.sizeof() + 5 + + Obtaining other byte or bit orderings:: + + >>> d = Bitwise(Bytes(16)) + >>> d.parse(b'\x01\x03') + b'\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x01\x01' + >>> d = BitsSwapped(Bitwise(Bytes(16))) + >>> d.parse(b'\x01\x03') + b'\x01\x00\x00\x00\x00\x00\x00\x00\x01\x01\x00\x00\x00\x00\x00\x00' + """ + + try: + size = subcon.sizeof() + macro = Transformed(subcon, bytes2bits, size//8, bits2bytes, size//8) + except SizeofError: + macro = Restreamed(subcon, bytes2bits, 1, bits2bytes, 8, lambda n: n//8) + def _emitseq(ksy, bitwise): + return subcon._compileseq(ksy, bitwise=True) + def _emitprimitivetype(ksy, bitwise): + return subcon._compileprimitivetype(ksy, bitwise=True) + def _emitfulltype(ksy, bitwise): + return subcon._compilefulltype(ksy, bitwise=True) + macro._emitseq = _emitseq + macro._emitprimitivetype = _emitprimitivetype + macro._emitfulltype = _emitfulltype + return macro + + +def Bytewise(subcon): + r""" + Converts the bitstream back to normal byte stream. Must be used within :class:`~construct.core.Bitwise`. + + Parsing building and size are deferred to subcon, although size gets multiplied by 8. + + :param subcon: Construct instance, any field that works with bytes or is bit-byte agnostic + + See :class:`~construct.core.Transformed` and :class:`~construct.core.Restreamed` for raisable exceptions. + + Example:: + + >>> d = Bitwise(Struct( + ... 'a' / Nibble, + ... 'b' / Bytewise(Float32b), + ... 'c' / Padding(4), + ... 
)) + >>> d.parse(bytes(5)) + Container(a=0, b=0.0, c=None) + >>> d.sizeof() + 5 + """ + + try: + size = subcon.sizeof() + macro = Transformed(subcon, bits2bytes, size*8, bytes2bits, size*8) + except SizeofError: + macro = Restreamed(subcon, bits2bytes, 8, bytes2bits, 1, lambda n: n*8) + def _emitseq(ksy, bitwise): + return subcon._compileseq(ksy, bitwise=False) + def _emitprimitivetype(ksy, bitwise): + return subcon._compileprimitivetype(ksy, bitwise=False) + def _emitfulltype(ksy, bitwise): + return subcon._compilefulltype(ksy, bitwise=False) + macro._emitseq = _emitseq + macro._emitprimitivetype = _emitprimitivetype + macro._emitfulltype = _emitfulltype + return macro + + +#=============================================================================== +# integers and floats +#=============================================================================== +class FormatField(Construct): + r""" + Field that uses `struct` module to pack and unpack CPU-sized integers and floats and booleans. This is used to implement most Int* Float* fields, but for example cannot pack 24-bit integers, which is left to :class:`~construct.core.BytesInteger` class. For booleans I also recommend using Flag class instead. + + See `struct module `_ documentation for instructions on crafting format strings. + + Parses into an integer or float or boolean. Builds from an integer or float or boolean into specified byte count and endianness. Size is determined by `struct` module according to specified format string. + + :param endianity: string, character like: < > = + :param format: string, character like: B H L Q b h l q e f d ? 
+ + :raises StreamError: requested reading negative amount, could not read enough bytes, requested writing different amount than actual data, or could not write all bytes + :raises FormatFieldError: wrong format string, or struct.(un)pack complained about the value + + Example:: + + >>> d = FormatField(">", "H") or Int16ub + >>> d.parse(b"\x01\x00") + 256 + >>> d.build(256) + b"\x01\x00" + >>> d.sizeof() + 2 + """ + + def __init__(self, endianity, format): + if endianity not in list("=<>"): + raise FormatFieldError("endianity must be like: = < >", endianity) + if format not in list("fdBHLQbhlqe?"): + raise FormatFieldError("format must be like: B H L Q b h l q e f d ?", format) + + super().__init__() + self.fmtstr = endianity+format + self.length = struct.calcsize(endianity+format) + + def _parse(self, stream, context, path): + data = stream_read(stream, self.length, path) + try: + return struct.unpack(self.fmtstr, data)[0] + except Exception: + raise FormatFieldError("struct %r error during parsing" % self.fmtstr, path=path) + + def _build(self, obj, stream, context, path): + try: + data = struct.pack(self.fmtstr, obj) + except Exception: + raise FormatFieldError("struct %r error during building, given value %r" % (self.fmtstr, obj), path=path) + stream_write(stream, data, self.length, path) + return obj + + def _sizeof(self, context, path): + return self.length + + def _emitparse(self, code): + fname = f"formatfield_{code.allocateId()}" + code.append(f"{fname} = struct.Struct({repr(self.fmtstr)})") + return f"{fname}.unpack(io.read({self.length}))[0]" + + def _emitbuild(self, code): + fname = f"formatfield_{code.allocateId()}" + code.append(f"{fname} = struct.Struct({repr(self.fmtstr)})") + return f"(io.write({fname}.pack(obj)), obj)[1]" + + def _emitprimitivetype(self, ksy, bitwise): + endianity,format = self.fmtstr + signed = format.islower() + swapped = (endianity == "<") or (endianity == "=" and sys.byteorder == "little") + if format in "bhlqBHLQ": + if 
bitwise: + assert not signed + assert not swapped + return "b%s" % (8*self.length, ) + else: + return "%s%s%s" % ("s" if signed else "u", self.length, "le" if swapped else "be", ) + if format in "fd": + assert not bitwise + return "f%s%s" % (self.length, "le" if swapped else "be", ) + + +class BytesInteger(Construct): + r""" + Field that packs integers of arbitrary size. Int24* fields use this class. + + Parses into an integer. Builds from an integer into specified byte count and endianness. Size is specified in ctor. + + Analog to :class:`~construct.core.BitsInteger` which operates on bits. In fact:: + + BytesInteger(n) <--> Bitwise(BitsInteger(8*n)) + BitsInteger(8*n) <--> Bytewise(BytesInteger(n)) + + Byte ordering refers to bytes (chunks of 8 bits) so, for example:: + + BytesInteger(n, swapped=True) <--> Bitwise(BitsInteger(8*n, swapped=True)) + + :param length: integer or context lambda, number of bytes in the field + :param signed: bool, whether the value is signed (two's complement), default is False (unsigned) + :param swapped: bool or context lambda, whether to swap byte order (little endian), default is False (big endian) + + :raises StreamError: requested reading negative amount, could not read enough bytes, requested writing different amount than actual data, or could not write all bytes + :raises IntegerError: length is negative + :raises IntegerError: value is not an integer + :raises IntegerError: number does not fit given width and signed parameters + + Can propagate any exception from the lambda, possibly non-ConstructError. 
+ + Example:: + + >>> d = BytesInteger(4) or Int32ub + >>> d.parse(b"abcd") + 1633837924 + >>> d.build(1) + b'\x00\x00\x00\x01' + >>> d.sizeof() + 4 + """ + + def __init__(self, length, signed=False, swapped=False): + super().__init__() + self.length = length + self.signed = signed + self.swapped = swapped + + def _parse(self, stream, context, path): + length = evaluate(self.length, context) + if length < 0: + raise IntegerError(f"length {length} must be non-negative", path=path) + data = stream_read(stream, length, path) + if evaluate(self.swapped, context): + data = swapbytes(data) + try: + return bytes2integer(data, self.signed) + except ValueError as e: + raise IntegerError(str(e), path=path) + + def _build(self, obj, stream, context, path): + if not isinstance(obj, integertypes): + raise IntegerError(f"value {obj} is not an integer", path=path) + if obj < 0 and not self.signed: + raise IntegerError(f"value {obj} is negative but signed is false", path=path) + length = evaluate(self.length, context) + if length < 0: + raise IntegerError(f"length {length} must be non-negative", path=path) + try: + data = integer2bytes(obj, length, self.signed) + except ValueError as e: + raise IntegerError(str(e), path=path) + if evaluate(self.swapped, context): + data = swapbytes(data) + stream_write(stream, data, length, path) + return obj + + def _sizeof(self, context, path): + try: + return evaluate(self.length, context) + except (KeyError, AttributeError): + raise SizeofError("cannot calculate size, key not found in context", path=path) + + def _emitparse(self, code): + return f"bytes2integer(swapbytes(io.read({self.length})) if {self.swapped} else io.read({self.length}), {self.signed})" + + def _emitbuild(self, code): + return f"((io.write(swapbytes(integer2bytes(obj, {self.length}, {self.signed})) if ({self.swapped}) else integer2bytes(obj, {self.length}, {self.signed}))), obj)[1]" + + def _emitprimitivetype(self, ksy, bitwise): + if bitwise: + assert not self.signed + 
assert not self.swapped + return "b%s" % (8*self.length, ) + else: + assert not callable(self.swapped) + return "%s%s%s" % ("s" if self.signed else "u", self.length, "le" if self.swapped else "be", ) + + +class BitsInteger(Construct): + r""" + Field that packs arbitrarily large (or small) integers. Some fields (Bit Nibble Octet) use this class. Must be enclosed in :class:`~construct.core.Bitwise` context. + + Parses into an integer. Builds from an integer into specified bit count and endianness. Size (in bits) is specified in ctor. + + Analog to :class:`~construct.core.BytesInteger` which operates on bytes. In fact:: + + BytesInteger(n) <--> Bitwise(BitsInteger(8*n)) + BitsInteger(8*n) <--> Bytewise(BytesInteger(n)) + + Note that little-endianness is only defined for multiples of 8 bits. + + Byte ordering (i.e. `swapped` parameter) refers to bytes (chunks of 8 bits) so, for example:: + + BytesInteger(n, swapped=True) <--> Bitwise(BitsInteger(8*n, swapped=True)) + + Swapped argument was recently fixed. To obtain previous (faulty) behavior, you can use `ByteSwapped`, `BitsSwapped` and `Bitwise` in whatever particular order (see examples). + + :param length: integer or context lambda, number of bits in the field + :param signed: bool, whether the value is signed (two's complement), default is False (unsigned) + :param swapped: bool or context lambda, whether to swap byte order (little endian), default is False (big endian) + + :raises StreamError: requested reading negative amount, could not read enough bytes, requested writing different amount than actual data, or could not write all bytes + :raises IntegerError: length is negative + :raises IntegerError: value is not an integer + :raises IntegerError: number does not fit given width and signed parameters + :raises IntegerError: little-endianness selected but length is not multiple of 8 bits + + Can propagate any exception from the lambda, possibly non-ConstructError. 
+ + Examples:: + + >>> d = Bitwise(BitsInteger(8)) or Bitwise(Octet) + >>> d.parse(b"\x10") + 16 + >>> d.build(255) + b'\xff' + >>> d.sizeof() + 1 + + Obtaining other byte or bit orderings:: + + >>> d = BitsInteger(2) + >>> d.parse(b'\x01\x00') # Bit-Level Big-Endian + 2 + >>> d = ByteSwapped(BitsInteger(2)) + >>> d.parse(b'\x01\x00') # Bit-Level Little-Endian + 1 + >>> d = BitsInteger(16) # Byte-Level Big-Endian, Bit-Level Big-Endian + >>> d.build(5 + 19*256) + b'\x00\x00\x00\x01\x00\x00\x01\x01\x00\x00\x00\x00\x00\x01\x00\x01' + >>> d = BitsInteger(16, swapped=True) # Byte-Level Little-Endian, Bit-Level Big-Endian + >>> d.build(5 + 19*256) + b'\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x01\x01' + >>> d = ByteSwapped(BitsInteger(16)) # Byte-Level Little-Endian, Bit-Level Little-Endian + >>> d.build(5 + 19*256) + b'\x01\x00\x01\x00\x00\x00\x00\x00\x01\x01\x00\x00\x01\x00\x00\x00' + >>> d = ByteSwapped(BitsInteger(16, swapped=True)) # Byte-Level Big-Endian, Bit-Level Little-Endian + >>> d.build(5 + 19*256) + b'\x01\x01\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00' + """ + + def __init__(self, length, signed=False, swapped=False): + super().__init__() + self.length = length + self.signed = signed + self.swapped = swapped + + def _parse(self, stream, context, path): + length = evaluate(self.length, context) + if length < 0: + raise IntegerError(f"length {length} must be non-negative", path=path) + data = stream_read(stream, length, path) + if evaluate(self.swapped, context): + if length % 8: + raise IntegerError(f"little-endianness is only defined if {length} is multiple of 8 bits", path=path) + data = swapbytesinbits(data) + try: + return bits2integer(data, self.signed) + except ValueError as e: + raise IntegerError(str(e), path=path) + + def _build(self, obj, stream, context, path): + if not isinstance(obj, integertypes): + raise IntegerError(f"value {obj} is not an integer", path=path) + if obj < 0 and not self.signed: + raise 
IntegerError(f"value {obj} is negative but signed is false", path=path) + length = evaluate(self.length, context) + if length < 0: + raise IntegerError(f"length {length} must be non-negative", path=path) + try: + data = integer2bits(obj, length, self.signed) + except ValueError as e: + raise IntegerError(str(e), path=path) + if evaluate(self.swapped, context): + if length % 8: + raise IntegerError(f"little-endianness is only defined if {length} is multiple of 8 bits", path=path) + data = swapbytesinbits(data) + stream_write(stream, data, length, path) + return obj + + def _sizeof(self, context, path): + try: + return evaluate(self.length, context) + except (KeyError, AttributeError): + raise SizeofError("cannot calculate size, key not found in context", path=path) + + def _emitparse(self, code): + return f"bits2integer(swapbytesinbits(io.read({self.length})) if {self.swapped} else io.read({self.length}), {self.signed})" + + def _emitbuild(self, code): + return f"((io.write(swapbytesinbits(integer2bits(obj, {self.length}, {self.signed})) if ({self.swapped}) else integer2bits(obj, {self.length}, {self.signed}))), obj)[1]" + + def _emitprimitivetype(self, ksy, bitwise): + assert not self.signed + assert not self.swapped + return "b%s" % (self.length, ) + + +@singleton +def Bit(): + """A 1-bit integer, must be enclosed in a Bitwise (eg. BitStruct)""" + return BitsInteger(1) +@singleton +def Nibble(): + """A 4-bit integer, must be enclosed in a Bitwise (eg. BitStruct)""" + return BitsInteger(4) +@singleton +def Octet(): + """A 8-bit integer, must be enclosed in a Bitwise (eg. 
BitStruct)""" + return BitsInteger(8) + +@singleton +def Int8ub(): + """Unsigned, big endian 8-bit integer""" + return FormatField(">", "B") +@singleton +def Int16ub(): + """Unsigned, big endian 16-bit integer""" + return FormatField(">", "H") +@singleton +def Int32ub(): + """Unsigned, big endian 32-bit integer""" + return FormatField(">", "L") +@singleton +def Int64ub(): + """Unsigned, big endian 64-bit integer""" + return FormatField(">", "Q") + +@singleton +def Int8sb(): + """Signed, big endian 8-bit integer""" + return FormatField(">", "b") +@singleton +def Int16sb(): + """Signed, big endian 16-bit integer""" + return FormatField(">", "h") +@singleton +def Int32sb(): + """Signed, big endian 32-bit integer""" + return FormatField(">", "l") +@singleton +def Int64sb(): + """Signed, big endian 64-bit integer""" + return FormatField(">", "q") + +@singleton +def Int8ul(): + """Unsigned, little endian 8-bit integer""" + return FormatField("<", "B") +@singleton +def Int16ul(): + """Unsigned, little endian 16-bit integer""" + return FormatField("<", "H") +@singleton +def Int32ul(): + """Unsigned, little endian 32-bit integer""" + return FormatField("<", "L") +@singleton +def Int64ul(): + """Unsigned, little endian 64-bit integer""" + return FormatField("<", "Q") + +@singleton +def Int8sl(): + """Signed, little endian 8-bit integer""" + return FormatField("<", "b") +@singleton +def Int16sl(): + """Signed, little endian 16-bit integer""" + return FormatField("<", "h") +@singleton +def Int32sl(): + """Signed, little endian 32-bit integer""" + return FormatField("<", "l") +@singleton +def Int64sl(): + """Signed, little endian 64-bit integer""" + return FormatField("<", "q") + +@singleton +def Int8un(): + """Unsigned, native endianity 8-bit integer""" + return FormatField("=", "B") +@singleton +def Int16un(): + """Unsigned, native endianity 16-bit integer""" + return FormatField("=", "H") +@singleton +def Int32un(): + """Unsigned, native endianity 32-bit integer""" + return 
FormatField("=", "L") +@singleton +def Int64un(): + """Unsigned, native endianity 64-bit integer""" + return FormatField("=", "Q") + +@singleton +def Int8sn(): + """Signed, native endianity 8-bit integer""" + return FormatField("=", "b") +@singleton +def Int16sn(): + """Signed, native endianity 16-bit integer""" + return FormatField("=", "h") +@singleton +def Int32sn(): + """Signed, native endianity 32-bit integer""" + return FormatField("=", "l") +@singleton +def Int64sn(): + """Signed, native endianity 64-bit integer""" + return FormatField("=", "q") + +Byte = Int8ub +Short = Int16ub +Int = Int32ub +Long = Int64ub + +@singleton +def Float16b(): + """Big endian, 16-bit IEEE 754 floating point number""" + return FormatField(">", "e") +@singleton +def Float16l(): + """Little endian, 16-bit IEEE 754 floating point number""" + return FormatField("<", "e") +@singleton +def Float16n(): + """Native endianity, 16-bit IEEE 754 floating point number""" + return FormatField("=", "e") + +@singleton +def Float32b(): + """Big endian, 32-bit IEEE floating point number""" + return FormatField(">", "f") +@singleton +def Float32l(): + """Little endian, 32-bit IEEE floating point number""" + return FormatField("<", "f") +@singleton +def Float32n(): + """Native endianity, 32-bit IEEE floating point number""" + return FormatField("=", "f") + +@singleton +def Float64b(): + """Big endian, 64-bit IEEE floating point number""" + return FormatField(">", "d") +@singleton +def Float64l(): + """Little endian, 64-bit IEEE floating point number""" + return FormatField("<", "d") +@singleton +def Float64n(): + """Native endianity, 64-bit IEEE floating point number""" + return FormatField("=", "d") + +Half = Float16b +Single = Float32b +Double = Float64b + +native = (sys.byteorder == "little") + +@singleton +def Int24ub(): + """A 3-byte big-endian unsigned integer, as used in ancient file formats.""" + return BytesInteger(3, signed=False, swapped=False) +@singleton +def Int24ul(): + """A 3-byte 
little-endian unsigned integer, as used in ancient file formats.""" + return BytesInteger(3, signed=False, swapped=True) +@singleton +def Int24un(): + """A 3-byte native-endian unsigned integer, as used in ancient file formats.""" + return BytesInteger(3, signed=False, swapped=native) +@singleton +def Int24sb(): + """A 3-byte big-endian signed integer, as used in ancient file formats.""" + return BytesInteger(3, signed=True, swapped=False) +@singleton +def Int24sl(): + """A 3-byte little-endian signed integer, as used in ancient file formats.""" + return BytesInteger(3, signed=True, swapped=True) +@singleton +def Int24sn(): + """A 3-byte native-endian signed integer, as used in ancient file formats.""" + return BytesInteger(3, signed=True, swapped=native) + + +@singleton +class VarInt(Construct): + r""" + VarInt encoded unsigned integer. Each 7 bits of the number are encoded in one byte of the stream, where leftmost bit (MSB) is unset when byte is terminal. Scheme is defined at Google site related to `Protocol Buffers `_. + + Can only encode non-negative numbers. + + Parses into an integer. Builds from an integer. Size is undefined. 
+ + :raises StreamError: requested reading negative amount, could not read enough bytes, requested writing different amount than actual data, or could not write all bytes + :raises IntegerError: given a negative value, or not an integer + + Example:: + + >>> VarInt.build(1) + b'\x01' + >>> VarInt.build(2**100) + b'\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x04' + """ + + def _parse(self, stream, context, path): + acc = [] + while True: + b = byte2int(stream_read(stream, 1, path)) + acc.append(b & 0b01111111) + if b & 0b10000000 == 0: + break + num = 0 + for b in reversed(acc): + num = (num << 7) | b + return num + + def _build(self, obj, stream, context, path): + if not isinstance(obj, integertypes): + raise IntegerError(f"value {obj} is not an integer", path=path) + if obj < 0: + raise IntegerError(f"VarInt cannot build from negative number {obj}", path=path) + x = obj + B = bytearray() + while x > 0b01111111: + B.append(0b10000000 | (x & 0b01111111)) + x >>= 7 + B.append(x) + stream_write(stream, bytes(B), len(B), path) + return obj + + def _emitprimitivetype(self, ksy, bitwise): + return "vlq_base128_le" + + +@singleton +class ZigZag(Construct): + r""" + ZigZag encoded signed integer. This is a variant of VarInt encoding that also can encode negative numbers. Scheme is defined at Google site related to `Protocol Buffers `_. + + Can encode negative numbers. + + Parses into an integer. Builds from an integer. Size is undefined. 
+ + :raises StreamError: requested reading negative amount, could not read enough bytes, requested writing different amount than actual data, or could not write all bytes + :raises IntegerError: given not an integer + + Example:: + + >>> ZigZag.build(-3) + b'\x05' + >>> ZigZag.build(3) + b'\x06' + """ + + def _parse(self, stream, context, path): + x = VarInt._parse(stream, context, path) + if x & 1 == 0: + x = x//2 + else: + x = -(x//2+1) + return x + + def _build(self, obj, stream, context, path): + if not isinstance(obj, integertypes): + raise IntegerError(f"value {obj} is not an integer", path=path) + if obj >= 0: + x = 2*obj + else: + x = 2*abs(obj)-1 + VarInt._build(x, stream, context, path) + return obj + + +#=============================================================================== +# strings +#=============================================================================== + +#: Explicitly supported encodings (by PaddedString and CString classes). +#: +possiblestringencodings = dict( + ascii=1, + utf8=1, utf_8=1, u8=1, + utf16=2, utf_16=2, u16=2, utf_16_be=2, utf_16_le=2, + utf32=4, utf_32=4, u32=4, utf_32_be=4, utf_32_le=4, +) + + +def encodingunit(encoding): + """Used internally.""" + encoding = encoding.replace("-","_").lower() + if encoding not in possiblestringencodings: + raise StringError("encoding %r not found among %r" % (encoding, possiblestringencodings,)) + return bytes(possiblestringencodings[encoding]) + + +class StringEncoded(Adapter): + """Used internally.""" + + def __init__(self, subcon, encoding): + super().__init__(subcon) + if not encoding: + raise StringError("String* classes require explicit encoding") + self.encoding = encoding + + def _decode(self, obj, context, path): + return obj.decode(self.encoding) + + def _encode(self, obj, context, path): + if not isinstance(obj, unicodestringtype): + raise StringError("string encoding failed, expected unicode string", path=path) + if obj == u"": + return b"" + return 
obj.encode(self.encoding) + + def _emitparse(self, code): + return f"({self.subcon._compileparse(code)}).decode({repr(self.encoding)})" + + def _emitbuild(self, code): + raise NotImplementedError + # This is not a valid implementation. obj.encode() should be inserted into subcon + # return f"({self.subcon._compilebuild(code)}).encode({repr(self.encoding)})" + + +def PaddedString(length, encoding): + r""" + Configurable, fixed-length or variable-length string field. + + When parsing, the byte string is stripped of null bytes (per encoding unit), then decoded. Length is an integer or context lambda. When building, the string is encoded and then padded to specified length. If encoded string is larger than the specified length, it fails with PaddingError. Size is same as length parameter. + + .. warning:: PaddedString and CString only support encodings explicitly listed in :class:`~construct.core.possiblestringencodings` . + + :param length: integer or context lambda, length in bytes (not unicode characters) + :param encoding: string like: utf8 utf16 utf32 ascii + + :raises StringError: building a non-unicode string + :raises StringError: selected encoding is not on supported list + + Can propagate any exception from the lambda, possibly non-ConstructError. + + Example:: + + >>> d = PaddedString(10, "utf8") + >>> d.build(u"Афон") + b'\xd0\x90\xd1\x84\xd0\xbe\xd0\xbd\x00\x00' + >>> d.parse(_) + u'Афон' + """ + macro = StringEncoded(FixedSized(length, NullStripped(GreedyBytes, pad=encodingunit(encoding))), encoding) + def _emitfulltype(ksy, bitwise): + return dict(size=length, type="strz", encoding=encoding) + macro._emitfulltype = _emitfulltype + return macro + + +def PascalString(lengthfield, encoding): + r""" + Length-prefixed string. The length field can be variable length (such as VarInt) or fixed length (such as Int64ub). :class:`~construct.core.VarInt` is recommended when designing new protocols. Stored length is in bytes, not characters. Size is not defined. 
def PascalString(lengthfield, encoding):
    r"""
    Length-prefixed string.

    The length field can be variable length (such as VarInt) or fixed length
    (such as Int64ub). :class:`~construct.core.VarInt` is recommended when
    designing new protocols. Stored length is in bytes, not characters. Size
    is not defined.

    :param lengthfield: Construct instance, field used to parse and build the length (like VarInt Int64ub)
    :param encoding: string like: utf8 utf16 utf32 ascii

    :raises StringError: building a non-unicode string

    Example::

        >>> d = PascalString(VarInt, "utf8")
        >>> d.build(u"Афон")
        b'\x08\xd0\x90\xd1\x84\xd0\xbe\xd0\xbd'
        >>> d.parse(_)
        u'Афон'
    """
    macro = StringEncoded(Prefixed(lengthfield, GreedyBytes), encoding)

    def _emitparse(code):
        return f"io.read({lengthfield._compileparse(code)}).decode({repr(encoding)})"

    def _emitseq(ksy, bitwise):
        return [
            dict(id="lengthfield", type=lengthfield._compileprimitivetype(ksy, bitwise)),
            dict(id="data", size="lengthfield", type="str", encoding=encoding),
        ]

    macro._emitparse = _emitparse
    macro._emitseq = _emitseq
    return macro


def CString(encoding):
    r"""
    String ending in a terminating null byte (or null bytes in case of UTF16 UTF32).

    .. warning:: String and CString only support encodings explicitly listed in :class:`~construct.core.possiblestringencodings` .

    :param encoding: string like: utf8 utf16 utf32 ascii

    :raises StringError: building a non-unicode string
    :raises StringError: selected encoding is not on supported list

    Example::

        >>> d = CString("utf8")
        >>> d.build(u"Афон")
        b'\xd0\x90\xd1\x84\xd0\xbe\xd0\xbd\x00'
        >>> d.parse(_)
        u'Афон'
    """
    macro = StringEncoded(NullTerminated(GreedyBytes, term=encodingunit(encoding)), encoding)

    def _emitfulltype(ksy, bitwise):
        return dict(type="strz", encoding=encoding)

    macro._emitfulltype = _emitfulltype
    return macro
def GreedyString(encoding):
    r"""
    String that reads the entire stream until EOF, and writes a given string as-is.

    Analog to :class:`~construct.core.GreedyBytes`, but additionally applies
    unicode-to-bytes encoding.

    :param encoding: string like: utf8 utf16 utf32 ascii

    :raises StringError: building a non-unicode string
    :raises StreamError: stream failed when reading until EOF

    Example::

        >>> d = GreedyString("utf8")
        >>> d.build(u"Афон")
        b'\xd0\x90\xd1\x84\xd0\xbe\xd0\xbd'
        >>> d.parse(_)
        u'Афон'
    """
    macro = StringEncoded(GreedyBytes, encoding)

    def _emitfulltype(ksy, bitwise):
        return dict(size_eos=True, type="str", encoding=encoding)

    macro._emitfulltype = _emitfulltype
    return macro


#===============================================================================
# mappings
#===============================================================================
@singleton
class Flag(Construct):
    r"""
    One byte (or one bit) field that maps to True or False.

    Any non-zero byte parses as True. Size is defined as 1.

    :raises StreamError: requested reading negative amount, could not read enough bytes, requested writing different amount than actual data, or could not write all bytes

    Example::

        >>> Flag.parse(b"\x01")
        True
        >>> Flag.build(True)
        b'\x01'
    """

    def _parse(self, stream, context, path):
        data = stream_read(stream, 1, path)
        return data != b"\x00"

    def _build(self, obj, stream, context, path):
        data = b"\x01" if obj else b"\x00"
        stream_write(stream, data, 1, path)
        return obj

    def _sizeof(self, context, path):
        return 1

    def _emitparse(self, code):
        return f"(io.read(1) != b'\\x00')"

    def _emitbuild(self, code):
        return f"((io.write(b'\\x01') if obj else io.write(b'\\x00')), obj)[1]"

    def _emitfulltype(self, ksy, bitwise):
        return dict(type=("b1" if bitwise else "u1"), _construct_render="Flag")


class EnumInteger(int):
    """Used internally. Integer returned when Enum parses a value that has no label."""


class EnumIntegerString(str):
    """Used internally. A str subclass that also remembers the integer value it labels."""

    def __repr__(self):
        return "EnumIntegerString.new(%s, %s)" % (self.intvalue, str.__repr__(self), )

    def __int__(self):
        return self.intvalue

    @staticmethod
    def new(intvalue, stringvalue):
        # Factory: str is immutable, so the extra attribute is attached post-construction.
        ret = EnumIntegerString(stringvalue)
        ret.intvalue = intvalue
        return ret
class Enum(Adapter):
    r"""
    Translates unicode label names to subcon values, and vice versa.

    Parses the integer subcon, then looks the value up in the mapping: returns
    an integer-convertible string when a label exists, a plain integer
    otherwise. Building accepts either an integer or a known label. Size is
    same as subcon, unless it raises SizeofError.

    There is no default parameter, because if no mapping is found, it parses
    into an integer without error.

    This class supports enum34 module (merge labels/values from IntEnum and
    IntFlag via \*merge) and exposes member labels as attributes, as
    integer-convertible strings.

    :param subcon: Construct instance, subcon to map to/from
    :param \*merge: optional, list of enum.IntEnum and enum.IntFlag instances, to merge labels and values from
    :param \*\*mapping: dict, mapping string names to values

    :raises MappingError: building from string but no mapping found

    Example::

        >>> d = Enum(Byte, one=1, two=2, four=4, eight=8)
        >>> d.parse(b"\x01")
        'one'
        >>> int(d.parse(b"\x01"))
        1
        >>> d.parse(b"\xff")
        255
        >>> d.build(d.one or "one" or 1)
        b'\x01'
    """

    def __init__(self, subcon, *merge, **mapping):
        super().__init__(subcon)
        for enum in merge:
            for entry in enum:
                mapping[entry.name] = entry.value
        self.encmapping = {EnumIntegerString.new(value, label): value for label, value in mapping.items()}
        self.decmapping = {value: EnumIntegerString.new(value, label) for label, value in mapping.items()}
        self.ksymapping = {value: label for label, value in mapping.items()}

    def __getattr__(self, name):
        # Expose labels as attributes, returning the canonical EnumIntegerString.
        if name in self.encmapping:
            return self.decmapping[self.encmapping[name]]
        raise AttributeError

    def _decode(self, obj, context, path):
        try:
            return self.decmapping[obj]
        except KeyError:
            # unmapped values parse into a plain (int-subclass) integer, no error
            return EnumInteger(obj)

    def _encode(self, obj, context, path):
        if isinstance(obj, integertypes):
            return obj
        try:
            return self.encmapping[obj]
        except KeyError:
            raise MappingError("building failed, no mapping for %r" % (obj,), path=path)

    def _emitparse(self, code):
        factoryname = f"factory_{code.allocateId()}"
        code.append(f"{factoryname} = {repr(self.decmapping)}")
        return f"reuse(({self.subcon._compileparse(code)}), lambda x: {factoryname}.get(x, EnumInteger(x)))"

    def _emitbuild(self, code):
        factoryname = f"factory_{code.allocateId()}"
        code.append(f"{factoryname} = {repr(self.encmapping)}")
        return f"reuse({factoryname}.get(obj, obj), lambda obj: ({self.subcon._compilebuild(code)}))"

    def _emitprimitivetype(self, ksy, bitwise):
        name = "enum_%s" % ksy.allocateId()
        ksy.enums[name] = self.ksymapping
        return name


class BitwisableString(str):
    """Used internally. A str subclass whose | operator concatenates labels with a pipe."""

    def __or__(self, other):
        return BitwisableString("{}|{}".format(self, other))
class FlagsEnum(Adapter):
    r"""
    Translates unicode label names to subcon integer (sub)values, and vice versa.

    Parses the integer subcon, then creates a Container where each flag is a
    boolean key. Builds from a container by bitwise-oring each flag whose key
    is set, or directly from an integer, a label string, or "a|b" pipe
    concatenations of labels. Size is same as subcon, unless it raises
    SizeofError.

    This class supports enum34 module (\*merge) and exposes member labels as
    attributes, as bitwisable strings.

    :param subcon: Construct instance, must operate on integers
    :param \*merge: optional, list of enum.IntEnum and enum.IntFlag instances, to merge labels and values from
    :param \*\*flags: dict, mapping string names to integer values

    :raises MappingError: building from object not like: integer string dict
    :raises MappingError: building from string but no mapping found

    Can raise arbitrary exceptions when computing | and & and value is non-integer.

    Example::

        >>> d = FlagsEnum(Byte, one=1, two=2, four=4, eight=8)
        >>> d.parse(b"\x03")
        Container(one=True, two=True, four=False, eight=False)
        >>> d.build(dict(one=True,two=True))
        b'\x03'
    """

    def __init__(self, subcon, *merge, **flags):
        super().__init__(subcon)
        for enum in merge:
            for entry in enum:
                flags[entry.name] = entry.value
        self.flags = flags
        self.reverseflags = {value: label for label, value in flags.items()}

    def __getattr__(self, name):
        if name in self.flags:
            return BitwisableString(name)
        raise AttributeError

    def _decode(self, obj, context, path):
        result = Container()
        result._flagsenum = True
        for label, value in self.flags.items():
            result[BitwisableString(label)] = (obj & value == value)
        return result

    def _encode(self, obj, context, path):
        if isinstance(obj, integertypes):
            return obj
        try:
            if isinstance(obj, stringtypes):
                result = 0
                for label in obj.split("|"):
                    label = label.strip()
                    if label:
                        result |= self.flags[label]  # may raise KeyError
                return result
            if isinstance(obj, dict):
                result = 0
                for label, value in obj.items():
                    if not label.startswith("_") and value:  # assumes key is a string
                        result |= self.flags[label]  # may raise KeyError
                return result
        except KeyError:
            raise MappingError("building failed, unknown label: %r" % (obj,), path=path)
        raise MappingError("building failed, unknown object: %r" % (obj,), path=path)

    def _emitparse(self, code):
        return f"reuse(({self.subcon._compileparse(code)}), lambda x: Container({', '.join(f'{k}=bool(x & {v} == {v})' for k,v in self.flags.items()) }))"

    def _emitseq(self, ksy, bitwise):
        # NOTE(review): the source text was corrupted in this region; this body is
        # reconstructed from the surviving one-bit-per-flag prologue — verify
        # against the upstream construct library before relying on ksy export.
        bitstotal = self.subcon.sizeof() * 8
        seq = []
        for i in range(bitstotal):
            value = 1 << i
            label = self.reverseflags.get(value, "unknown_%s" % i)
            seq.append(dict(id=label, type="b1", doc=hex(value)))
        return seq


class Mapping(Adapter):
    r"""
    Adapter that translates arbitrary objects to other arbitrary objects, in
    both directions (a bidirectional dict lookup). Size is same as subcon.

    :param subcon: Construct instance
    :param mapping: dict, maps decoded (user-facing) objects to the values the subcon builds; parsing uses its inverse

    :raises MappingError: parsing or building failed, no mapping found

    Example::

        >>> x = object
        >>> d = Mapping(Byte, {x:0})
        >>> d.parse(b"\x00")
        x
        >>> d.build(x)
        b'\x00'
    """

    def __init__(self, subcon, mapping):
        super().__init__(subcon)
        self.decmapping = {value: key for key, value in mapping.items()}
        self.encmapping = mapping

    def _decode(self, obj, context, path):
        try:
            return self.decmapping[obj]  # KeyError
        except (KeyError, TypeError):
            raise MappingError("parsing failed, no decoding mapping for %r" % (obj,), path=path)

    def _encode(self, obj, context, path):
        try:
            return self.encmapping[obj]  # KeyError
        except (KeyError, TypeError):
            raise MappingError("building failed, no encoding mapping for %r" % (obj,), path=path)

    def _emitparse(self, code):
        factoryname = f"factory_{code.allocateId()}"
        code.append(f"{factoryname} = {repr(self.decmapping)}")
        return f"{factoryname}[{self.subcon._compileparse(code)}]"

    def _emitbuild(self, code):
        factoryname = f"factory_{code.allocateId()}"
        code.append(f"{factoryname} = {repr(self.encmapping)}")
        return f"reuse({factoryname}[obj], lambda obj: ({self.subcon._compilebuild(code)}))"
#===============================================================================
# structures and sequences
#===============================================================================
class Struct(Construct):
    r"""
    Sequence of usually named constructs, similar to structs in C.

    Members are parsed and built in definition order. Anonymous members (name
    None) are parsed and discarded, or built from None. Parses into a
    Container (dict with attribute and key access) keyed by subcon names;
    builds from any dict. Size is the sum of member sizes, unless any member
    raises SizeofError.

    Context nesting: each member sees a fresh context whose "_" entry points
    to the outer context. Named members' parsed/built values are inserted
    into the context as they are processed.

    Exposes named subcons both as attributes on the Struct and via the
    context's _subcons entry (compiler does not support these features).

    Supports stopping: a :class:`~construct.core.StopIf` member that
    evaluates truthy ends parsing/building successfully.

    :param \*subcons: Construct instances, list of members, some can be anonymous
    :param \*\*subconskw: Construct instances, list of members (requires Python 3.6)

    :raises StreamError: requested reading negative amount, could not read enough bytes, requested writing different amount than actual data, or could not write all bytes
    :raises KeyError: building a subcon but found no corresponding key in dictionary

    Example::

        >>> d = Struct("num"/Int8ub, "data"/Bytes(this.num))
        >>> d.parse(b"\x04DATA")
        Container(num=4, data=b"DATA")
        >>> d.build(dict(num=4, data=b"DATA"))
        b"\x04DATA"
    """

    def __init__(self, *subcons, **subconskw):
        super().__init__()
        self.subcons = list(subcons) + [name / con for name, con in subconskw.items()]
        self._subcons = Container((sc.name, sc) for sc in self.subcons if sc.name)
        self.flagbuildnone = all(sc.flagbuildnone for sc in self.subcons)

    def __getattr__(self, name):
        if name in self._subcons:
            return self._subcons[name]
        raise AttributeError

    def _nest_context(self, outer, stream):
        # Build the per-member nested context: "_" chains to the outer scope,
        # "_root" is inherited (or is this context at the top level).
        ctx = Container(
            _ = outer,
            _params = outer._params,
            _root = None,
            _parsing = outer._parsing,
            _building = outer._building,
            _sizing = outer._sizing,
            _subcons = self._subcons,
            _io = stream,
            _index = outer.get("_index", None),
        )
        ctx._root = outer.get("_root", ctx)
        return ctx

    def _parse(self, stream, context, path):
        obj = Container()
        obj._io = stream
        context = self._nest_context(context, stream)
        for sc in self.subcons:
            try:
                value = sc._parsereport(stream, context, path)
                if sc.name:
                    obj[sc.name] = value
                    context[sc.name] = value
            except StopFieldError:
                break
        return obj

    def _build(self, obj, stream, context, path):
        if obj is None:
            obj = Container()
        context = self._nest_context(context, stream)
        context.update(obj)
        for sc in self.subcons:
            try:
                if sc.flagbuildnone:
                    subobj = obj.get(sc.name, None)
                else:
                    subobj = obj[sc.name]  # deliberately raises KeyError
                if sc.name:
                    context[sc.name] = subobj
                buildret = sc._build(subobj, stream, context, path)
                if sc.name:
                    context[sc.name] = buildret
            except StopFieldError:
                break
        return context

    def _sizeof(self, context, path):
        context = self._nest_context(context, None)
        try:
            return sum(sc._sizeof(context, path) for sc in self.subcons)
        except (KeyError, AttributeError):
            raise SizeofError("cannot calculate size, key not found in context", path=path)

    def _emitparse(self, code):
        funcname = f"parse_struct_{code.allocateId()}"
        src = f"""
            def {funcname}(io, this):
                result = Container()
                this = Container(_ = this, _params = this['_params'], _root = None, _parsing = True, _building = False, _sizing = False, _subcons = None, _io = io, _index = this.get('_index', None))
                this['_root'] = this['_'].get('_root', this)
                try:
        """
        for sc in self.subcons:
            src += f"""
                    {f'result[{repr(sc.name)}] = this[{repr(sc.name)}] = ' if sc.name else ''}{sc._compileparse(code)}
            """
        src += f"""
                    pass
                except StopFieldError:
                    pass
                return result
        """
        code.append(src)
        return f"{funcname}(io, this)"

    def _emitbuild(self, code):
        funcname = f"build_struct_{code.allocateId()}"
        src = f"""
            def {funcname}(obj, io, this):
                this = Container(_ = this, _params = this['_params'], _root = None, _parsing = False, _building = True, _sizing = False, _subcons = None, _io = io, _index = this.get('_index', None))
                this['_root'] = this['_'].get('_root', this)
                try:
                    objdict = obj
        """
        for sc in self.subcons:
            src += f"""
                    {f'obj = objdict.get({repr(sc.name)}, None)' if sc.flagbuildnone else f'obj = objdict[{repr(sc.name)}]'}
                    {f'this[{repr(sc.name)}] = obj' if sc.name else ''}
                    {f'this[{repr(sc.name)}] = ' if sc.name else ''}{sc._compilebuild(code)}
            """
        src += f"""
                    pass
                except StopFieldError:
                    pass
                return this
        """
        code.append(src)
        return f"{funcname}(obj, io, this)"

    def _emitseq(self, ksy, bitwise):
        return [sc._compilefulltype(ksy, bitwise) for sc in self.subcons]
class Sequence(Construct):
    r"""
    Sequence of usually un-named constructs.

    Members are parsed and built in definition order. Named members' values
    are inserted into the context, so later members may refer to earlier
    ones. Parses into a ListContainer (list with pretty-printing) in subcon
    order; builds from any list, element per subcon. Size is the sum of
    member sizes, unless any member raises SizeofError.

    Context nesting works as in Struct ("_" chains to the outer context).
    Named subcons are exposed as attributes and via the context's _subcons
    entry (compiler does not support these features).

    Supports stopping: a :class:`~construct.core.StopIf` member that
    evaluates truthy ends parsing/building successfully.

    :param \*subcons: Construct instances, list of members, some can be named
    :param \*\*subconskw: Construct instances, list of members (requires Python 3.6)

    :raises StreamError: requested reading negative amount, could not read enough bytes, requested writing different amount than actual data, or could not write all bytes
    :raises KeyError: building a subcon but found no corresponding key in dictionary

    Example::

        >>> d = Sequence(Byte, Float32b)
        >>> d.build([0, 1.23])
        b'\x00?\x9dp\xa4'
        >>> d.parse(_)
        [0, 1.2300000190734863] # a ListContainer
    """

    def __init__(self, *subcons, **subconskw):
        super().__init__()
        self.subcons = list(subcons) + [name / con for name, con in subconskw.items()]
        self._subcons = Container((sc.name, sc) for sc in self.subcons if sc.name)
        self.flagbuildnone = all(sc.flagbuildnone for sc in self.subcons)

    def __getattr__(self, name):
        if name in self._subcons:
            return self._subcons[name]
        raise AttributeError

    def _nest_context(self, outer, stream):
        # Build the per-member nested context: "_" chains to the outer scope,
        # "_root" is inherited (or is this context at the top level).
        ctx = Container(
            _ = outer,
            _params = outer._params,
            _root = None,
            _parsing = outer._parsing,
            _building = outer._building,
            _sizing = outer._sizing,
            _subcons = self._subcons,
            _io = stream,
            _index = outer.get("_index", None),
        )
        ctx._root = outer.get("_root", ctx)
        return ctx

    def _parse(self, stream, context, path):
        obj = ListContainer()
        context = self._nest_context(context, stream)
        for sc in self.subcons:
            try:
                value = sc._parsereport(stream, context, path)
                obj.append(value)
                if sc.name:
                    context[sc.name] = value
            except StopFieldError:
                break
        return obj

    def _build(self, obj, stream, context, path):
        if obj is None:
            obj = ListContainer([None for sc in self.subcons])
        context = self._nest_context(context, stream)
        objiter = iter(obj)
        retlist = ListContainer()
        for i, sc in enumerate(self.subcons):
            try:
                subobj = next(objiter)
                if sc.name:
                    context[sc.name] = subobj
                buildret = sc._build(subobj, stream, context, path)
                retlist.append(buildret)
                if sc.name:
                    context[sc.name] = buildret
            except StopFieldError:
                break
        return retlist

    def _sizeof(self, context, path):
        context = self._nest_context(context, None)
        try:
            return sum(sc._sizeof(context, path) for sc in self.subcons)
        except (KeyError, AttributeError):
            raise SizeofError("cannot calculate size, key not found in context", path=path)

    def _emitparse(self, code):
        funcname = f"parse_sequence_{code.allocateId()}"
        src = f"""
            def {funcname}(io, this):
                result = ListContainer()
                this = Container(_ = this, _params = this['_params'], _root = None, _parsing = True, _building = False, _sizing = False, _subcons = None, _io = io, _index = this.get('_index', None))
                this['_root'] = this['_'].get('_root', this)
                try:
        """
        for sc in self.subcons:
            src += f"""
                    result.append({sc._compileparse(code)})
            """
            if sc.name:
                src += f"""
                    this[{repr(sc.name)}] = result[-1]
                """
        src += f"""
                    pass
                except StopFieldError:
                    pass
                return result
        """
        code.append(src)
        return f"{funcname}(io, this)"

    def _emitbuild(self, code):
        funcname = f"build_sequence_{code.allocateId()}"
        src = f"""
            def {funcname}(obj, io, this):
                this = Container(_ = this, _params = this['_params'], _root = None, _parsing = False, _building = True, _sizing = False, _subcons = None, _io = io, _index = this.get('_index', None))
                this['_root'] = this['_'].get('_root', this)
                try:
                    objiter = iter(obj)
                    retlist = ListContainer()
        """
        for sc in self.subcons:
            src += f"""
                    obj = next(objiter)
                    {f'this[{repr(sc.name)}] = obj' if sc.name else ''}
                    x = {sc._compilebuild(code)}
                    retlist.append(x)
                    {f'this[{repr(sc.name)}] = x' if sc.name else ''}
            """
        src += f"""
                    pass
                except StopFieldError:
                    pass
                return retlist
        """
        code.append(src)
        return f"{funcname}(obj, io, this)"
#===============================================================================
# arrays ranges and repeaters
#===============================================================================
class Array(Subconstruct):
    r"""
    Homogenous array of elements, similar to C# generic T[].

    Parses into a ListContainer (a list). Parsing and building process an
    exact amount of elements; a list of any other length raises RangeError.
    Size is count multiplied by subcon size, but only if subcon is fixed
    size. Operator [] can be used to make Array instances (recommended
    syntax).

    :param count: integer or context lambda, strict amount of elements
    :param subcon: Construct instance, subcon to process individual elements
    :param discard: optional, bool, if set then parsing returns empty list

    :raises StreamError: requested reading negative amount, could not read enough bytes, requested writing different amount than actual data, or could not write all bytes
    :raises RangeError: specified count is not valid
    :raises RangeError: given object has different length than specified count

    Can propagate any exception from the lambdas, possibly non-ConstructError.

    Example::

        >>> d = Array(5, Byte) or Byte[5]
        >>> d.build(range(5))
        b'\x00\x01\x02\x03\x04'
        >>> d.parse(_)
        [0, 1, 2, 3, 4]
    """

    def __init__(self, count, subcon, discard=False):
        super().__init__(subcon)
        self.count = count
        self.discard = discard

    def _parse(self, stream, context, path):
        count = evaluate(self.count, context)
        if count < 0:
            raise RangeError("invalid count %s" % (count,), path=path)
        discard = self.discard
        obj = ListContainer()
        for index in range(count):
            context._index = index  # expose the element index to lambdas
            element = self.subcon._parsereport(stream, context, path)
            if not discard:
                obj.append(element)
        return obj

    def _build(self, obj, stream, context, path):
        count = evaluate(self.count, context)
        if count < 0:
            raise RangeError("invalid count %s" % (count,), path=path)
        if len(obj) != count:
            raise RangeError("expected %d elements, found %d" % (count, len(obj)), path=path)
        discard = self.discard
        retlist = ListContainer()
        for index, element in enumerate(obj):
            context._index = index
            buildret = self.subcon._build(element, stream, context, path)
            if not discard:
                retlist.append(buildret)
        return retlist

    def _sizeof(self, context, path):
        try:
            count = evaluate(self.count, context)
        except (KeyError, AttributeError):
            raise SizeofError("cannot calculate size, key not found in context", path=path)
        return count * self.subcon._sizeof(context, path)

    def _emitparse(self, code):
        return f"ListContainer(({self.subcon._compileparse(code)}) for i in range({self.count}))"

    def _emitbuild(self, code):
        return f"ListContainer(reuse(obj[i], lambda obj: ({self.subcon._compilebuild(code)})) for i in range({self.count}))"

    def _emitfulltype(self, ksy, bitwise):
        return dict(type=self.subcon._compileprimitivetype(ksy, bitwise), repeat="expr", repeat_expr=self.count)
class GreedyRange(Subconstruct):
    r"""
    Homogenous array of elements, parsing until end of stream (or first failure).

    Parses into a ListContainer (a list). Parsing stops when the subcon
    raises (EOF or format mismatch); the stream is then seeked back to the
    position after the last successful element. Builds each element of an
    enumerable as-is. Size is undefined.

    Supports stopping: a :class:`~construct.core.StopIf` member that
    evaluates truthy ends parsing/building successfully.

    :param subcon: Construct instance, subcon to process individual elements
    :param discard: optional, bool, if set then parsing returns empty list

    :raises StreamError: requested reading negative amount, could not read enough bytes, requested writing different amount than actual data, or could not write all bytes
    :raises StreamError: stream is not seekable and tellable

    Can propagate any exception from the lambdas, possibly non-ConstructError.

    Example::

        >>> d = GreedyRange(Byte)
        >>> d.build(range(8))
        b'\x00\x01\x02\x03\x04\x05\x06\x07'
        >>> d.parse(_)
        [0, 1, 2, 3, 4, 5, 6, 7]
    """

    def __init__(self, subcon, discard=False):
        super().__init__(subcon)
        self.discard = discard

    def _parse(self, stream, context, path):
        discard = self.discard
        obj = ListContainer()
        # FIX: take the initial tell BEFORE the try block. Previously `fallback`
        # was first assigned inside the loop, so if stream_tell raised on the
        # first iteration the `except Exception` handler hit an UnboundLocalError
        # instead of reporting the real (non-seekable stream) failure.
        fallback = stream_tell(stream, path)
        try:
            for i in itertools.count():
                context._index = i
                fallback = stream_tell(stream, path)
                e = self.subcon._parsereport(stream, context, path)
                if not discard:
                    obj.append(e)
        except StopFieldError:
            pass
        except ExplicitError:
            raise
        except Exception:
            # any other failure ends the range: rewind past the last good element
            stream_seek(stream, fallback, 0, path)
        return obj

    def _build(self, obj, stream, context, path):
        discard = self.discard
        # FIX: keep retlist outside the try so that a StopIf member returns the
        # partial list of built elements instead of falling through to None.
        retlist = ListContainer()
        try:
            for i, e in enumerate(obj):
                context._index = i
                buildret = self.subcon._build(e, stream, context, path)
                if not discard:
                    retlist.append(buildret)
        except StopFieldError:
            pass
        return retlist

    def _sizeof(self, context, path):
        raise SizeofError(path=path)

    def _emitfulltype(self, ksy, bitwise):
        return dict(type=self.subcon._compileprimitivetype(ksy, bitwise), repeat="eos")
class RepeatUntil(Subconstruct):
    r"""
    Homogenous array of elements that repeats until the predicate indicates it
    to stop. Note that the last element (that the predicate matched) is
    included in the returned list.

    Parsing iterates until an element passes the predicate. Building iterates
    over the given list until an element passes the predicate (RepeatError if
    none does). Size is undefined.

    :param predicate: lambda that takes (obj, list, context) and returns True to break or False to continue (or a truthy constant)
    :param subcon: Construct instance, subcon used to parse and build each element
    :param discard: optional, bool, if set then parsing returns empty list

    :raises StreamError: requested reading negative amount, could not read enough bytes, requested writing different amount than actual data, or could not write all bytes
    :raises RepeatError: consumed all elements in the stream but neither passed the predicate

    Can propagate any exception from the lambda, possibly non-ConstructError.

    Example::

        >>> d = RepeatUntil(lambda x,lst,ctx: x > 7, Byte)
        >>> d.build(range(20))
        b'\x00\x01\x02\x03\x04\x05\x06\x07\x08'
        >>> d.parse(b"\x01\xff\x02")
        [1, 255]

        >>> d = RepeatUntil(lambda x,lst,ctx: lst[-2:] == [0,0], Byte)
        >>> d.parse(b"\x01\x00\x00\xff")
        [1, 0, 0]
    """

    def __init__(self, predicate, subcon, discard=False):
        super().__init__(subcon)
        self.predicate = predicate
        self.discard = discard

    @staticmethod
    def _as_callable(predicate):
        """Wrap a non-callable (constant) predicate in a callable returning it.

        FIX: the original `predicate = lambda _1,_2,_3: predicate` suffered from
        late binding — after the reassignment the lambda returned itself (always
        truthy), so a falsy constant predicate still stopped after one element.
        Binding the constant as a default argument freezes the intended value.
        """
        if callable(predicate):
            return predicate
        return lambda _1, _2, _3, _const=predicate: _const

    def _parse(self, stream, context, path):
        predicate = self._as_callable(self.predicate)
        discard = self.discard
        obj = ListContainer()
        for i in itertools.count():
            context._index = i
            e = self.subcon._parsereport(stream, context, path)
            if not discard:
                obj.append(e)
            if predicate(e, obj, context):
                return obj

    def _build(self, obj, stream, context, path):
        predicate = self._as_callable(self.predicate)
        discard = self.discard
        partiallist = ListContainer()
        retlist = ListContainer()
        for i, e in enumerate(obj):
            context._index = i
            buildret = self.subcon._build(e, stream, context, path)
            if not discard:
                retlist.append(buildret)
            # the predicate always sees the full built prefix, even when discarding
            partiallist.append(buildret)
            if predicate(e, partiallist, context):
                break
        else:
            raise RepeatError("expected any item to match predicate, when building", path=path)
        return retlist

    def _sizeof(self, context, path):
        raise SizeofError("cannot calculate size, amount depends on actual data", path=path)

    def _emitparse(self, code):
        fname = f"parse_repeatuntil_{code.allocateId()}"
        block = f"""
            def {fname}(io, this):
                list_ = ListContainer()
                while True:
                    obj_ = {self.subcon._compileparse(code)}
                    if not ({self.discard}):
                        list_.append(obj_)
                    if ({self.predicate}):
                        return list_
        """
        code.append(block)
        return f"{fname}(io, this)"

    def _emitbuild(self, code):
        fname = f"build_repeatuntil_{code.allocateId()}"
        block = f"""
            def {fname}(obj, io, this):
                objiter = iter(obj)
                list_ = ListContainer()
                while True:
                    obj_ = reuse(next(objiter), lambda obj: {self.subcon._compilebuild(code)})
                    list_.append(obj_)
                    if ({self.predicate}):
                        return list_
        """
        code.append(block)
        return f"{fname}(obj, io, this)"

    def _emitfulltype(self, ksy, bitwise):
        return dict(type=self.subcon._compileprimitivetype(ksy, bitwise), repeat="until", repeat_until=repr(self.predicate).replace("obj_","_"))
+ + :param subcon: Construct instance + :param newname: optional, string + :param newdocs: optional, string + :param newparsed: optional, lambda + + Example:: + + >>> "number" / Int32ub + + """ + + def __init__(self, subcon, newname=None, newdocs=None, newparsed=None): + super().__init__(subcon) + self.name = newname if newname else subcon.name + self.docs = newdocs if newdocs else subcon.docs + self.parsed = newparsed if newparsed else subcon.parsed + + def __getattr__(self, name): + return getattr(self.subcon, name) + + def _parse(self, stream, context, path): + path += " -> %s" % (self.name,) + return self.subcon._parsereport(stream, context, path) + + def _build(self, obj, stream, context, path): + path += " -> %s" % (self.name,) + return self.subcon._build(obj, stream, context, path) + + def _sizeof(self, context, path): + path += " -> %s" % (self.name,) + return self.subcon._sizeof(context, path) + + def _emitparse(self, code): + return self.subcon._compileparse(code) + + def _emitbuild(self, code): + return self.subcon._compilebuild(code) + + def _emitseq(self, ksy, bitwise): + return self.subcon._compileseq(ksy, bitwise) + + def _emitprimitivetype(self, ksy, bitwise): + return self.subcon._compileprimitivetype(ksy, bitwise) + + def _emitfulltype(self, ksy, bitwise): + r = dict() + if self.name: + r.update(id=self.name) + r.update(self.subcon._compilefulltype(ksy, bitwise)) + if self.docs: + r.update(doc=self.docs) + return r + + +#=============================================================================== +# miscellaneous +#=============================================================================== +class Const(Subconstruct): + r""" + Field enforcing a constant. It is used for file signatures, to validate that the given pattern exists. Data in the stream must strictly match the specified value. + + Note that a variable sized subcon may still provide positive verification. 
Const does not consume a precomputed amount of bytes, but depends on the subcon to read the appropriate amount (eg. VarInt is acceptable). Whatever subcon parses into, gets compared against the specified value. + + Parses using subcon and return its value (after checking). Builds using subcon from nothing (or given object, if not None). Size is the same as subcon, unless it raises SizeofError. + + :param value: expected value, usually a bytes literal + :param subcon: optional, Construct instance, subcon used to build value from, assumed to be Bytes if value parameter was a bytes literal + + :raises StreamError: requested reading negative amount, could not read enough bytes, requested writing different amount than actual data, or could not write all bytes + :raises ConstError: parsed data does not match specified value, or building from wrong value + :raises StringError: building from non-bytes value, perhaps unicode + + Example:: + + >>> d = Const(b"IHDR") + >>> d.build(None) + b'IHDR' + >>> d.parse(b"JPEG") + construct.core.ConstError: expected b'IHDR' but parsed b'JPEG' + + >>> d = Const(255, Int32ul) + >>> d.build(None) + b'\xff\x00\x00\x00' + """ + + def __init__(self, value, subcon=None): + if subcon is None: + if not isinstance(value, bytestringtype): + raise StringError(f"given non-bytes value {repr(value)}, perhaps unicode?") + subcon = Bytes(len(value)) + super().__init__(subcon) + self.value = value + self.flagbuildnone = True + + def _parse(self, stream, context, path): + obj = self.subcon._parsereport(stream, context, path) + if not obj == self.value: + raise ConstError(f"parsing expected {repr(self.value)} but parsed {repr(obj)}", path=path) + return obj + + def _build(self, obj, stream, context, path): + if obj not in (None, self.value): + raise ConstError(f"building expected None or {repr(self.value)} but got {repr(obj)}", path=path) + return self.subcon._build(self.value, stream, context, path) + + def _sizeof(self, context, path): + return 
self.subcon._sizeof(context, path) + + def _emitparse(self, code): + code.append(f""" + def parse_const(value, expected): + if not value == expected: raise ConstError + return value + """) + return f"parse_const({self.subcon._compileparse(code)}, {repr(self.value)})" + + def _emitbuild(self, code): + if isinstance(self.value, bytes): + return f"(io.write({repr(self.value)}), {repr(self.value)})[1]" + else: + return f"reuse({repr(self.value)}, lambda obj: {self.subcon._compilebuild(code)})" + + def _emitfulltype(self, ksy, bitwise): + data = self.subcon.build(self.value) + return dict(contents=list(data)) + + +class Computed(Construct): + r""" + Field computing a value from the context dictionary or some outer source like os.urandom or random module. Underlying byte stream is unaffected. The source can be non-deterministic. + + Parsing and Building return the value returned by the context lambda (although a constant value can also be used). Size is defined as 0 because parsing and building does not consume or produce bytes into the stream. + + :param func: context lambda or constant value + + Can propagate any exception from the lambda, possibly non-ConstructError. + + Example:: + >>> d = Struct( + ... "width" / Byte, + ... "height" / Byte, + ... "total" / Computed(this.width * this.height), + ... 
) + >>> d.build(dict(width=4,height=5)) + b'\x04\x05' + >>> d.parse(b"12") + Container(width=49, height=50, total=2450) + + >>> d = Computed(7) + >>> d.parse(b"") + 7 + >>> d = Computed(lambda ctx: 7) + >>> d.parse(b"") + 7 + + >>> import os + >>> d = Computed(lambda ctx: os.urandom(10)) + >>> d.parse(b"") + b'\x98\xc2\xec\x10\x07\xf5\x8e\x98\xc2\xec' + """ + + def __init__(self, func): + super().__init__() + self.func = func + self.flagbuildnone = True + + def _parse(self, stream, context, path): + return self.func(context) if callable(self.func) else self.func + + def _build(self, obj, stream, context, path): + return self.func(context) if callable(self.func) else self.func + + def _sizeof(self, context, path): + return 0 + + def _emitparse(self, code): + return repr(self.func) + + def _emitbuild(self, code): + return repr(self.func) + + +@singleton +class Index(Construct): + r""" + Indexes a field inside outer :class:`~construct.core.Array` :class:`~construct.core.GreedyRange` :class:`~construct.core.RepeatUntil` context. + + Note that you can use this class, or use `this._index` expression instead, depending on how its used. See the examples. + + Parsing and building pulls _index key from the context. Size is 0 because stream is unaffected. 
+ + :raises IndexFieldError: did not find either key in context + + Example:: + + >>> d = Array(3, Index) + >>> d.parse(b"") + [0, 1, 2] + >>> d = Array(3, Struct("i" / Index)) + >>> d.parse(b"") + [Container(i=0), Container(i=1), Container(i=2)] + + >>> d = Array(3, Computed(this._index+1)) + >>> d.parse(b"") + [1, 2, 3] + >>> d = Array(3, Struct("i" / Computed(this._._index+1))) + >>> d.parse(b"") + [Container(i=1), Container(i=2), Container(i=3)] + """ + + def __init__(self): + super().__init__() + self.flagbuildnone = True + + def _parse(self, stream, context, path): + return context.get("_index", None) + + def _build(self, obj, stream, context, path): + return context.get("_index", None) + + def _sizeof(self, context, path): + return 0 + + +class Rebuild(Subconstruct): + r""" + Field where building does not require a value, because the value gets recomputed when needed. Comes handy when building a Struct from a dict with missing keys. Useful for length and count fields when :class:`~construct.core.Prefixed` and :class:`~construct.core.PrefixedArray` cannot be used. + + Parsing defers to subcon. Building is defered to subcon, but it builds from a value provided by the context lambda (or constant). Size is the same as subcon, unless it raises SizeofError. + + Difference between Default and Rebuild, is that in first the build value is optional and in second the build value is ignored. + + :param subcon: Construct instance + :param func: context lambda or constant value + + :raises StreamError: requested reading negative amount, could not read enough bytes, requested writing different amount than actual data, or could not write all bytes + + Can propagate any exception from the lambda, possibly non-ConstructError. + + Example:: + + >>> d = Struct( + ... "count" / Rebuild(Byte, len_(this.items)), + ... "items" / Byte[this.count], + ... 
) + >>> d.build(dict(items=[1,2,3])) + b'\x03\x01\x02\x03' + """ + + def __init__(self, subcon, func): + super().__init__(subcon) + self.func = func + self.flagbuildnone = True + + def _build(self, obj, stream, context, path): + obj = evaluate(self.func, context) + return self.subcon._build(obj, stream, context, path) + + def _emitparse(self, code): + return self.subcon._compileparse(code) + + def _emitbuild(self, code): + return f"reuse({repr(self.func)}, lambda obj: ({self.subcon._compilebuild(code)}))" + + def _emitseq(self, ksy, bitwise): + return self.subcon._compileseq(ksy, bitwise) + + def _emitprimitivetype(self, ksy, bitwise): + return self.subcon._compileprimitivetype(ksy, bitwise) + + def _emitfulltype(self, ksy, bitwise): + return self.subcon._compilefulltype(ksy, bitwise) + + +class Default(Subconstruct): + r""" + Field where building does not require a value, because the value gets taken from default. Comes handy when building a Struct from a dict with missing keys. + + Parsing defers to subcon. Building is defered to subcon, but it builds from a default (if given object is None) or from given object. Building does not require a value, but can accept one. Size is the same as subcon, unless it raises SizeofError. + + Difference between Default and Rebuild, is that in first the build value is optional and in second the build value is ignored. + + :param subcon: Construct instance + :param value: context lambda or constant value + + :raises StreamError: requested reading negative amount, could not read enough bytes, requested writing different amount than actual data, or could not write all bytes + + Can propagate any exception from the lambda, possibly non-ConstructError. + + Example:: + + >>> d = Struct( + ... "a" / Default(Byte, 0), + ... 
) + >>> d.build(dict(a=1)) + b'\x01' + >>> d.build(dict()) + b'\x00' + """ + + def __init__(self, subcon, value): + super().__init__(subcon) + self.value = value + self.flagbuildnone = True + + def _build(self, obj, stream, context, path): + obj = evaluate(self.value, context) if obj is None else obj + return self.subcon._build(obj, stream, context, path) + + def _emitparse(self, code): + return self.subcon._compileparse(code) + + def _emitbuild(self, code): + return f"reuse({repr(self.value)} if obj is None else obj, lambda obj: ({self.subcon._compilebuild(code)}))" + + def _emitseq(self, ksy, bitwise): + return self.subcon._compileseq(ksy, bitwise) + + def _emitprimitivetype(self, ksy, bitwise): + return self.subcon._compileprimitivetype(ksy, bitwise) + + def _emitfulltype(self, ksy, bitwise): + return self.subcon._compilefulltype(ksy, bitwise) + + +class Check(Construct): + r""" + Checks for a condition, and raises CheckError if the check fails. + + Parsing and building return nothing (but check the condition). Size is 0 because stream is unaffected. + + :param func: bool or context lambda, that gets run on parsing and building + + :raises CheckError: lambda returned false + + Can propagate any exception from the lambda, possibly non-ConstructError. 
+ + Example:: + + Check(lambda ctx: len(ctx.payload.data) == ctx.payload_len) + Check(len_(this.payload.data) == this.payload_len) + """ + + def __init__(self, func): + super().__init__() + self.func = func + self.flagbuildnone = True + + def _parse(self, stream, context, path): + passed = evaluate(self.func, context) + if not passed: + raise CheckError("check failed during parsing", path=path) + + def _build(self, obj, stream, context, path): + passed = evaluate(self.func, context) + if not passed: + raise CheckError("check failed during building", path=path) + + def _sizeof(self, context, path): + return 0 + + def _emitparse(self, code): + code.append(f""" + def parse_check(condition): + if not condition: raise CheckError + """) + return f"parse_check({repr(self.func)})" + + def _emitbuild(self, code): + code.append(f""" + def build_check(condition): + if not condition: raise CheckError + """) + return f"build_check({repr(self.func)})" + + +@singleton +class Error(Construct): + r""" + Raises ExplicitError, unconditionally. + + Parsing and building always raise ExplicitError. Size is undefined. 
+ + :raises ExplicitError: unconditionally, on parsing and building + + Example:: + + >>> d = Struct("num"/Byte, Error) + >>> d.parse(b"data...") + construct.core.ExplicitError: Error field was activated during parsing + """ + + def __init__(self): + super().__init__() + self.flagbuildnone = True + + def _parse(self, stream, context, path): + raise ExplicitError("Error field was activated during parsing", path=path) + + def _build(self, obj, stream, context, path): + raise ExplicitError("Error field was activated during building", path=path) + + def _sizeof(self, context, path): + raise SizeofError("Error does not have size, because it interrupts parsing and building", path=path) + + def _emitparse(self, code): + code.append(""" + def parse_error(): + raise ExplicitError + """) + return "parse_error()" + + def _emitbuild(self, code): + code.append(""" + def build_error(): + raise ExplicitError + """) + return "build_error()" + + +class FocusedSeq(Construct): + r""" + Allows constructing more elaborate "adapters" than Adapter class. + + Parse does parse all subcons in sequence, but returns only the element that was selected (discards other values). Build does build all subcons in sequence, where each gets build from nothing (except the selected subcon which is given the object). Size is the sum of all subcon sizes, unless any subcon raises SizeofError. + + This class does context nesting, meaning its members are given access to a new dictionary where the "_" entry points to the outer context. When parsing, each member gets parsed and subcon parse return value is inserted into context under matching key only if the member was named. When building, the matching entry gets inserted into context before subcon gets build, and if subcon build returns a new value (not None) that gets replaced in the context. + + This class exposes subcons as attributes. 
You can refer to subcons that were inlined (and therefore do not exist as variable in the namespace) by accessing the struct attributes, under same name. Also note that compiler does not support this feature. See examples. + + This class exposes subcons in the context. You can refer to subcons that were inlined (and therefore do not exist as variable in the namespace) within other inlined fields using the context. Note that you need to use a lambda (`this` expression is not supported). Also note that compiler does not support this feature. See examples. + + This class is used internally to implement :class:`~construct.core.PrefixedArray`. + + :param parsebuildfrom: string name or context lambda, selects a subcon + :param \*subcons: Construct instances, list of members, some can be named + :param \*\*subconskw: Construct instances, list of members (requires Python 3.6) + + :raises StreamError: requested reading negative amount, could not read enough bytes, requested writing different amount than actual data, or could not write all bytes + :raises UnboundLocalError: selector does not match any subcon + + Can propagate any exception from the lambda, possibly non-ConstructError. + + Excample:: + + >>> d = FocusedSeq("num", Const(b"SIG"), "num"/Byte, Terminated) + >>> d.parse(b"SIG\xff") + 255 + >>> d.build(255) + b'SIG\xff' + + >>> d = FocusedSeq("animal", + ... "animal" / Enum(Byte, giraffe=1), + ... ) + >>> d.animal.giraffe + 'giraffe' + >>> d = FocusedSeq("count", + ... "count" / Byte, + ... "data" / Padding(lambda this: this.count - this._subcons.count.sizeof()), + ... 
) + >>> d.build(4) + b'\x04\x00\x00\x00' + + PrefixedArray <--> FocusedSeq("items", + "count" / Rebuild(lengthfield, len_(this.items)), + "items" / subcon[this.count], + ) + """ + + def __init__(self, parsebuildfrom, *subcons, **subconskw): + super().__init__() + self.parsebuildfrom = parsebuildfrom + self.subcons = list(subcons) + list(k/v for k,v in subconskw.items()) + self._subcons = Container((sc.name,sc) for sc in self.subcons if sc.name) + + def __getattr__(self, name): + if name in self._subcons: + return self._subcons[name] + raise AttributeError + + def _parse(self, stream, context, path): + context = Container(_ = context, _params = context._params, _root = None, _parsing = context._parsing, _building = context._building, _sizing = context._sizing, _subcons = self._subcons, _io = stream, _index = context.get("_index", None)) + context._root = context._.get("_root", context) + parsebuildfrom = evaluate(self.parsebuildfrom, context) + for i,sc in enumerate(self.subcons): + parseret = sc._parsereport(stream, context, path) + if sc.name: + context[sc.name] = parseret + if sc.name == parsebuildfrom: + finalret = parseret + return finalret + + def _build(self, obj, stream, context, path): + context = Container(_ = context, _params = context._params, _root = None, _parsing = context._parsing, _building = context._building, _sizing = context._sizing, _subcons = self._subcons, _io = stream, _index = context.get("_index", None)) + context._root = context._.get("_root", context) + parsebuildfrom = evaluate(self.parsebuildfrom, context) + context[parsebuildfrom] = obj + for i,sc in enumerate(self.subcons): + buildret = sc._build(obj if sc.name == parsebuildfrom else None, stream, context, path) + if sc.name: + context[sc.name] = buildret + if sc.name == parsebuildfrom: + finalret = buildret + return finalret + + def _sizeof(self, context, path): + context = Container(_ = context, _params = context._params, _root = None, _parsing = context._parsing, _building = 
context._building, _sizing = context._sizing, _subcons = self._subcons, _io = None, _index = context.get("_index", None)) + context._root = context._.get("_root", context) + try: + return sum(sc._sizeof(context, path) for sc in self.subcons) + except (KeyError, AttributeError): + raise SizeofError("cannot calculate size, key not found in context", path=path) + + def _emitparse(self, code): + fname = f"parse_focusedseq_{code.allocateId()}" + block = f""" + def {fname}(io, this): + result = [] + this = Container(_ = this, _params = this['_params'], _root = None, _parsing = True, _building = False, _sizing = False, _subcons = None, _io = io, _index = this.get('_index', None)) + this['_root'] = this['_'].get('_root', this) + """ + for sc in self.subcons: + block += f""" + result.append({sc._compileparse(code)}) + """ + if sc.name: + block += f""" + this[{repr(sc.name)}] = result[-1] + """ + block += f""" + return this[{repr(self.parsebuildfrom)}] + """ + code.append(block) + return f"{fname}(io, this)" + + def _emitbuild(self, code): + fname = f"build_focusedseq_{code.allocateId()}" + block = f""" + def {fname}(obj, io, this): + this = Container(_ = this, _params = this['_params'], _root = None, _parsing = False, _building = True, _sizing = False, _subcons = None, _io = io, _index = this.get('_index', None)) + this['_root'] = this['_'].get('_root', this) + try: + this[{repr(self.parsebuildfrom)}] = obj + finalobj = obj + """ + for sc in self.subcons: + block += f""" + {f'obj = {"finalobj" if sc.name == self.parsebuildfrom else "None"}'} + {f'buildret = '}{sc._compilebuild(code)} + {f'this[{repr(sc.name)}] = buildret' if sc.name else ''} + {f'{"finalret = buildret" if sc.name == self.parsebuildfrom else ""}'} + """ + block += f""" + pass + except StopFieldError: + pass + return finalret + """ + code.append(block) + return f"{fname}(obj, io, this)" + + def _emitseq(self, ksy, bitwise): + return [sc._compilefulltype(ksy, bitwise) for sc in self.subcons] + + +@singleton 
+class Pickled(Construct): + r""" + Preserves arbitrary Python objects. + + Parses using `pickle.load() `_ and builds using `pickle.dump() `_ functions, using default Pickle binary protocol. Size is undefined. + + :raises StreamError: requested reading negative amount, could not read enough bytes, requested writing different amount than actual data, or could not write all bytes + + Can propagate pickle.load() and pickle.dump() exceptions. + + Example:: + + >>> x = [1, 2.3, {}] + >>> Pickled.build(x) + b'\x80\x03]q\x00(K\x01G@\x02ffffff}q\x01e.' + >>> Pickled.parse(_) + [1, 2.3, {}] + """ + + def _parse(self, stream, context, path): + return pickle.load(stream) + + def _build(self, obj, stream, context, path): + pickle.dump(obj, stream) + return obj + + +@singleton +class Numpy(Construct): + r""" + Preserves numpy arrays (both shape, dtype and values). + + Parses using `numpy.load() `_ and builds using `numpy.save() `_ functions, using Numpy binary protocol. Size is undefined. + + :raises ImportError: numpy could not be imported during parsing or building + :raises ValueError: could not read enough bytes, or so + + Can propagate numpy.load() and numpy.save() exceptions. + + Example:: + + >>> import numpy + >>> a = numpy.asarray([1,2,3]) + >>> Numpy.build(a) + b"\x93NUMPY\x01\x00F\x00{'descr': '>> Numpy.parse(_) + array([1, 2, 3]) + """ + + def _parse(self, stream, context, path): + import numpy + return numpy.load(stream) + + def _build(self, obj, stream, context, path): + import numpy + numpy.save(stream, obj) + return obj + + +class NamedTuple(Adapter): + r""" + Both arrays, structs, and sequences can be mapped to a namedtuple from `collections module `_. To create a named tuple, you need to provide a name and a sequence of fields, either a string with space-separated names or a list of string names, like the standard namedtuple. + + Parses into a collections.namedtuple instance, and builds from such instance (although it also builds from lists and dicts). 
Size is undefined. + + :param tuplename: string + :param tuplefields: string or list of strings + :param subcon: Construct instance, either Struct Sequence Array GreedyRange + + :raises StreamError: requested reading negative amount, could not read enough bytes, requested writing different amount than actual data, or could not write all bytes + :raises NamedTupleError: subcon is neither Struct Sequence Array GreedyRange + + Can propagate collections exceptions. + + Example:: + + >>> d = NamedTuple("coord", "x y z", Byte[3]) + >>> d = NamedTuple("coord", "x y z", Byte >> Byte >> Byte) + >>> d = NamedTuple("coord", "x y z", "x"/Byte + "y"/Byte + "z"/Byte) + >>> d.parse(b"123") + coord(x=49, y=50, z=51) + """ + + def __init__(self, tuplename, tuplefields, subcon): + if not isinstance(subcon, (Struct,Sequence,Array,GreedyRange)): + raise NamedTupleError("subcon is neither Struct Sequence Array GreedyRange") + super().__init__(subcon) + self.tuplename = tuplename + self.tuplefields = tuplefields + self.factory = collections.namedtuple(tuplename, tuplefields) + + def _decode(self, obj, context, path): + if isinstance(self.subcon, Struct): + del obj["_io"] + return self.factory(**obj) + if isinstance(self.subcon, (Sequence,Array,GreedyRange)): + return self.factory(*obj) + raise NamedTupleError("subcon is neither Struct Sequence Array GreedyRangeGreedyRange", path=path) + + def _encode(self, obj, context, path): + if isinstance(self.subcon, Struct): + return Container({sc.name:getattr(obj,sc.name) for sc in self.subcon.subcons if sc.name}) + if isinstance(self.subcon, (Sequence,Array,GreedyRange)): + return list(obj) + raise NamedTupleError("subcon is neither Struct Sequence Array GreedyRange", path=path) + + def _emitparse(self, code): + fname = "factory_%s" % code.allocateId() + code.append(""" + %s = collections.namedtuple(%r, %r) + """ % (fname, self.tuplename, self.tuplefields, )) + if isinstance(self.subcon, Struct): + return "%s(**(%s))" % (fname, 
self.subcon._compileparse(code), ) + if isinstance(self.subcon, (Sequence,Array,GreedyRange)): + return "%s(*(%s))" % (fname, self.subcon._compileparse(code), ) + raise NamedTupleError("subcon is neither Struct Sequence Array GreedyRange") + + def _emitseq(self, ksy, bitwise): + return self.subcon._compileseq(ksy, bitwise) + + def _emitprimitivetype(self, ksy, bitwise): + return self.subcon._compileprimitivetype(ksy, bitwise) + + def _emitfulltype(self, ksy, bitwise): + return self.subcon._compilefulltype(ksy, bitwise) + + +class TimestampAdapter(Adapter): + """Used internally.""" + + +def Timestamp(subcon, unit, epoch): + r""" + Datetime, represented as `Arrow `_ object. + + Note that accuracy is not guaranteed, because building rounds the value to integer (even when Float subcon is used), due to floating-point errors in general, and because MSDOS scheme has only 5-bit (32 values) seconds field (seconds are rounded to multiple of 2). + + Unit is a fraction of a second. 1 is second resolution, 10**-3 is milliseconds resolution, 10**-6 is microseconds resolution, etc. Usually its 1 on Unix and MacOSX, 10**-7 on Windows. Epoch is a year (if integer) or a specific day (if Arrow object). Usually its 1970 on Unix, 1904 on MacOSX, 1600 on Windows. MSDOS format doesnt support custom unit or epoch, it uses 2-seconds resolution and 1980 epoch. 
+ + :param subcon: Construct instance like Int* Float*, or Int32ub with msdos format + :param unit: integer or float, or msdos string + :param epoch: integer, or Arrow instance, or msdos string + + :raises ImportError: arrow could not be imported during ctor + :raises TimestampError: subcon is not a Construct instance + :raises TimestampError: unit or epoch is a wrong type + + Example:: + + >>> d = Timestamp(Int64ub, 1., 1970) + >>> d.parse(b'\x00\x00\x00\x00ZIz\x00') + + >>> d = Timestamp(Int32ub, "msdos", "msdos") + >>> d.parse(b'H9\x8c"') + + """ + import arrow + + if not isinstance(subcon, Construct): + raise TimestampError("subcon should be Int*, experimentally Float*, or Int32ub when using msdos format") + if not isinstance(unit, (integertypes, float, stringtypes)): + raise TimestampError("unit must be one of: int float string") + if not isinstance(epoch, (integertypes, arrow.Arrow, stringtypes)): + raise TimestampError("epoch must be one of: int Arrow string") + + if unit == "msdos" or epoch == "msdos": + st = BitStruct( + "year" / BitsInteger(7), + "month" / BitsInteger(4), + "day" / BitsInteger(5), + "hour" / BitsInteger(5), + "minute" / BitsInteger(6), + "second" / BitsInteger(5), + ) + class MsdosTimestampAdapter(TimestampAdapter): + def _decode(self, obj, context, path): + return arrow.Arrow(1980,1,1).shift(years=obj.year, months=obj.month-1, days=obj.day-1, hours=obj.hour, minutes=obj.minute, seconds=obj.second*2) + def _encode(self, obj, context, path): + t = obj.timetuple() + return Container(year=t.tm_year-1980, month=t.tm_mon, day=t.tm_mday, hour=t.tm_hour, minute=t.tm_min, second=t.tm_sec//2) + macro = MsdosTimestampAdapter(st) + + else: + if isinstance(epoch, integertypes): + epoch = arrow.Arrow(epoch, 1, 1) + class EpochTimestampAdapter(TimestampAdapter): + def _decode(self, obj, context, path): + return epoch.shift(seconds=obj*unit) + def _encode(self, obj, context, path): + return int((obj-epoch).total_seconds()/unit) + macro = 
EpochTimestampAdapter(subcon) + + def _emitfulltype(ksy, bitwise): + return subcon._compilefulltype(ksy, bitwise) + def _emitprimitivetype(ksy, bitwise): + return subcon._compileprimitivetype(ksy, bitwise) + macro._emitfulltype = _emitfulltype + macro._emitprimitivetype = _emitprimitivetype + return macro + + +class Hex(Adapter): + r""" + Adapter for displaying hexadecimal/hexlified representation of integers/bytes/RawCopy dictionaries. + + Parsing results in int-alike bytes-alike or dict-alike object, whose only difference from original is pretty-printing. If you look at the result, you will be presented with its `repr` which remains as-is. If you print it, then you will see its `str` whic is a hexlified representation. Building and sizeof defer to subcon. + + To obtain a hexlified string (like before Hex HexDump changed semantics) use binascii.(un)hexlify on parsed results. + + Example:: + + >>> d = Hex(Int32ub) + >>> obj = d.parse(b"\x00\x00\x01\x02") + >>> obj + 258 + >>> print(obj) + 0x00000102 + + >>> d = Hex(GreedyBytes) + >>> obj = d.parse(b"\x00\x00\x01\x02") + >>> obj + b'\x00\x00\x01\x02' + >>> print(obj) + unhexlify('00000102') + + >>> d = Hex(RawCopy(Int32ub)) + >>> obj = d.parse(b"\x00\x00\x01\x02") + >>> obj + {'data': b'\x00\x00\x01\x02', + 'length': 4, + 'offset1': 0, + 'offset2': 4, + 'value': 258} + >>> print(obj) + unhexlify('00000102') + """ + def _decode(self, obj, context, path): + if isinstance(obj, integertypes): + return HexDisplayedInteger.new(obj, "0%sX" % (2 * self.subcon._sizeof(context, path))) + if isinstance(obj, bytestringtype): + return HexDisplayedBytes(obj) + if isinstance(obj, dict): + return HexDisplayedDict(obj) + return obj + + def _encode(self, obj, context, path): + return obj + + def _emitparse(self, code): + return self.subcon._compileparse(code) + + def _emitseq(self, ksy, bitwise): + return self.subcon._compileseq(ksy, bitwise) + + def _emitprimitivetype(self, ksy, bitwise): + return 
self.subcon._compileprimitivetype(ksy, bitwise) + + def _emitfulltype(self, ksy, bitwise): + return self.subcon._compilefulltype(ksy, bitwise) + + +class HexDump(Adapter): + r""" + Adapter for displaying hexlified representation of bytes/RawCopy dictionaries. + + Parsing results in bytes-alike or dict-alike object, whose only difference from original is pretty-printing. If you look at the result, you will be presented with its `repr` which remains as-is. If you print it, then you will see its `str` whic is a hexlified representation. Building and sizeof defer to subcon. + + To obtain a hexlified string (like before Hex HexDump changed semantics) use construct.lib.hexdump on parsed results. + + Example:: + + >>> d = HexDump(GreedyBytes) + >>> obj = d.parse(b"\x00\x00\x01\x02") + >>> obj + b'\x00\x00\x01\x02' + >>> print(obj) + hexundump(''' + 0000 00 00 01 02 .... + ''') + + >>> d = HexDump(RawCopy(Int32ub)) + >>> obj = d.parse(b"\x00\x00\x01\x02") + >>> obj + {'data': b'\x00\x00\x01\x02', + 'length': 4, + 'offset1': 0, + 'offset2': 4, + 'value': 258} + >>> print(obj) + hexundump(''' + 0000 00 00 01 02 .... 
#===============================================================================
# conditional
#===============================================================================
class Union(Construct):
    r"""
    Treats the same data as multiple constructs (similar to C union) so you can look at the data in multiple views. Fields are usually named (so parsed values are inserted into dictionary under same name).

    Parses subcons in sequence, and reverts the stream back to original position after each subcon. Afterwards, advances the stream by selected subcon. Builds from first subcon that has a matching key in given dict. Size is undefined (because parsefrom is not used for building).

    This class does context nesting, meaning its members are given access to a new dictionary where the "_" entry points to the outer context. When parsing, each member gets parsed and subcon parse return value is inserted into context under matching key only if the member was named. When building, the matching entry gets inserted into context before subcon gets build, and if subcon build returns a new value (not None) that gets replaced in the context.

    This class exposes subcons as attributes. You can refer to subcons that were inlined (and therefore do not exist as variable in the namespace) by accessing the struct attributes, under same name. Also note that compiler does not support this feature. See examples.

    This class exposes subcons in the context. You can refer to subcons that were inlined (and therefore do not exist as variable in the namespace) within other inlined fields using the context. Note that you need to use a lambda (`this` expression is not supported). Also note that compiler does not support this feature. See examples.

    .. warning:: If you skip `parsefrom` parameter then stream will be left back at starting offset, not seeked to any common denominator.

    :param parsefrom: how to leave stream after parsing, can be integer index or string name selecting a subcon, or None (leaves stream at initial offset, the default), or context lambda
    :param \*subcons: Construct instances, list of members, some can be anonymous
    :param \*\*subconskw: Construct instances, list of members (requires Python 3.6)

    :raises StreamError: requested reading negative amount, could not read enough bytes, requested writing different amount than actual data, or could not write all bytes
    :raises StreamError: stream is not seekable and tellable
    :raises UnionError: selector does not match any subcon, or dict given to build does not contain any keys matching any subcon
    :raises IndexError: selector does not match any subcon
    :raises KeyError: selector does not match any subcon

    Can propagate any exception from the lambda, possibly non-ConstructError.

    Example::

        >>> d = Union(0,
        ...     "raw" / Bytes(8),
        ...     "ints" / Int32ub[2],
        ...     "shorts" / Int16ub[4],
        ...     "chars" / Byte[8],
        ... )
        >>> d.parse(b"12345678")
        Container(raw=b'12345678', ints=[825373492, 892745528], shorts=[12594, 13108, 13622, 14136], chars=[49, 50, 51, 52, 53, 54, 55, 56])
        >>> d.build(dict(chars=range(8)))
        b'\x00\x01\x02\x03\x04\x05\x06\x07'

        >>> d = Union(None,
        ...     "animal" / Enum(Byte, giraffe=1),
        ... )
        >>> d.animal.giraffe
        'giraffe'

        >>> d = Union(None,
        ...     "chars" / Byte[4],
        ...     "data" / Bytes(lambda this: this._subcons.chars.sizeof()),
        ... )
        >>> d.parse(b"\x01\x02\x03\x04")
        Container(chars=[1, 2, 3, 4], data=b'\x01\x02\x03\x04')

    Alternative syntax, but requires Python 3.6 or any PyPy:

        >>> Union(0, raw=Bytes(8), ints=Int32ub[2], shorts=Int16ub[4], chars=Byte[8])
    """

    def __init__(self, parsefrom, *subcons, **subconskw):
        # parsefrom selects which member's end-offset the stream is left at;
        # it must not itself be a Construct (common mistake: forgetting it).
        if isinstance(parsefrom, Construct):
            raise UnionError("parsefrom should be either: None int str context-function")
        super().__init__()
        self.parsefrom = parsefrom
        # keyword members are named via the / operator (name / construct)
        self.subcons = list(subcons) + list(k/v for k,v in subconskw.items())
        # named members are exposed both as attributes and via _subcons in context
        self._subcons = Container((sc.name,sc) for sc in self.subcons if sc.name)

    def __getattr__(self, name):
        # expose named members as attributes (e.g. d.animal)
        if name in self._subcons:
            return self._subcons[name]
        raise AttributeError

    def _parse(self, stream, context, path):
        obj = Container()
        # nest the context: "_" points at the outer context
        context = Container(_ = context, _params = context._params, _root = None, _parsing = context._parsing, _building = context._building, _sizing = context._sizing, _subcons = self._subcons, _io = stream, _index = context.get("_index", None))
        context._root = context._.get("_root", context)
        fallback = stream_tell(stream, path)
        # remember each member's end offset, both by index and by name
        forwards = {}
        for i,sc in enumerate(self.subcons):
            subobj = sc._parsereport(stream, context, path)
            if sc.name:
                obj[sc.name] = subobj
                context[sc.name] = subobj
            forwards[i] = stream_tell(stream, path)
            if sc.name:
                forwards[sc.name] = stream_tell(stream, path)
            # rewind so every member parses the same bytes
            stream_seek(stream, fallback, 0, path)
        parsefrom = evaluate(self.parsefrom, context)
        if parsefrom is not None:
            # advance to the selected member's end offset
            stream_seek(stream, forwards[parsefrom], 0, path) # raises KeyError
        return obj

    def _build(self, obj, stream, context, path):
        # nest the context, then merge the build dict into it
        context = Container(_ = context, _params = context._params, _root = None, _parsing = context._parsing, _building = context._building, _sizing = context._sizing, _subcons = self._subcons, _io = stream, _index = context.get("_index", None))
        context._root = context._.get("_root", context)
        context.update(obj)
        for sc in self.subcons:
            # build from the FIRST member that is represented in obj
            # (or that tolerates building from None)
            if sc.flagbuildnone:
                subobj = obj.get(sc.name, None)
            elif sc.name in obj:
                subobj = obj[sc.name]
            else:
                continue

            if sc.name:
                context[sc.name] = subobj

            buildret = sc._build(subobj, stream, context, path)
            if sc.name:
                context[sc.name] = buildret
            return Container({sc.name:buildret})
        else:
            # for-else: loop finished without building anything
            raise UnionError("cannot build, none of subcons were found in the dictionary %r" % (obj, ), path=path)

    def _sizeof(self, context, path):
        raise SizeofError("Union builds depending on actual object dict, size is unknown", path=path)

    def _emitparse(self, code):
        # Compiled form: parse every member, seeking back between members,
        # then leave the stream per the (constant) parsefrom selector.
        if callable(self.parsefrom):
            raise NotImplementedError("Union does not compile non-constant parsefrom")
        fname = "parse_union_%s" % code.allocateId()
        block = """
            def %s(io, this):
                this = Container(_ = this, _params = this['_params'], _root = None, _parsing = True, _building = False, _sizing = False, _subcons = None, _io = io, _index = this.get('_index', None))
                this['_root'] = this['_'].get('_root', this)
                fallback = io.tell()
        """ % (fname, )
        # skipfallback/skipforward elide redundant seeks when the selected
        # member's size equals the last member's size
        if isinstance(self.parsefrom, type(None)):
            index = -1
            skipfallback = False
            skipforward = True
        if isinstance(self.parsefrom, int):
            index = self.parsefrom
            self.subcons[index] # raises IndexError
            skipfallback = True
            skipforward = self.subcons[index].sizeof() == self.subcons[-1].sizeof()
        if isinstance(self.parsefrom, str):
            index = {sc.name:i for i,sc in enumerate(self.subcons) if sc.name}[self.parsefrom] # raises KeyError
            skipfallback = True
            skipforward = self.subcons[index].sizeof() == self.subcons[-1].sizeof()

        for i,sc in enumerate(self.subcons):
            block += """
                %s%s
            """ % ("this[%r] = " % sc.name if sc.name else "", sc._compileparse(code))
            if i == index and not skipforward:
                block += """
                forward = io.tell()
                """
            if i < len(self.subcons)-1:
                block += """
                io.seek(fallback)
                """
        if not skipfallback:
            block += """
                io.seek(fallback)
            """
        if not skipforward:
            block += """
                io.seek(forward)
            """
        block += """
                del this['_']
                del this['_index']
                return this
        """
        code.append(block)
        return "%s(io, this)" % (fname,)

    def _emitbuild(self, code):
        # Compiled form mirrors _build: first matching member wins.
        fname = f"build_union_{code.allocateId()}"
        block = f"""
            def {fname}(obj, io, this):
                this = Container(_ = this, _params = this['_params'], _root = None, _parsing = False, _building = True, _sizing = False, _subcons = None, _io = io, _index = this.get('_index', None))
                this['_root'] = this['_'].get('_root', this)
                this.update(obj)
                objdict = obj
        """
        for sc in self.subcons:
            block += f"""
                if {'True' if sc.flagbuildnone else f'{repr(sc.name)} in objdict'}:
                    {f'obj = objdict.get({repr(sc.name)}, None)' if sc.flagbuildnone else f'obj = objdict[{repr(sc.name)}]'}
                    {f'this[{repr(sc.name)}] = obj' if sc.name else ''}
                    {f'buildret = this[{repr(sc.name)}] = ' if sc.name else ''}{sc._compilebuild(code)}
                    {f'return Container({{ {repr(sc.name)}:buildret }})'}
        """
        block += f"""
                raise UnionError('cannot build, none of subcons were found in the dictionary')
        """
        code.append(block)
        return f"{fname}(obj, io, this)"
class Select(Construct):
    r"""
    Selects the first matching subconstruct.

    Parsing and building literally try each subcon in sequence until one of them
    parses or builds without exception. The stream is reverted back to its original
    position after every failed parsing attempt, but not when parsing succeeds.
    Size is not defined.

    :param \*subcons: Construct instances, list of members, some can be anonymous
    :param \*\*subconskw: Construct instances, list of members (requires Python 3.6)

    :raises StreamError: requested reading negative amount, could not read enough bytes, requested writing different amount than actual data, or could not write all bytes
    :raises StreamError: stream is not seekable and tellable
    :raises SelectError: no subcon succeeded when parsing or building

    Example::

        >>> d = Select(Int32ub, CString("utf8"))
        >>> d.build(1)
        b'\x00\x00\x00\x01'
        >>> d.build(u"Афон")
        b'\xd0\x90\xd1\x84\xd0\xbe\xd0\xbd\x00'

    Alternative syntax, but requires Python 3.6 or any PyPy:

        >>> Select(num=Int32ub, text=CString("utf8"))
    """

    def __init__(self, *subcons, **subconskw):
        super().__init__()
        # keyword members are named via the / operator (name / construct)
        named = [name / con for name, con in subconskw.items()]
        self.subcons = list(subcons) + named
        # None is buildable if at least one member accepts None
        self.flagbuildnone = any(sc.flagbuildnone for sc in self.subcons)

    def _parse(self, stream, context, path):
        for candidate in self.subcons:
            origin = stream_tell(stream, path)
            try:
                result = candidate._parsereport(stream, context, path)
            except ExplicitError:
                raise
            except ConstructError:
                # failed attempt: rewind and try the next member
                stream_seek(stream, origin, 0, path)
            else:
                return result
        raise SelectError("no subconstruct matched", path=path)

    def _build(self, obj, stream, context, path):
        for candidate in self.subcons:
            try:
                # build into a temporary buffer; only commit on success
                data = candidate.build(obj, **context)
            except ExplicitError:
                raise
            except Exception:
                # any failure means this member cannot represent obj
                pass
            else:
                stream_write(stream, data, len(data), path)
                return obj
        raise SelectError("no subconstruct matched: %s" % (obj,), path=path)
def Optional(subcon):
    r"""
    Makes an optional field.

    Parsing attempts to parse subcon. If sub-parsing fails, returns None and reports
    success. Building attempts to build subcon. If sub-building fails, writes nothing
    and reports success. Size is undefined, because whether bytes would be consumed
    or produced depends on actual data and actual context.

    :param subcon: Construct instance

    Example::

        Optional <--> Select(subcon, Pass)

        >>> d = Optional(Int64ul)
        >>> d.parse(b"12345678")
        4050765991979987505
        >>> d.parse(b"")
        None
        >>> d.build(1)
        b'\x01\x00\x00\x00\x00\x00\x00\x00'
        >>> d.build(None)
        b''
    """
    # Pass always succeeds, so it serves as the fallback branch.
    return Select(subcon, Pass)


def If(condfunc, subcon):
    r"""
    If-then conditional construct.

    Parsing evaluates the condition: if True the subcon is parsed, otherwise None
    is returned. Building evaluates the condition likewise: if True the subcon gets
    built from, otherwise nothing happens. Size is either the subcon size or 0,
    depending on how condfunc evaluates.

    :param condfunc: bool or context lambda (or a truthy value)
    :param subcon: Construct instance, used if condition indicates True

    :raises StreamError: requested reading negative amount, could not read enough bytes, requested writing different amount than actual data, or could not write all bytes

    Can propagate any exception from the lambda, possibly non-ConstructError.

    Example::

        If <--> IfThenElse(condfunc, subcon, Pass)

        >>> d = If(this.x > 0, Byte)
        >>> d.build(255, x=1)
        b'\xff'
        >>> d.build(255, x=0)
        b''
    """
    macro = IfThenElse(condfunc, subcon, Pass)

    def _emitfulltype(ksy, bitwise):
        # KSY "if" clause is the condition with the "this." prefix stripped
        return dict(type=subcon._compileprimitivetype(ksy, bitwise), if_=repr(condfunc).replace("this.",""))
    macro._emitfulltype = _emitfulltype

    return macro
class IfThenElse(Construct):
    r"""
    If-then-else conditional construct, similar to ternary operator.

    Parsing and building evaluate the condition, and defer to one of the two
    subcons accordingly. Size is computed the same way.

    :param condfunc: bool or context lambda (or a truthy value)
    :param thensubcon: Construct instance, used if condition indicates True
    :param elsesubcon: Construct instance, used if condition indicates False

    :raises StreamError: requested reading negative amount, could not read enough bytes, requested writing different amount than actual data, or could not write all bytes

    Can propagate any exception from the lambda, possibly non-ConstructError.

    Example::

        >>> d = IfThenElse(this.x > 0, VarInt, Byte)
        >>> d.build(255, dict(x=1))
        b'\xff\x01'
        >>> d.build(255, dict(x=0))
        b'\xff'
    """

    def __init__(self, condfunc, thensubcon, elsesubcon):
        super().__init__()
        self.condfunc = condfunc
        self.thensubcon = thensubcon
        self.elsesubcon = elsesubcon
        # None is buildable only when both branches accept it
        self.flagbuildnone = thensubcon.flagbuildnone and elsesubcon.flagbuildnone

    def _active_subcon(self, context):
        # Evaluate the condition once and pick the branch it selects.
        return self.thensubcon if evaluate(self.condfunc, context) else self.elsesubcon

    def _parse(self, stream, context, path):
        return self._active_subcon(context)._parsereport(stream, context, path)

    def _build(self, obj, stream, context, path):
        return self._active_subcon(context)._build(obj, stream, context, path)

    def _sizeof(self, context, path):
        return self._active_subcon(context)._sizeof(context, path)

    def _emitparse(self, code):
        return "((%s) if (%s) else (%s))" % (self.thensubcon._compileparse(code), self.condfunc, self.elsesubcon._compileparse(code), )

    def _emitbuild(self, code):
        return f"(({self.thensubcon._compilebuild(code)}) if ({repr(self.condfunc)}) else ({self.elsesubcon._compilebuild(code)}))"

    def _emitseq(self, ksy, bitwise):
        return [
            dict(id="thenvalue", type=self.thensubcon._compileprimitivetype(ksy, bitwise), if_=repr(self.condfunc).replace("this.","")),
            dict(id="elsesubcon", type=self.elsesubcon._compileprimitivetype(ksy, bitwise), if_=repr(~self.condfunc).replace("this.","")),
        ]
class Switch(Construct):
    r"""
    A conditional branch.

    Parsing and building evaluate keyfunc and select a subcon based on the value and dictionary entries. Dictionary (cases) maps values into subcons. If no case matches then `default` is used (that is Pass by default). Note that `default` is a Construct instance, not a dictionary key. Size is evaluated in same way as parsing and building, by evaluating keyfunc and selecting a field accordingly.

    :param keyfunc: context lambda or constant, that matches some key in cases
    :param cases: dict mapping keys to Construct instances
    :param default: optional, Construct instance, used when keyfunc is not found in cases, Pass is default value for this parameter, Error is a possible value for this parameter

    :raises StreamError: requested reading negative amount, could not read enough bytes, requested writing different amount than actual data, or could not write all bytes

    Can propagate any exception from the lambda, possibly non-ConstructError.

    Example::

        >>> d = Switch(this.n, { 1:Int8ub, 2:Int16ub, 4:Int32ub })
        >>> d.build(5, n=1)
        b'\x05'
        >>> d.build(5, n=4)
        b'\x00\x00\x00\x05'

        >>> d = Switch(this.n, {}, default=Byte)
        >>> d.parse(b"\x01", n=255)
        1
        >>> d.build(1, n=255)
        b"\x01"
    """

    def __init__(self, keyfunc, cases, default=None):
        # default=None means "match anything silently", implemented by Pass
        if default is None:
            default = Pass
        super().__init__()
        self.keyfunc = keyfunc
        self.cases = cases
        self.default = default
        # None is buildable only if every possible branch accepts None
        allcases = list(cases.values()) + [default]
        self.flagbuildnone = all(sc.flagbuildnone for sc in allcases)

    def _parse(self, stream, context, path):
        keyfunc = evaluate(self.keyfunc, context)
        sc = self.cases.get(keyfunc, self.default)
        return sc._parsereport(stream, context, path)

    def _build(self, obj, stream, context, path):
        keyfunc = evaluate(self.keyfunc, context)
        sc = self.cases.get(keyfunc, self.default)
        return sc._build(obj, stream, context, path)

    def _sizeof(self, context, path):
        try:
            keyfunc = evaluate(self.keyfunc, context)
            sc = self.cases.get(keyfunc, self.default)
            return sc._sizeof(context, path)

        except (KeyError, AttributeError):
            # the key (or an attribute the lambda dereferences) is missing
            raise SizeofError("cannot calculate size, key not found in context", path=path)

    def _emitparse(self, code):
        # Emit a dict of per-case parser lambdas plus a default lambda,
        # then dispatch at runtime via dict.get().
        fname = f"switch_cases_{code.allocateId()}"
        code.append(f"{fname} = {{}}")
        for key,sc in self.cases.items():
            code.append(f"{fname}[{repr(key)}] = lambda io,this: {sc._compileparse(code)}")
        defaultfname = f"switch_defaultcase_{code.allocateId()}"
        code.append(f"{defaultfname} = lambda io,this: {self.default._compileparse(code)}")
        return f"{fname}.get({repr(self.keyfunc)}, {defaultfname})(io, this)"

    def _emitbuild(self, code):
        # Same dispatch-table scheme as _emitparse, for building.
        fname = f"switch_cases_{code.allocateId()}"
        code.append(f"{fname} = {{}}")
        for key,sc in self.cases.items():
            code.append(f"{fname}[{repr(key)}] = lambda obj,io,this: {sc._compilebuild(code)}")
        defaultfname = f"switch_defaultcase_{code.allocateId()}"
        code.append(f"{defaultfname} = lambda obj,io,this: {self.default._compilebuild(code)}")
        return f"{fname}.get({repr(self.keyfunc)}, {defaultfname})(obj, io, this)"
class StopIf(Construct):
    r"""
    Checks for a condition, and stops certain classes (:class:`~construct.core.Struct` :class:`~construct.core.Sequence` :class:`~construct.core.GreedyRange`) from parsing or building further.

    Parsing and building check the condition, and raise StopFieldError if indicated. Size is undefined.

    :param condfunc: bool or context lambda (or truthy value)

    :raises StopFieldError: used internally

    Can propagate any exception from the lambda, possibly non-ConstructError.

    Example::

        >>> Struct('x'/Byte, StopIf(this.x == 0), 'y'/Byte)
        >>> Sequence('x'/Byte, StopIf(this.x == 0), 'y'/Byte)
        >>> GreedyRange(FocusedSeq(0, 'x'/Byte, StopIf(this.x == 0)))
    """

    def __init__(self, condfunc):
        super().__init__()
        self.condfunc = condfunc
        # never consumes an entry from the build dict
        self.flagbuildnone = True

    def _parse(self, stream, context, path):
        # StopFieldError is caught by Struct/Sequence/GreedyRange and
        # terminates their member loop.
        if evaluate(self.condfunc, context):
            raise StopFieldError(path=path)

    def _build(self, obj, stream, context, path):
        if evaluate(self.condfunc, context):
            raise StopFieldError(path=path)

    def _sizeof(self, context, path):
        raise SizeofError("StopIf cannot determine size because it depends on actual context which then depends on actual data and outer constructs", path=path)

    def _emitparse(self, code):
        code.append(f"""
            def parse_stopif(condition):
                if condition:
                    raise StopFieldError
        """)
        return f"parse_stopif({repr(self.condfunc)})"

    def _emitbuild(self, code):
        code.append(f"""
            def build_stopif(condition):
                if condition:
                    raise StopFieldError
        """)
        return f"build_stopif({repr(self.condfunc)})"


#===============================================================================
# alignment and padding
#===============================================================================
def Padding(length, pattern=b"\x00"):
    r"""
    Appends null bytes.

    Parsing consumes the specified amount of bytes and discards it. Building writes
    the pattern byte repeated to the specified length. Size is the specified length.

    :param length: integer or context lambda, length of the padding
    :param pattern: b-character, padding pattern, default is \\x00

    :raises StreamError: requested reading negative amount, could not read enough bytes, requested writing different amount than actual data, or could not write all bytes
    :raises PaddingError: length was negative
    :raises PaddingError: pattern was not bytes (b-character)

    Can propagate any exception from the lambda, possibly non-ConstructError.

    Example::

        >>> d = Padding(4) or Padded(4, Pass)
        >>> d.build(None)
        b'\x00\x00\x00\x00'
        >>> d.parse(b"****")
        None
        >>> d.sizeof()
        4
    """
    # Padding is simply Padded wrapped around a no-op field.
    macro = Padded(length, Pass, pattern=pattern)

    def _emitprimitivetype(ksy, bitwise):
        if not bitwise:
            raise NotImplementedError
        return "b%s" % (length, )

    def _emitfulltype(ksy, bitwise):
        if bitwise:
            raise NotImplementedError
        return dict(size=length)

    macro._emitprimitivetype = _emitprimitivetype
    macro._emitfulltype = _emitfulltype
    return macro
class Padded(Subconstruct):
    r"""
    Appends additional null bytes to achieve a length.

    Parsing first parses the subcon, measures via stream.tell() how many bytes were
    consumed, and reads (and discards) the remainder. Building first builds the
    subcon, measures how many bytes were written, and emits pattern bytes for the
    remainder. Size equals `length`; a negative amount is an error. Note that the
    subcon may be variable size — the padding is computed from the bytes actually
    read or written.

    :param length: integer or context lambda, length of the padding
    :param subcon: Construct instance
    :param pattern: optional, b-character, padding pattern, default is \\x00

    :raises StreamError: requested reading negative amount, could not read enough bytes, requested writing different amount than actual data, or could not write all bytes
    :raises PaddingError: length is negative
    :raises PaddingError: subcon read or written more than the length (would cause negative pad)
    :raises PaddingError: pattern is not bytes of length 1

    Can propagate any exception from the lambda, possibly non-ConstructError.

    Example::

        >>> d = Padded(4, Byte)
        >>> d.build(255)
        b'\xff\x00\x00\x00'
        >>> d.parse(_)
        255
        >>> d.sizeof()
        4

        >>> d = Padded(4, VarInt)
        >>> d.build(1)
        b'\x01\x00\x00\x00'
        >>> d.build(70000)
        b'\xf0\xa2\x04\x00'
    """

    def __init__(self, length, subcon, pattern=b"\x00"):
        if not isinstance(pattern, bytestringtype) or len(pattern) != 1:
            raise PaddingError("pattern expected to be bytes of length 1")
        super().__init__(subcon)
        self.length = length
        self.pattern = pattern

    def _parse(self, stream, context, path):
        total = evaluate(self.length, context)
        if total < 0:
            raise PaddingError("length cannot be negative", path=path)
        before = stream_tell(stream, path)
        obj = self.subcon._parsereport(stream, context, path)
        consumed = stream_tell(stream, path) - before
        remainder = total - consumed
        if remainder < 0:
            raise PaddingError("subcon parsed %d bytes but was allowed only %d" % (consumed, total), path=path)
        # consume (and discard) the padding bytes
        stream_read(stream, remainder, path)
        return obj

    def _build(self, obj, stream, context, path):
        total = evaluate(self.length, context)
        if total < 0:
            raise PaddingError("length cannot be negative", path=path)
        before = stream_tell(stream, path)
        buildret = self.subcon._build(obj, stream, context, path)
        written = stream_tell(stream, path) - before
        remainder = total - written
        if remainder < 0:
            raise PaddingError("subcon build %d bytes but was allowed only %d" % (written, total), path=path)
        stream_write(stream, self.pattern * remainder, remainder, path)
        return buildret

    def _sizeof(self, context, path):
        try:
            total = evaluate(self.length, context)
            if total < 0:
                raise PaddingError("length cannot be negative", path=path)
            return total
        except (KeyError, AttributeError):
            raise SizeofError("cannot calculate size, key not found in context", path=path)

    def _emitparse(self, code):
        return f"({self.subcon._compileparse(code)}, io.read(({self.length})-({self.subcon.sizeof()}) ))[0]"

    def _emitbuild(self, code):
        return f"({self.subcon._compilebuild(code)}, io.write({repr(self.pattern)}*(({self.length})-({self.subcon.sizeof()})) ))[0]"

    def _emitfulltype(self, ksy, bitwise):
        return dict(size=self.length, type=self.subcon._compileprimitivetype(ksy, bitwise))
class Aligned(Subconstruct):
    r"""
    Appends additional null bytes to achieve a length that is shortest multiple of a modulus.

    Note that subcon can actually be variable size, it is the eventual amount of bytes
    that is read or written during parsing or building that determines actual padding.

    Parsing first parses subcon, then consumes an amount of bytes to sum up to the next
    multiple of the modulus, and discards it. Building first builds subcon, then writes
    the pattern byte up to the next multiple of the modulus. Size is subcon size plus
    modulo remainder, unless SizeofError was raised.

    :param modulus: integer or context lambda, modulus to final length
    :param subcon: Construct instance
    :param pattern: optional, b-character, padding pattern, default is \\x00

    :raises StreamError: requested reading negative amount, could not read enough bytes, requested writing different amount than actual data, or could not write all bytes
    :raises PaddingError: modulus was less than 2
    :raises PaddingError: pattern was not bytes of length 1

    Can propagate any exception from the lambda, possibly non-ConstructError.

    Example::

        >>> d = Aligned(4, Int16ub)
        >>> d.parse(b'\x00\x01\x00\x00')
        1
        >>> d.sizeof()
        4
    """

    def __init__(self, modulus, subcon, pattern=b"\x00"):
        if not isinstance(pattern, bytestringtype) or len(pattern) != 1:
            # message kept consistent with Padded, which performs the
            # identical validation on its pattern parameter
            raise PaddingError("pattern expected to be bytes of length 1")
        super().__init__(subcon)
        self.modulus = modulus
        self.pattern = pattern

    def _parse(self, stream, context, path):
        modulus = evaluate(self.modulus, context)
        if modulus < 2:
            raise PaddingError("expected modulo 2 or greater", path=path)
        position1 = stream_tell(stream, path)
        obj = self.subcon._parsereport(stream, context, path)
        position2 = stream_tell(stream, path)
        # -(consumed) % modulus is the distance to the next multiple of modulus
        pad = -(position2 - position1) % modulus
        stream_read(stream, pad, path)
        return obj

    def _build(self, obj, stream, context, path):
        modulus = evaluate(self.modulus, context)
        if modulus < 2:
            raise PaddingError("expected modulo 2 or greater", path=path)
        position1 = stream_tell(stream, path)
        buildret = self.subcon._build(obj, stream, context, path)
        position2 = stream_tell(stream, path)
        pad = -(position2 - position1) % modulus
        stream_write(stream, self.pattern * pad, pad, path)
        return buildret

    def _sizeof(self, context, path):
        try:
            modulus = evaluate(self.modulus, context)
            if modulus < 2:
                raise PaddingError("expected modulo 2 or greater", path=path)
            subconlen = self.subcon._sizeof(context, path)
            return subconlen + (-subconlen % modulus)
        except (KeyError, AttributeError):
            raise SizeofError("cannot calculate size, key not found in context", path=path)

    def _emitparse(self, code):
        return f"({self.subcon._compileparse(code)}, io.read(-({self.subcon.sizeof()}) % ({self.modulus}) ))[0]"

    def _emitbuild(self, code):
        return f"({self.subcon._compilebuild(code)}, io.write({repr(self.pattern)}*(-({self.subcon.sizeof()}) % ({self.modulus}))) )[0]"
def AlignedStruct(modulus, *subcons, **subconskw):
    r"""
    Makes a structure where each field is aligned to the same modulus (it is a struct of aligned fields, NOT an aligned struct).

    See :class:`~construct.core.Aligned` and :class:`~construct.core.Struct` for semantics and raisable exceptions.

    :param modulus: integer or context lambda, passed to each member
    :param \*subcons: Construct instances, list of members, some can be anonymous
    :param \*\*subconskw: Construct instances, list of members (requires Python 3.6)

    Example::

        >>> d = AlignedStruct(4, "a"/Int8ub, "b"/Int16ub)
        >>> d.build(dict(a=0xFF,b=0xFFFF))
        b'\xff\x00\x00\x00\xff\xff\x00\x00'
    """
    members = list(subcons) + [name / con for name, con in subconskw.items()]
    # wrap every member in its own Aligned, preserving the member name
    return Struct(*(member.name / Aligned(modulus, member) for member in members))


def BitStruct(*subcons, **subconskw):
    r"""
    Makes a structure inside a Bitwise.

    See :class:`~construct.core.Bitwise` and :class:`~construct.core.Struct` for semantics and raisable exceptions.

    :param \*subcons: Construct instances, list of members, some can be anonymous
    :param \*\*subconskw: Construct instances, list of members (requires Python 3.6)

    Example::

        BitStruct <--> Bitwise(Struct(...))

        >>> d = BitStruct(
        ...     "a" / Flag,
        ...     "b" / Nibble,
        ...     "c" / BitsInteger(10),
        ...     "d" / Padding(1),
        ... )
        >>> d.parse(b"\xbe\xef")
        Container(a=True, b=7, c=887, d=None)
        >>> d.sizeof()
        2
    """
    return Bitwise(Struct(*subcons, **subconskw))
#===============================================================================
# stream manipulation
#===============================================================================
class Pointer(Subconstruct):
    r"""
    Jumps in the stream forth and back for one field.

    Parsing and building seeks the stream to new location, processes subcon, and seeks back to original location. Size is defined as 0 but that does not mean no bytes are written into the stream.

    Offset can be positive, indicating a position from stream beginning forward, or negative, indicating a position from EOF backwards.

    :param offset: integer or context lambda, positive or negative
    :param subcon: Construct instance
    :param stream: None to use original stream (default), or context lambda to provide a different stream

    :raises StreamError: requested reading negative amount, could not read enough bytes, requested writing different amount than actual data, or could not write all bytes
    :raises StreamError: stream is not seekable and tellable

    Can propagate any exception from the lambda, possibly non-ConstructError.

    Example::

        >>> d = Pointer(8, Bytes(1))
        >>> d.parse(b"abcdefghijkl")
        b'i'
        >>> d.build(b"Z")
        b'\x00\x00\x00\x00\x00\x00\x00\x00Z'
    """

    def __init__(self, offset, subcon, stream=None):
        super().__init__(subcon)
        self.offset = offset
        self.stream = stream

    def _parse(self, stream, context, path):
        offset = evaluate(self.offset, context)
        # an alternative stream may be supplied via a context lambda
        stream = evaluate(self.stream, context) or stream
        fallback = stream_tell(stream, path)
        # negative offsets are measured from EOF (whence=2)
        stream_seek(stream, offset, 2 if offset < 0 else 0, path)
        obj = self.subcon._parsereport(stream, context, path)
        # always restore the original position afterwards
        stream_seek(stream, fallback, 0, path)
        return obj

    def _build(self, obj, stream, context, path):
        offset = evaluate(self.offset, context)
        stream = evaluate(self.stream, context) or stream
        fallback = stream_tell(stream, path)
        stream_seek(stream, offset, 2 if offset < 0 else 0, path)
        buildret = self.subcon._build(obj, stream, context, path)
        stream_seek(stream, fallback, 0, path)
        return buildret

    def _sizeof(self, context, path):
        # nothing is consumed/produced at the CURRENT position
        return 0

    def _emitparse(self, code):
        code.append(f"""
            def parse_pointer(io, offset, func):
                fallback = io.tell()
                io.seek(offset, 2 if offset < 0 else 0)
                obj = func()
                io.seek(fallback)
                return obj
        """)
        return f"parse_pointer(io, {self.offset}, lambda: {self.subcon._compileparse(code)})"

    def _emitbuild(self, code):
        code.append(f"""
            def build_pointer(obj, io, offset, func):
                fallback = io.tell()
                io.seek(offset, 2 if offset < 0 else 0)
                ret = func()
                io.seek(fallback)
                return ret
        """)
        return f"build_pointer(obj, io, {self.offset}, lambda: {self.subcon._compilebuild(code)})"

    def _emitprimitivetype(self, ksy, bitwise):
        # KSY represents a pointer as an "instance" with a fixed pos
        offset = self.offset.__getfield__() if callable(self.offset) else self.offset
        name = "instance_%s" % ksy.allocateId()
        ksy.instances[name] = dict(pos=offset, **self.subcon._compilefulltype(ksy, bitwise))
        return name
+ + Parsing sub-parses (and returns None if failed), then reverts stream to original position. Building does nothing (its NOT deferred). Size is defined as 0 because there is no building. + + This class is used in :class:`~construct.core.Union` class to parse each member. + + :param subcon: Construct instance + + :raises StreamError: requested reading negative amount, could not read enough bytes, requested writing different amount than actual data, or could not write all bytes + :raises StreamError: stream is not seekable and tellable + + Example:: + + >>> d = Sequence(Peek(Int8ub), Peek(Int16ub)) + >>> d.parse(b"\x01\x02") + [1, 258] + >>> d.sizeof() + 0 + """ + + def __init__(self, subcon): + super().__init__(subcon) + self.flagbuildnone = True + + def _parse(self, stream, context, path): + fallback = stream_tell(stream, path) + try: + return self.subcon._parsereport(stream, context, path) + except ExplicitError: + raise + except ConstructError: + pass + finally: + stream_seek(stream, fallback, 0, path) + + def _build(self, obj, stream, context, path): + return obj + + def _sizeof(self, context, path): + return 0 + + def _emitparse(self, code): + code.append(""" + def parse_peek(io, func): + fallback = io.tell() + try: + return func() + except ExplicitError: + raise + except ConstructError: + pass + finally: + io.seek(fallback) + """) + return "parse_peek(io, lambda: %s)" % (self.subcon._compileparse(code),) + + def _emitbuild(self, code): + return "obj" + + +class Seek(Construct): + r""" + Seeks the stream. + + Parsing and building seek the stream to given location (and whence), and return stream.seek() return value. Size is not defined. + + .. seealso:: Analog :class:`~construct.core.Pointer` wrapper that has same side effect but also processes a subcon, and also seeks back. 
+ + :param at: integer or context lambda, where to jump to + :param whence: optional, integer or context lambda, is the offset from beginning (0) or from current position (1) or from EOF (2), default is 0 + + :raises StreamError: stream is not seekable + + Can propagate any exception from the lambda, possibly non-ConstructError. + + Example:: + + >>> d = (Seek(5) >> Byte) + >>> d.parse(b"01234x") + [5, 120] + + >>> d = (Bytes(10) >> Seek(5) >> Byte) + >>> d.build([b"0123456789", None, 255]) + b'01234\xff6789' + """ + + def __init__(self, at, whence=0): + super().__init__() + self.at = at + self.whence = whence + self.flagbuildnone = True + + def _parse(self, stream, context, path): + at = evaluate(self.at, context) + whence = evaluate(self.whence, context) + return stream_seek(stream, at, whence, path) + + def _build(self, obj, stream, context, path): + at = evaluate(self.at, context) + whence = evaluate(self.whence, context) + return stream_seek(stream, at, whence, path) + + def _sizeof(self, context, path): + raise SizeofError("Seek only moves the stream, size is not meaningful", path=path) + + def _emitparse(self, code): + return f"io.seek({self.at}, {self.whence})" + + def _emitbuild(self, code): + return f"io.seek({self.at}, {self.whence})" + + +@singleton +class Tell(Construct): + r""" + Tells the stream. + + Parsing and building return current stream offset using using stream.tell(). Size is defined as 0 because parsing and building does not consume or add into the stream. + + Tell is useful for adjusting relative offsets to absolute positions, or to measure sizes of Constructs. To get an absolute pointer, use a Tell plus a relative offset. To get a size, place two Tells and measure their difference using a Compute field. However, its recommended to use :class:`~construct.core.RawCopy` instead of manually extracting two positions and computing difference. 
+ + :raises StreamError: stream is not tellable + + Example:: + + >>> d = Struct("num"/VarInt, "offset"/Tell) + >>> d.parse(b"X") + Container(num=88, offset=1) + >>> d.build(dict(num=88)) + b'X' + """ + + def __init__(self): + super().__init__() + self.flagbuildnone = True + + def _parse(self, stream, context, path): + return stream_tell(stream, path) + + def _build(self, obj, stream, context, path): + return stream_tell(stream, path) + + def _sizeof(self, context, path): + return 0 + + def _emitparse(self, code): + return "io.tell()" + + def _emitbuild(self, code): + return "io.tell()" + + +@singleton +class Pass(Construct): + r""" + No-op construct, useful as default cases for Switch and Enum. + + Parsing returns None. Building does nothing. Size is 0 by definition. + + Example:: + + >>> Pass.parse(b"") + None + >>> Pass.build(None) + b'' + >>> Pass.sizeof() + 0 + """ + + def __init__(self): + super().__init__() + self.flagbuildnone = True + + def _parse(self, stream, context, path): + return None + + def _build(self, obj, stream, context, path): + return obj + + def _sizeof(self, context, path): + return 0 + + def _emitparse(self, code): + return "None" + + def _emitbuild(self, code): + return "None" + + def _emitfulltype(self, ksy, bitwise): + return dict(size=0) + + +@singleton +class Terminated(Construct): + r""" + Asserts end of stream (EOF). You can use it to ensure no more unparsed data follows in the stream. + + Parsing checks if stream reached EOF, and raises TerminatedError if not. Building does nothing. Size is defined as 0 because parsing and building does not consume or add into the stream, as far as other constructs see it. 
+ + :raises TerminatedError: stream not at EOF when parsing + + Example:: + + >>> Terminated.parse(b"") + None + >>> Terminated.parse(b"remaining") + construct.core.TerminatedError: expected end of stream + """ + + def __init__(self): + super().__init__() + self.flagbuildnone = True + + def _parse(self, stream, context, path): + if stream.read(1): + raise TerminatedError("expected end of stream", path=path) + + def _build(self, obj, stream, context, path): + return obj + + def _sizeof(self, context, path): + raise SizeofError(path=path) + + +#=============================================================================== +# tunneling and byte/bit swapping +#=============================================================================== +class RawCopy(Subconstruct): + r""" + Used to obtain byte representation of a field (aside of object value). + + Returns a dict containing both parsed subcon value, the raw bytes that were consumed by subcon, starting and ending offset in the stream, and amount in bytes. Builds either from raw bytes representation or a value used by subcon. Size is same as subcon. + + Object is a dictionary with either "data" or "value" keys, or both. + + When building, if both the "value" and "data" keys are present, then the "data" key is used and the "value" key is ignored. This is undesirable in the case that you parse some data for the purpose of modifying it and writing it back; in this case, delete the "data" key when modifying the "value" key to correctly rebuild the former. 
+ + :param subcon: Construct instance + + :raises StreamError: stream is not seekable and tellable + :raises RawCopyError: building and neither data or value was given + :raises StringError: building from non-bytes value, perhaps unicode + + Example:: + + >>> d = RawCopy(Byte) + >>> d.parse(b"\xff") + Container(data=b'\xff', value=255, offset1=0, offset2=1, length=1) + >>> d.build(dict(data=b"\xff")) + '\xff' + >>> d.build(dict(value=255)) + '\xff' + """ + + def _parse(self, stream, context, path): + offset1 = stream_tell(stream, path) + obj = self.subcon._parsereport(stream, context, path) + offset2 = stream_tell(stream, path) + stream_seek(stream, offset1, 0, path) + data = stream_read(stream, offset2-offset1, path) + return Container(data=data, value=obj, offset1=offset1, offset2=offset2, length=(offset2-offset1)) + + def _build(self, obj, stream, context, path): + if obj is None and self.subcon.flagbuildnone: + obj = dict(value=None) + if 'data' in obj: + data = obj['data'] + offset1 = stream_tell(stream, path) + stream_write(stream, data, len(data), path) + offset2 = stream_tell(stream, path) + return Container(obj, data=data, offset1=offset1, offset2=offset2, length=(offset2-offset1)) + if 'value' in obj: + value = obj['value'] + offset1 = stream_tell(stream, path) + buildret = self.subcon._build(value, stream, context, path) + value = value if buildret is None else buildret + offset2 = stream_tell(stream, path) + stream_seek(stream, offset1, 0, path) + data = stream_read(stream, offset2-offset1, path) + return Container(obj, data=data, value=value, offset1=offset1, offset2=offset2, length=(offset2-offset1)) + raise RawCopyError('RawCopy cannot build, both data and value keys are missing', path=path) + + +def ByteSwapped(subcon): + r""" + Swaps the byte order within boundaries of given subcon. Requires a fixed sized subcon. 
+ + :param subcon: Construct instance, subcon on top of byte swapped bytes + + :raises SizeofError: ctor or compiler could not compute subcon size + + See :class:`~construct.core.Transformed` and :class:`~construct.core.Restreamed` for raisable exceptions. + + Example:: + + Int24ul <--> ByteSwapped(Int24ub) <--> BytesInteger(3, swapped=True) <--> ByteSwapped(BytesInteger(3)) + """ + + size = subcon.sizeof() + return Transformed(subcon, swapbytes, size, swapbytes, size) + + +def BitsSwapped(subcon): + r""" + Swaps the bit order within each byte within boundaries of given subcon. Does NOT require a fixed sized subcon. + + :param subcon: Construct instance, subcon on top of bit swapped bytes + + :raises SizeofError: compiler could not compute subcon size + + See :class:`~construct.core.Transformed` and :class:`~construct.core.Restreamed` for raisable exceptions. + + Example:: + + >>> d = Bitwise(Bytes(8)) + >>> d.parse(b"\x01") + '\x00\x00\x00\x00\x00\x00\x00\x01' + >>>> BitsSwapped(d).parse(b"\x01") + '\x01\x00\x00\x00\x00\x00\x00\x00' + """ + + try: + size = subcon.sizeof() + return Transformed(subcon, swapbitsinbytes, size, swapbitsinbytes, size) + except SizeofError: + return Restreamed(subcon, swapbitsinbytes, 1, swapbitsinbytes, 1, lambda n: n) + + +class Prefixed(Subconstruct): + r""" + Prefixes a field with byte count. + + Parses the length field. Then reads that amount of bytes, and parses subcon using only those bytes. Constructs that consume entire remaining stream are constrained to consuming only the specified amount of bytes (a substream). When building, data gets prefixed by its length. Optionally, length field can include its own size. Size is the sum of both fields sizes, unless either raises SizeofError. + + Analog to :class:`~construct.core.PrefixedArray` which prefixes with an element count, instead of byte count. Semantics is similar but implementation is different. 
+ + :class:`~construct.core.VarInt` is recommended for new protocols, as it is more compact and never overflows. + + :param lengthfield: Construct instance, field used for storing the length + :param subcon: Construct instance, subcon used for storing the value + :param includelength: optional, bool, whether length field should include its own size, default is False + + :raises StreamError: requested reading negative amount, could not read enough bytes, requested writing different amount than actual data, or could not write all bytes + + Example:: + + >>> d = Prefixed(VarInt, GreedyRange(Int32ul)) + >>> d.parse(b"\x08abcdefgh") + [1684234849, 1751606885] + + >>> d = PrefixedArray(VarInt, Int32ul) + >>> d.parse(b"\x02abcdefgh") + [1684234849, 1751606885] + """ + + def __init__(self, lengthfield, subcon, includelength=False): + super().__init__(subcon) + self.lengthfield = lengthfield + self.includelength = includelength + + def _parse(self, stream, context, path): + length = self.lengthfield._parsereport(stream, context, path) + if self.includelength: + length -= self.lengthfield._sizeof(context, path) + data = stream_read(stream, length, path) + if self.subcon is GreedyBytes: + return data + if type(self.subcon) is GreedyString: + return data.decode(self.subcon.encoding) + return self.subcon._parsereport(io.BytesIO(data), context, path) + + def _build(self, obj, stream, context, path): + stream2 = io.BytesIO() + buildret = self.subcon._build(obj, stream2, context, path) + data = stream2.getvalue() + length = len(data) + if self.includelength: + length += self.lengthfield._sizeof(context, path) + self.lengthfield._build(length, stream, context, path) + stream_write(stream, data, len(data), path) + return buildret + + def _sizeof(self, context, path): + return self.lengthfield._sizeof(context, path) + self.subcon._sizeof(context, path) + + def _actualsize(self, stream, context, path): + position1 = stream_tell(stream, path) + length = self.lengthfield._parse(stream, 
context, path) + if self.includelength: + length -= self.lengthfield._sizeof(context, path) + position2 = stream_tell(stream, path) + return (position2-position1) + length + + def _emitparse(self, code): + sub = self.lengthfield.sizeof() if self.includelength else 0 + return f"restream(io.read(({self.lengthfield._compileparse(code)})-({sub})), lambda io: ({self.subcon._compileparse(code)}))" + + def _emitseq(self, ksy, bitwise): + return [ + dict(id="lengthfield", type=self.lengthfield._compileprimitivetype(ksy, bitwise)), + dict(id="data", size="lengthfield", type=self.subcon._compileprimitivetype(ksy, bitwise)), + ] + + +def PrefixedArray(countfield, subcon): + r""" + Prefixes an array with item count (as opposed to prefixed by byte count, see :class:`~construct.core.Prefixed`). + + :class:`~construct.core.VarInt` is recommended for new protocols, as it is more compact and never overflows. + + :param countfield: Construct instance, field used for storing the element count + :param subcon: Construct instance, subcon used for storing each element + + :raises StreamError: requested reading negative amount, could not read enough bytes, requested writing different amount than actual data, or could not write all bytes + :raises RangeError: consumed or produced too little elements + + Example:: + + >>> d = Prefixed(VarInt, GreedyRange(Int32ul)) + >>> d.parse(b"\x08abcdefgh") + [1684234849, 1751606885] + + >>> d = PrefixedArray(VarInt, Int32ul) + >>> d.parse(b"\x02abcdefgh") + [1684234849, 1751606885] + """ + macro = FocusedSeq("items", + "count" / Rebuild(countfield, len_(this.items)), + "items" / subcon[this.count], + ) + + def _emitparse(code): + return "ListContainer((%s) for i in range(%s))" % (subcon._compileparse(code), countfield._compileparse(code), ) + macro._emitparse = _emitparse + + def _emitbuild(code): + return f"(reuse(len(obj), lambda obj: {countfield._compilebuild(code)}), list({subcon._compilebuild(code)} for obj in obj), obj)[2]" + macro._emitbuild = 
_emitbuild + + def _actualsize(self, stream, context, path): + position1 = stream_tell(stream, path) + count = countfield._parse(stream, context, path) + position2 = stream_tell(stream, path) + return (position2-position1) + count * subcon._sizeof(context, path) + macro._actualsize = _actualsize + + def _emitseq(ksy, bitwise): + return [ + dict(id="countfield", type=countfield._compileprimitivetype(ksy, bitwise)), + dict(id="data", type=subcon._compileprimitivetype(ksy, bitwise), repeat="expr", repeat_expr="countfield"), + ] + macro._emitseq = _emitseq + + return macro + + +class FixedSized(Subconstruct): + r""" + Restricts parsing to specified amount of bytes. + + Parsing reads `length` bytes, then defers to subcon using new BytesIO with said bytes. Building builds the subcon using new BytesIO, then writes said data and additional null bytes accordingly. Size is same as `length`, although negative amount raises an error. + + :param length: integer or context lambda, total amount of bytes (both data and padding) + :param subcon: Construct instance + + :raises StreamError: requested reading negative amount, could not read enough bytes, requested writing different amount than actual data, or could not write all bytes + :raises PaddingError: length is negative + :raises PaddingError: subcon written more bytes than entire length (negative padding) + + Can propagate any exception from the lambda, possibly non-ConstructError. 
+ + Example:: + + >>> d = FixedSized(10, Byte) + >>> d.parse(b'\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00') + 255 + >>> d.build(255) + b'\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00' + >>> d.sizeof() + 10 + """ + + def __init__(self, length, subcon): + super().__init__(subcon) + self.length = length + + def _parse(self, stream, context, path): + length = evaluate(self.length, context) + if length < 0: + raise PaddingError("length cannot be negative", path=path) + data = stream_read(stream, length, path) + if self.subcon is GreedyBytes: + return data + if type(self.subcon) is GreedyString: + return data.decode(self.subcon.encoding) + return self.subcon._parsereport(io.BytesIO(data), context, path) + + def _build(self, obj, stream, context, path): + length = evaluate(self.length, context) + if length < 0: + raise PaddingError("length cannot be negative", path=path) + stream2 = io.BytesIO() + buildret = self.subcon._build(obj, stream2, context, path) + data = stream2.getvalue() + pad = length - len(data) + if pad < 0: + raise PaddingError("subcon build %d bytes but was allowed only %d" % (len(data), length), path=path) + stream_write(stream, data, len(data), path) + stream_write(stream, bytes(pad), pad, path) + return buildret + + def _sizeof(self, context, path): + length = evaluate(self.length, context) + if length < 0: + raise PaddingError("length cannot be negative", path=path) + return length + + def _emitparse(self, code): + return f"restream(io.read({self.length}), lambda io: ({self.subcon._compileparse(code)}))" + + def _emitfulltype(self, ksy, bitwise): + return dict(size=repr(self.length).replace("this.",""), **self.subcon._compilefulltype(ksy, bitwise)) + + +class NullTerminated(Subconstruct): + r""" + Restricts parsing to bytes preceding a null byte. + + Parsing reads one byte at a time and accumulates it with previous bytes. When term was found, (by default) consumes but discards the term. When EOF was found, (by default) raises same StreamError exception. 
Then subcon is parsed using new BytesIO made with said data. Building builds the subcon and then writes the term. Size is undefined. + + The term can be multiple bytes, to support string classes with UTF16/32 encodings. + + :param subcon: Construct instance + :param term: optional, bytes, terminator byte-string, default is \x00 single null byte + :param include: optional, bool, if to include terminator in resulting data, default is False + :param consume: optional, bool, if to consume terminator or leave it in the stream, default is True + :param require: optional, bool, if EOF results in failure or not, default is True + + :raises StreamError: requested reading negative amount, could not read enough bytes, requested writing different amount than actual data, or could not write all bytes + :raises StreamError: encountered EOF but require is not disabled + :raises PaddingError: terminator is less than 1 bytes in length + + Example:: + + >>> d = NullTerminated(Byte) + >>> d.parse(b'\xff\x00') + 255 + >>> d.build(255) + b'\xff\x00' + """ + + def __init__(self, subcon, term=b"\x00", include=False, consume=True, require=True): + super().__init__(subcon) + self.term = term + self.include = include + self.consume = consume + self.require = require + + def _parse(self, stream, context, path): + term = self.term + unit = len(term) + if unit < 1: + raise PaddingError("NullTerminated term must be at least 1 byte", path=path) + data = b'' + while True: + try: + b = stream_read(stream, unit, path) + except StreamError: + if self.require: + raise + else: + break + if b == term: + if self.include: + data += b + if not self.consume: + stream_seek(stream, -unit, 1, path) + break + data += b + if self.subcon is GreedyBytes: + return data + if type(self.subcon) is GreedyString: + return data.decode(self.subcon.encoding) + return self.subcon._parsereport(io.BytesIO(data), context, path) + + def _build(self, obj, stream, context, path): + buildret = self.subcon._build(obj, stream, 
context, path) + stream_write(stream, self.term, len(self.term), path) + return buildret + + def _sizeof(self, context, path): + raise SizeofError(path=path) + + def _emitfulltype(self, ksy, bitwise): + if len(self.term) > 1: + raise NotImplementedError + return dict(terminator=byte2int(self.term), include=self.include, consume=self.consume, eos_error=self.require, **self.subcon._compilefulltype(ksy, bitwise)) + + +class NullStripped(Subconstruct): + r""" + Restricts parsing to bytes except padding left of EOF. + + Parsing reads entire stream, then strips the data from right to left of null bytes, then parses subcon using new BytesIO made of said data. Building defers to subcon as-is. Size is undefined, because it reads till EOF. + + The pad can be multiple bytes, to support string classes with UTF16/32 encodings. + + :param subcon: Construct instance + :param pad: optional, bytes, padding byte-string, default is \x00 single null byte + + :raises PaddingError: pad is less than 1 bytes in length + + Example:: + + >>> d = NullStripped(Byte) + >>> d.parse(b'\xff\x00\x00') + 255 + >>> d.build(255) + b'\xff' + """ + + def __init__(self, subcon, pad=b"\x00"): + super().__init__(subcon) + self.pad = pad + + def _parse(self, stream, context, path): + pad = self.pad + unit = len(pad) + if unit < 1: + raise PaddingError("NullStripped pad must be at least 1 byte", path=path) + data = stream_read_entire(stream, path) + if unit == 1: + data = data.rstrip(pad) + else: + tailunit = len(data) % unit + end = len(data) + if tailunit and data[-tailunit:] == pad[:tailunit]: + end -= tailunit + while end-unit >= 0 and data[end-unit:end] == pad: + end -= unit + data = data[:end] + if self.subcon is GreedyBytes: + return data + if type(self.subcon) is GreedyString: + return data.decode(self.subcon.encoding) + return self.subcon._parsereport(io.BytesIO(data), context, path) + + def _build(self, obj, stream, context, path): + return self.subcon._build(obj, stream, context, path) + + def 
_sizeof(self, context, path): + raise SizeofError(path=path) + + def _emitfulltype(self, ksy, bitwise): + if len(self.pad) > 1: + raise NotImplementedError + return dict(pad_right=byte2int(self.pad), **self.subcon._compilefulltype(ksy, bitwise)) + + +class RestreamData(Subconstruct): + r""" + Parses a field on external data (but does not build). + + Parsing defers to subcon, but provides it a separate BytesIO stream based on data provided by datafunc (a bytes literal or another BytesIO stream or Construct instances that returns bytes or context lambda). Building does nothing. Size is 0 because as far as other fields see it, this field does not produce or consume any bytes from the stream. + + :param datafunc: bytes or BytesIO or Construct instance (that parses into bytes) or context lambda, provides data for subcon to parse from + :param subcon: Construct instance + + Can propagate any exception from the lambdas, possibly non-ConstructError. + + Example:: + + >>> d = RestreamData(b"\x01", Int8ub) + >>> d.parse(b"") + 1 + >>> d.build(0) + b'' + + >>> d = RestreamData(NullTerminated(GreedyBytes), Int16ub) + >>> d.parse(b"\x01\x02\x00") + 0x0102 + >>> d = RestreamData(FixedSized(2, GreedyBytes), Int16ub) + >>> d.parse(b"\x01\x02\x00") + 0x0102 + """ + + def __init__(self, datafunc, subcon): + super().__init__(subcon) + self.datafunc = datafunc + self.flagbuildnone = True + + def _parse(self, stream, context, path): + data = evaluate(self.datafunc, context) + if isinstance(data, bytestringtype): + stream2 = io.BytesIO(data) + if isinstance(data, io.BytesIO): + stream2 = data + if isinstance(data, Construct): + stream2 = io.BytesIO(data._parsereport(stream, context, path)) + return self.subcon._parsereport(stream2, context, path) + + def _build(self, obj, stream, context, path): + return obj + + def _sizeof(self, context, path): + return 0 + + def _emitparse(self, code): + return "restream(%r, lambda io: %s)" % (self.datafunc, self.subcon._compileparse(code), ) + + 
+class Transformed(Subconstruct): + r""" + Transforms bytes between the underlying stream and the (fixed-sized) subcon. + + Parsing reads a specified amount (or till EOF), processes data using a bytes-to-bytes decoding function, then parses subcon using those data. Building does build subcon into separate bytes, then processes it using encoding bytes-to-bytes function, then writes those data into main stream. Size is reported as `decodeamount` or `encodeamount` if those are equal, otherwise its SizeofError. + + Used internally to implement :class:`~construct.core.Bitwise` :class:`~construct.core.Bytewise` :class:`~construct.core.ByteSwapped` :class:`~construct.core.BitsSwapped` . + + Possible use-cases include encryption, obfuscation, byte-level encoding. + + .. warning:: Remember that subcon must consume (or produce) an amount of bytes that is same as `decodeamount` (or `encodeamount`). + + .. warning:: Do NOT use seeking/telling classes inside Transformed context. + + :param subcon: Construct instance + :param decodefunc: bytes-to-bytes function, applied before parsing subcon + :param decodeamount: integer, amount of bytes to read + :param encodefunc: bytes-to-bytes function, applied after building subcon + :param encodeamount: integer, amount of bytes to write + + :raises StreamError: requested reading negative amount, could not read enough bytes, requested writing different amount than actual data, or could not write all bytes + :raises StreamError: subcon build and encoder transformed more or less than `encodeamount` bytes, if amount is specified + :raises StringError: building from non-bytes value, perhaps unicode + + Can propagate any exception from the lambdas, possibly non-ConstructError. 
+ + Example:: + + >>> d = Transformed(Bytes(16), bytes2bits, 2, bits2bytes, 2) + >>> d.parse(b"\x00\x00") + b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + + >>> d = Transformed(GreedyBytes, bytes2bits, None, bits2bytes, None) + >>> d.parse(b"\x00\x00") + b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + """ + + def __init__(self, subcon, decodefunc, decodeamount, encodefunc, encodeamount): + super().__init__(subcon) + self.decodefunc = decodefunc + self.decodeamount = decodeamount + self.encodefunc = encodefunc + self.encodeamount = encodeamount + + def _parse(self, stream, context, path): + if isinstance(self.decodeamount, type(None)): + data = stream_read_entire(stream, path) + if isinstance(self.decodeamount, integertypes): + data = stream_read(stream, self.decodeamount, path) + data = self.decodefunc(data) + if self.subcon is GreedyBytes: + return data + if type(self.subcon) is GreedyString: + return data.decode(self.subcon.encoding) + return self.subcon._parsereport(io.BytesIO(data), context, path) + + def _build(self, obj, stream, context, path): + stream2 = io.BytesIO() + buildret = self.subcon._build(obj, stream2, context, path) + data = stream2.getvalue() + data = self.encodefunc(data) + if isinstance(self.encodeamount, integertypes): + if len(data) != self.encodeamount: + raise StreamError("encoding transformation produced wrong amount of bytes, %s instead of expected %s" % (len(data), self.encodeamount, ), path=path) + stream_write(stream, data, len(data), path) + return buildret + + def _sizeof(self, context, path): + if self.decodeamount is None or self.encodeamount is None: + raise SizeofError(path=path) + if self.decodeamount == self.encodeamount: + return self.encodeamount + raise SizeofError(path=path) + + +class Restreamed(Subconstruct): + r""" + Transforms bytes between the underlying stream and the (variable-sized) subcon. 
+ + Used internally to implement :class:`~construct.core.Bitwise` :class:`~construct.core.Bytewise` :class:`~construct.core.ByteSwapped` :class:`~construct.core.BitsSwapped` . + + .. warning:: Remember that subcon must consume or produce an amount of bytes that is a multiple of encoding or decoding units. For example, in a Bitwise context you should process a multiple of 8 bits or the stream will fail during parsing/building. + + .. warning:: Do NOT use seeking/telling classes inside Restreamed context. + + :param subcon: Construct instance + :param decoder: bytes-to-bytes function, used on data chunks when parsing + :param decoderunit: integer, decoder takes chunks of this size + :param encoder: bytes-to-bytes function, used on data chunks when building + :param encoderunit: integer, encoder takes chunks of this size + :param sizecomputer: function that computes amount of bytes outputed + + Can propagate any exception from the lambda, possibly non-ConstructError. + Can also raise arbitrary exceptions in RestreamedBytesIO implementation. 
+ + Example:: + + Bitwise <--> Restreamed(subcon, bits2bytes, 8, bytes2bits, 1, lambda n: n//8) + Bytewise <--> Restreamed(subcon, bytes2bits, 1, bits2bytes, 8, lambda n: n*8) + """ + + def __init__(self, subcon, decoder, decoderunit, encoder, encoderunit, sizecomputer): + super().__init__(subcon) + self.decoder = decoder + self.decoderunit = decoderunit + self.encoder = encoder + self.encoderunit = encoderunit + self.sizecomputer = sizecomputer + + def _parse(self, stream, context, path): + stream2 = RestreamedBytesIO(stream, self.decoder, self.decoderunit, self.encoder, self.encoderunit) + obj = self.subcon._parsereport(stream2, context, path) + stream2.close() + return obj + + def _build(self, obj, stream, context, path): + stream2 = RestreamedBytesIO(stream, self.decoder, self.decoderunit, self.encoder, self.encoderunit) + buildret = self.subcon._build(obj, stream2, context, path) + stream2.close() + return obj + + def _sizeof(self, context, path): + if self.sizecomputer is None: + raise SizeofError("Restreamed cannot calculate size without a sizecomputer", path=path) + else: + return self.sizecomputer(self.subcon._sizeof(context, path)) + + +class ProcessXor(Subconstruct): + r""" + Transforms bytes between the underlying stream and the subcon. + + Used internally by KaitaiStruct compiler, when translating `process: xor` tags. + + Parsing reads till EOF, xors data with the pad, then feeds that data into subcon. Building first builds the subcon into separate BytesIO stream, xors data with the pad, then writes that data into the main stream. Size is the same as subcon, unless it raises SizeofError. + + :param padfunc: integer or bytes or context lambda, single or multiple bytes to xor data with + :param subcon: Construct instance + + :raises StringError: pad is not integer or bytes + + Can propagate any exception from the lambda, possibly non-ConstructError. 
+ + Example:: + + >>> d = ProcessXor(0xf0 or b'\xf0', Int16ub) + >>> d.parse(b"\x00\xff") + 0xf00f + >>> d.sizeof() + 2 + """ + + def __init__(self, padfunc, subcon): + super().__init__(subcon) + self.padfunc = padfunc + + def _parse(self, stream, context, path): + pad = evaluate(self.padfunc, context) + if not isinstance(pad, (integertypes, bytestringtype)): + raise StringError("ProcessXor needs integer or bytes pad", path=path) + if isinstance(pad, bytestringtype) and len(pad) == 1: + pad = byte2int(pad) + data = stream_read_entire(stream, path) + if isinstance(pad, integertypes): + if not (pad == 0): + data = integers2bytes( (b ^ pad) for b in data ) + if isinstance(pad, bytestringtype): + if not (len(pad) <= 64 and pad == bytes(len(pad))): + data = integers2bytes( (b ^ p) for b,p in zip(data, itertools.cycle(pad)) ) + if self.subcon is GreedyBytes: + return data + if type(self.subcon) is GreedyString: + return data.decode(self.subcon.encoding) + return self.subcon._parsereport(io.BytesIO(data), context, path) + + def _build(self, obj, stream, context, path): + pad = evaluate(self.padfunc, context) + if not isinstance(pad, (integertypes, bytestringtype)): + raise StringError("ProcessXor needs integer or bytes pad", path=path) + if isinstance(pad, bytestringtype) and len(pad) == 1: + pad = byte2int(pad) + stream2 = io.BytesIO() + buildret = self.subcon._build(obj, stream2, context, path) + data = stream2.getvalue() + if isinstance(pad, integertypes): + if not (pad == 0): + data = integers2bytes( (b ^ pad) for b in data ) + if isinstance(pad, bytestringtype): + if not (len(pad) <= 64 and pad == bytes(len(pad))): + data = integers2bytes( (b ^ p) for b,p in zip(data, itertools.cycle(pad)) ) + stream_write(stream, data, len(data), path) + return buildret + + def _sizeof(self, context, path): + return self.subcon._sizeof(context, path) + + +class ProcessRotateLeft(Subconstruct): + r""" + Transforms bytes between the underlying stream and the subcon. 
class ProcessRotateLeft(Subconstruct):
    r"""
    Bit-rotates bytes between the underlying stream and the subcon.

    Used internally by the KaitaiStruct compiler for `process: rol/ror` tags.
    Parsing reads until EOF, rotates each `group`-sized chunk *left* by
    `amount` bits, then feeds the result into the subcon. Building renders
    the subcon into a scratch BytesIO, rotates *right* (by negating the
    amount), and writes that out. Size equals the subcon's size.

    :param amount: integer or context lambda, shift in bits, treated modulo (group x 8)
    :param group: integer or context lambda, chunk size in bytes
    :param subcon: Construct instance

    :raises RotationError: group is less than 1
    :raises RotationError: data length is not a multiple of group size

    Can propagate any exception from the lambda, possibly non-ConstructError.
    """

    # lookup tables for rotating a single byte left by 1..7 bits
    # (formula taken from: http://stackoverflow.com/a/812039)
    precomputed_single_rotations = {amount: [(i << amount) & 0xff | (i >> (8-amount)) for i in range(256)] for amount in range(1,8)}

    def __init__(self, amount, group, subcon):
        super().__init__(subcon)
        self.amount = amount
        self.group = group

    def _rotate(self, data, amount, group, path):
        # Rotate every group-sized chunk of data left by `amount` bits.
        if len(data) % group != 0:
            raise RotationError("data length must be a multiple of group size", path=path)
        if amount == 0:
            return data
        data_ints = bytes2integers(data)
        amount_bytes = amount // 8
        if group == 1:
            table = ProcessRotateLeft.precomputed_single_rotations[amount]
            return integers2bytes( table[a] for a in data_ints )
        if amount % 8 == 0:
            # whole-byte rotation: a pure permutation of bytes within each chunk
            indices = [(i + amount_bytes) % group for i in range(group)]
            return integers2bytes( data_ints[i+k] for i in range(0,len(data),group) for k in indices )
        # general case: each output byte combines bits of two neighbouring bytes
        amount1 = amount % 8
        amount2 = 8 - amount1
        indices_pairs = [ ((i+amount_bytes) % group, (i+1+amount_bytes) % group) for i in range(group)]
        return integers2bytes( (data_ints[i+k1] << amount1) & 0xff | (data_ints[i+k2] >> amount2) for i in range(0,len(data),group) for k1,k2 in indices_pairs )

    def _parse(self, stream, context, path):
        amount = evaluate(self.amount, context)
        group = evaluate(self.group, context)
        if group < 1:
            raise RotationError("group size must be at least 1 to be valid", path=path)
        data = self._rotate(stream_read_entire(stream, path), amount % (group * 8), group, path)
        if self.subcon is GreedyBytes:
            return data
        if type(self.subcon) is GreedyString:
            return data.decode(self.subcon.encoding)
        return self.subcon._parsereport(io.BytesIO(data), context, path)

    def _build(self, obj, stream, context, path):
        amount = evaluate(self.amount, context)
        group = evaluate(self.group, context)
        if group < 1:
            raise RotationError("group size must be at least 1 to be valid", path=path)
        scratch = io.BytesIO()
        buildret = self.subcon._build(obj, scratch, context, path)
        # building rotates right, expressed as a left rotation by the negated amount
        data = self._rotate(scratch.getvalue(), -amount % (group * 8), group, path)
        stream_write(stream, data, len(data), path)
        return buildret

    def _sizeof(self, context, path):
        return self.subcon._sizeof(context, path)
path): + return self.subcon._sizeof(context, path) + + +class Checksum(Construct): + r""" + Field that is build or validated by a hash of a given byte range. Usually used with :class:`~construct.core.RawCopy` . + + Parsing compares parsed subcon `checksumfield` with a context entry provided by `bytesfunc` and transformed by `hashfunc`. Building fetches the contect entry, transforms it, then writes is using subcon. Size is same as subcon. + + :param checksumfield: a subcon field that reads the checksum, usually Bytes(int) + :param hashfunc: function that takes bytes and returns whatever checksumfield takes when building, usually from hashlib module + :param bytesfunc: context lambda that returns bytes (or object) to be hashed, usually like this.rawcopy1.data + + :raises ChecksumError: parsing and actual checksum does not match actual data + + Can propagate any exception from the lambdas, possibly non-ConstructError. + + Example:: + + import hashlib + d = Struct( + "fields" / RawCopy(Struct( + Padding(1000), + )), + "checksum" / Checksum(Bytes(64), + lambda data: hashlib.sha512(data).digest(), + this.fields.data), + ) + d.build(dict(fields=dict(value={}))) + + :: + + import hashlib + d = Struct( + "offset" / Tell, + "checksum" / Padding(64), + "fields" / RawCopy(Struct( + Padding(1000), + )), + "checksum" / Pointer(this.offset, Checksum(Bytes(64), + lambda data: hashlib.sha512(data).digest(), + this.fields.data)), + ) + d.build(dict(fields=dict(value={}))) + """ + + def __init__(self, checksumfield, hashfunc, bytesfunc): + super().__init__() + self.checksumfield = checksumfield + self.hashfunc = hashfunc + self.bytesfunc = bytesfunc + self.flagbuildnone = True + + def _parse(self, stream, context, path): + hash1 = self.checksumfield._parsereport(stream, context, path) + hash2 = self.hashfunc(self.bytesfunc(context)) + if hash1 != hash2: + raise ChecksumError( + "wrong checksum, read %r, computed %r" % ( + hash1 if not isinstance(hash1,bytestringtype) else 
class Compressed(Tunnel):
    r"""
    Compresses and decompresses the underlying stream while processing the
    subcon. Data is consumed until EOF (like GreedyBytes) and building emits
    compressed bytes with no end marker, so wrap this construct in
    :class:`~construct.core.Prefixed`.

    :param subcon: Construct instance, subcon used for storing the value
    :param encoding: string, one of zlib/gzip/bzip2/lzma, otherwise any codecs-module bytes<->bytes encoding name
    :param level: optional, integer between 0..9, compression level (lzma discards it)

    :raises ImportError: needed module could not be imported by ctor
    :raises StreamError: stream failed when reading until EOF
    """

    def __init__(self, subcon, encoding, level=None):
        super().__init__(subcon)
        self.encoding = encoding
        self.level = level
        # bind the backing module once, so ImportError surfaces at construction time
        if encoding == "zlib":
            import zlib as lib
        elif encoding == "gzip":
            import gzip as lib
        elif encoding == "bzip2":
            import bz2 as lib
        elif encoding == "lzma":
            import lzma as lib
        else:
            import codecs as lib
        self.lib = lib

    def _decode(self, data, context, path):
        if self.encoding in ("zlib", "gzip", "bzip2", "lzma"):
            return self.lib.decompress(data)
        return self.lib.decode(data, self.encoding)

    def _encode(self, data, context, path):
        if self.encoding in ("zlib", "gzip", "bzip2", "lzma"):
            # lzma.compress does not accept a positional level argument
            if self.level is None or self.encoding == "lzma":
                return self.lib.compress(data)
            return self.lib.compress(data, self.level)
        return self.lib.encode(data, self.encoding)


class CompressedLZ4(Tunnel):
    r"""
    Compresses and decompresses the underlying stream using the lz4 library.
    Behaves like GreedyBytes (consumes until EOF) and should be wrapped in
    :class:`~construct.core.Prefixed`.

    :param subcon: Construct instance, subcon used for storing the value

    :raises ImportError: needed module could not be imported by ctor
    :raises StreamError: stream failed when reading until EOF

    Can propagate lz4.frame exceptions.
    """

    def __init__(self, subcon):
        super().__init__(subcon)
        import lz4.frame
        self.lib = lz4.frame

    def _decode(self, data, context, path):
        return self.lib.decompress(data)

    def _encode(self, data, context, path):
        return self.lib.compress(data)
+ + :param subcon: Construct instance, subcon which will operate on the buffered stream + :param tailcutoff: optional, integer, amount of bytes kept in buffer, by default buffers everything + + Can also raise arbitrary exceptions in its implementation. + + Example:: + + Rebuffered(..., tailcutoff=1024).parse_stream(nonseekable_stream) + """ + + def __init__(self, subcon, tailcutoff=None): + super().__init__(subcon) + self.stream2 = RebufferedBytesIO(None, tailcutoff=tailcutoff) + + def _parse(self, stream, context, path): + self.stream2.substream = stream + return self.subcon._parsereport(self.stream2, context, path) + + def _build(self, obj, stream, context, path): + self.stream2.substream = stream + return self.subcon._build(obj, self.stream2, context, path) + + +#=============================================================================== +# lazy equivalents +#=============================================================================== +class Lazy(Subconstruct): + r""" + Lazyfies a field. + + This wrapper allows you to do lazy parsing of individual fields inside a normal Struct (without using LazyStruct which may not work in every scenario). It is also used by KaitaiStruct compiler to emit `instances` because those are not processed greedily, and they may refer to other not yet parsed fields. Those are 2 entirely different applications but semantics are the same. + + Parsing saves the current stream offset and returns a lambda. If and when that lambda gets evaluated, it seeks the stream to then-current position, parses the subcon, and seeks the stream back to previous position. Building evaluates that lambda into an object (if needed), then defers to subcon. Size also defers to subcon. 
+ + :param subcon: Construct instance + + :raises StreamError: requested reading negative amount, could not read enough bytes, requested writing different amount than actual data, or could not write all bytes + :raises StreamError: stream is not seekable and tellable + + Example:: + + >>> d = Lazy(Byte) + >>> x = d.parse(b'\x00') + >>> x + .execute> + >>> x() + 0 + >>> d.build(0) + b'\x00' + >>> d.build(x) + b'\x00' + >>> d.sizeof() + 1 + """ + + def __init__(self, subcon): + super().__init__(subcon) + + def _parse(self, stream, context, path): + offset = stream_tell(stream, path) + def execute(): + fallback = stream_tell(stream, path) + stream_seek(stream, offset, 0, path) + obj = self.subcon._parsereport(stream, context, path) + stream_seek(stream, fallback, 0, path) + return obj + len = self.subcon._actualsize(self, context, path) + stream_seek(stream, len, 1, path) + return execute + + def _build(self, obj, stream, context, path): + if callable(obj): + obj = obj() + return self.subcon._build(obj, stream, context, path) + + +class LazyContainer(dict): + """Used internally.""" + + def __init__(self, struct, stream, offsets, values, context, path): + self._struct = struct + self._stream = stream + self._offsets = offsets + self._values = values + self._context = context + self._path = path + + def __getattr__(self, name): + if name in self._struct._subconsindexes: + return self[name] + raise AttributeError + + def __getitem__(self, index): + if isinstance(index, stringtypes): + index = self._struct._subconsindexes[index] # KeyError + if index in self._values: + return self._values[index] + stream_seek(self._stream, self._offsets[index], 0, self._path) # KeyError + parseret = self._struct.subcons[index]._parsereport(self._stream, self._context, self._path) + self._values[index] = parseret + return parseret + + def __len__(self): + return len(self._struct.subcons) + + def keys(self): + return iter(self._struct._subcons) + + def values(self): + return (self[k] for k 
in self._struct._subcons) + + def items(self): + return ((k, self[k]) for k in self._struct._subcons) + + __iter__ = keys + + def __eq__(self, other): + return Container.__eq__(self, other) + + def __repr__(self): + return "" % (len(self._values), len(self._struct.subcons), ) + + +class LazyStruct(Construct): + r""" + Equivalent to :class:`~construct.core.Struct`, but when this class is parsed, most fields are not parsed (they are skipped if their size can be measured by _actualsize or _sizeof method). See its docstring for details. + + Fields are parsed depending on some factors: + + * Some fields like Int* Float* Bytes(5) Array(5,Byte) Pointer are fixed-size and are therefore skipped. Stream is not read. + * Some fields like Bytes(this.field) are variable-size but their size is known during parsing when there is a corresponding context entry. Those fields are also skipped. Stream is not read. + * Some fields like Prefixed PrefixedArray PascalString are variable-size but their size can be computed by partially reading the stream. Only first few bytes are read (the lengthfield). + * Other fields like VarInt need to be parsed. Stream position that is left after the field was parsed is used. + * Some fields may not work properly, due to the fact that this class attempts to skip fields, and parses them only out of necessity. Miscellaneous fields often have size defined as 0, and fixed sized fields are skippable. + + Note there are restrictions: + + * If a field like Bytes(this.field) references another field in the same struct, you need to access the referenced field first (to trigger its parsing) and then you can access the Bytes field. Otherwise it would fail due to missing context entry. + * If a field references another field within inner (nested) or outer (super) struct, things may break. Context is nested, but this class was not rigorously tested in that manner. + + Building and sizeof are greedy, like in Struct. 
+ + :param \*subcons: Construct instances, list of members, some can be anonymous + :param \*\*subconskw: Construct instances, list of members (requires Python 3.6) + """ + + def __init__(self, *subcons, **subconskw): + super().__init__() + self.subcons = list(subcons) + list(k/v for k,v in subconskw.items()) + self._subcons = Container((sc.name,sc) for sc in self.subcons if sc.name) + self._subconsindexes = Container((sc.name,i) for i,sc in enumerate(self.subcons) if sc.name) + self.flagbuildnone = all(sc.flagbuildnone for sc in self.subcons) + + def __getattr__(self, name): + if name in self._subcons: + return self._subcons[name] + raise AttributeError + + def _parse(self, stream, context, path): + context = Container(_ = context, _params = context._params, _root = None, _parsing = context._parsing, _building = context._building, _sizing = context._sizing, _subcons = self._subcons, _io = stream, _index = context.get("_index", None)) + context._root = context._.get("_root", context) + offset = stream_tell(stream, path) + offsets = {0: offset} + values = {} + for i,sc in enumerate(self.subcons): + try: + offset += sc._actualsize(stream, context, path) + stream_seek(stream, offset, 0, path) + except SizeofError: + parseret = sc._parsereport(stream, context, path) + values[i] = parseret + if sc.name: + context[sc.name] = parseret + offset = stream_tell(stream, path) + offsets[i+1] = offset + return LazyContainer(self, stream, offsets, values, context, path) + + def _build(self, obj, stream, context, path): + # exact copy from Struct class + if obj is None: + obj = Container() + context = Container(_ = context, _params = context._params, _root = None, _parsing = context._parsing, _building = context._building, _sizing = context._sizing, _subcons = self._subcons, _io = stream, _index = context.get("_index", None)) + context._root = context._.get("_root", context) + context.update(obj) + for sc in self.subcons: + try: + if sc.flagbuildnone: + subobj = obj.get(sc.name, 
None) + else: + subobj = obj[sc.name] # raises KeyError + + if sc.name: + context[sc.name] = subobj + + buildret = sc._build(subobj, stream, context, path) + if sc.name: + context[sc.name] = buildret + except StopFieldError: + break + return context + + def _sizeof(self, context, path): + # exact copy from Struct class + context = Container(_ = context, _params = context._params, _root = None, _parsing = context._parsing, _building = context._building, _sizing = context._sizing, _subcons = self._subcons, _io = None, _index = context.get("_index", None)) + context._root = context._.get("_root", context) + try: + return sum(sc._sizeof(context, path) for sc in self.subcons) + except (KeyError, AttributeError): + raise SizeofError("cannot calculate size, key not found in context", path=path) + + +class LazyListContainer(list): + """Used internally.""" + + def __init__(self, subcon, stream, count, offsets, values, context, path): + self._subcon = subcon + self._stream = stream + self._count = count + self._offsets = offsets + self._values = values + self._context = context + self._path = path + + def __getitem__(self, index): + if isinstance(index, slice): + return [self[i] for i in range(*index.indices(self._count))] + if index in self._values: + return self._values[index] + stream_seek(self._stream, self._offsets[index], 0, self._path) # KeyError + parseret = self._subcon._parsereport(self._stream, self._context, self._path) + self._values[index] = parseret + return parseret + + def __getslice__(self, start, stop): + if stop == sys.maxsize: + stop = self._count + return self.__getitem__(slice(start, stop)) + + def __len__(self): + return self._count + + def __iter__(self): + return (self[i] for i in range(self._count)) + + def __eq__(self, other): + return len(self) == len(other) and all(self[i] == other[i] for i in range(self._count)) + + def __repr__(self): + return "" % (len(self._values), self._count, ) + + +class LazyArray(Subconstruct): + r""" + Equivalent to 
class LazyArray(Subconstruct):
    r"""
    Equivalent to :class:`~construct.core.Array`, but the subcon is not parsed
    when possible: elements whose size can be measured (via _actualsize or
    _sizeof) are skipped and parsed only on first access through the returned
    LazyListContainer. Elements that cannot be sized (e.g. VarInt) are parsed
    eagerly. Building and sizeof are greedy, like in Array.

    :param count: integer or context lambda, strict amount of elements
    :param subcon: Construct instance, subcon to process individual elements
    """

    def __init__(self, count, subcon):
        super().__init__(subcon)
        self.count = count

    def _parse(self, stream, context, path):
        sc = self.subcon
        count = self.count
        if callable(count):
            count = count(context)
        if not 0 <= count:
            raise RangeError("invalid count %s" % (count,), path=path)
        pos = stream_tell(stream, path)
        # offsets[i] is the stream position where element i starts
        offsets = {0: pos}
        values = {}
        for idx in range(count):
            try:
                # sizeable element: skip over it without parsing
                pos += sc._actualsize(stream, context, path)
                stream_seek(stream, pos, 0, path)
            except SizeofError:
                # unsizeable element: parse it now and cache the value
                values[idx] = sc._parsereport(stream, context, path)
                pos = stream_tell(stream, path)
            offsets[idx+1] = pos
        return LazyListContainer(sc, stream, count, offsets, values, context, path)

    def _build(self, obj, stream, context, path):
        # exact copy from Array class
        count = self.count
        if callable(count):
            count = count(context)
        if not 0 <= count:
            raise RangeError("invalid count %s" % (count,), path=path)
        if not len(obj) == count:
            raise RangeError("expected %d elements, found %d" % (count, len(obj)), path=path)
        retlist = ListContainer()
        for idx, element in enumerate(obj):
            context._index = idx
            retlist.append(self.subcon._build(element, stream, context, path))
        return retlist

    def _sizeof(self, context, path):
        # exact copy from Array class
        try:
            count = self.count
            if callable(count):
                count = count(context)
        except (KeyError, AttributeError):
            raise SizeofError("cannot calculate size, key not found in context", path=path)
        return count * self.subcon._sizeof(context, path)
Useful for recursive data structures, like linked-lists and trees, where a construct needs to refer to itself (while it does not exist yet in the namespace). + + Note that it is possible to obtain same effect without using this class, using a loop. However there are usecases where that is not possible (if remaining nodes cannot be sized-up, and there is data following the recursive structure). There is also a significant difference, namely that LazyBound actually does greedy parsing while the loop does lazy parsing. See examples. + + To break recursion, use `If` field. See examples. + + :param subconfunc: parameter-less lambda returning Construct instance, can also return itself + + Example:: + + d = Struct( + "value" / Byte, + "next" / If(this.value > 0, LazyBound(lambda: d)), + ) + >>> print(d.parse(b"\x05\x09\x00")) + Container: + value = 5 + next = Container: + value = 9 + next = Container: + value = 0 + next = None + + :: + + d = Struct( + "value" / Byte, + "next" / GreedyBytes, + ) + data = b"\x05\x09\x00" + while data: + x = d.parse(data) + data = x.next + print(x) + # print outputs + Container: + value = 5 + next = \t\x00 (total 2) + # print outputs + Container: + value = 9 + next = \x00 (total 1) + # print outputs + Container: + value = 0 + next = (total 0) + """ + + def __init__(self, subconfunc): + super().__init__() + self.subconfunc = subconfunc + + def _parse(self, stream, context, path): + sc = self.subconfunc() + return sc._parsereport(stream, context, path) + + def _build(self, obj, stream, context, path): + sc = self.subconfunc() + return sc._build(obj, stream, context, path) + + +#=============================================================================== +# adapters and validators +#=============================================================================== +class ExprAdapter(Adapter): + r""" + Generic adapter that takes `decoder` and `encoder` lambdas as parameters. 
You can use ExprAdapter instead of writing a full-blown class deriving from Adapter when only a simple lambda is needed. + + :param subcon: Construct instance, subcon to adapt + :param decoder: lambda that takes (obj, context, path) and returns an decoded version of obj + :param encoder: lambda that takes (obj, context, path) and returns an encoded version of obj + + Example:: + + >>> d = ExprAdapter(Byte, obj_+1, obj_-1) + >>> d.parse(b'\x04') + 5 + >>> d.build(5) + b'\x04' + """ + def __init__(self, subcon, decoder, encoder): + super().__init__(subcon) + self._decode = lambda obj,ctx,path: decoder(obj,ctx) + self._encode = lambda obj,ctx,path: encoder(obj,ctx) + + +class ExprSymmetricAdapter(ExprAdapter): + """ + Macro around :class:`~construct.core.ExprAdapter`. + + :param subcon: Construct instance, subcon to adapt + :param encoder: lambda that takes (obj, context, path) and returns both encoded version and decoded version of obj + + Example:: + + >>> d = ExprSymmetricAdapter(Byte, obj_ & 0b00001111) + >>> d.parse(b"\xff") + 15 + >>> d.build(255) + b'\x0f' + """ + def __init__(self, subcon, encoder): + super().__init__(subcon, encoder, encoder) + + +class ExprValidator(Validator): + r""" + Generic adapter that takes `validator` lambda as parameter. You can use ExprValidator instead of writing a full-blown class deriving from Validator when only a simple lambda is needed. + + :param subcon: Construct instance, subcon to adapt + :param validator: lambda that takes (obj, context) and returns a bool + + Example:: + + >>> d = ExprValidator(Byte, obj_ & 0b11111110 == 0) + >>> d.build(1) + b'\x01' + >>> d.build(88) + ValidationError: object failed validation: 88 + + """ + def __init__(self, subcon, validator): + super().__init__(subcon) + self._validate = lambda obj,ctx,path: validator(obj,ctx) + + +def OneOf(subcon, valids): + r""" + Validates that the object is one of the listed values, both during parsing and building. + + .. 
note:: For performance, `valids` should be a set/frozenset. + + :param subcon: Construct instance, subcon to validate + :param valids: collection implementing __contains__, usually a list or set + + :raises ValidationError: parsed or build value is not among valids + + Example:: + + >>> d = OneOf(Byte, [1,2,3]) + >>> d.parse(b"\x01") + 1 + >>> d.parse(b"\xff") + construct.core.ValidationError: object failed validation: 255 + """ + return ExprValidator(subcon, lambda obj,ctx: obj in valids) + + +def NoneOf(subcon, invalids): + r""" + Validates that the object is none of the listed values, both during parsing and building. + + .. note:: For performance, `valids` should be a set/frozenset. + + :param subcon: Construct instance, subcon to validate + :param invalids: collection implementing __contains__, usually a list or set + + :raises ValidationError: parsed or build value is among invalids + + """ + return ExprValidator(subcon, lambda obj,ctx: obj not in invalids) + + +def Filter(predicate, subcon): + r""" + Filters a list leaving only the elements that passed through the predicate. + + :param subcon: Construct instance, usually Array GreedyRange Sequence + :param predicate: lambda that takes (obj, context) and returns a bool + + Can propagate any exception from the lambda, possibly non-ConstructError. + + Example:: + + >>> d = Filter(obj_ != 0, Byte[:]) + >>> d.parse(b"\x00\x02\x00") + [2] + >>> d.build([0,1,0,2,0]) + b'\x01\x02' + """ + return ExprSymmetricAdapter(subcon, lambda obj,ctx: [x for x in obj if predicate(x,ctx)]) + + +class Slicing(Adapter): + r""" + Adapter for slicing a list. Works with GreedyRange and Sequence. 
+ + :param subcon: Construct instance, subcon to slice + :param count: integer, expected number of elements, needed during building + :param start: integer for start index (or None for entire list) + :param stop: integer for stop index (or None for up-to-end) + :param step: integer, step (or 1 for every element) + :param empty: object, value to fill the list with, during building + + Example:: + + d = Slicing(Array(4,Byte), 4, 1, 3, empty=0) + assert d.parse(b"\x01\x02\x03\x04") == [2,3] + assert d.build([2,3]) == b"\x00\x02\x03\x00" + assert d.sizeof() == 4 + """ + def __init__(self, subcon, count, start, stop, step=1, empty=None): + super().__init__(subcon) + self.count = count + self.start = start + self.stop = stop + self.step = step + self.empty = empty + def _decode(self, obj, context, path): + return obj[self.start:self.stop:self.step] + def _encode(self, obj, context, path): + if self.start is None: + return obj + elif self.stop is None: + output = [self.empty] * self.count + output[self.start::self.step] = obj + else: + output = [self.empty] * self.count + output[self.start:self.stop:self.step] = obj + return output + + +class Indexing(Adapter): + r""" + Adapter for indexing a list (getting a single item from that list). Works with Range and Sequence and their lazy equivalents. 
from construct import *
from construct.lib import *
import sys, traceback, pdb, inspect


class Probe(Construct):
    r"""
    Probe that dumps the context, and some stream content (peeks into it) to the screen to aid the debugging process. It can optionally limit itself to a single context entry, instead of printing entire context.

    :param into: optional, None by default, or context lambda selecting a single entry to print
    :param lookahead: optional, integer, number of bytes to dump from the stream
    """

    def __init__(self, into=None, lookahead=None):
        super(Probe, self).__init__()
        # Building accepts a missing value, since Probe itself emits no bytes.
        self.flagbuildnone = True
        self.into = into
        self.lookahead = lookahead

    def _parse(self, stream, context, path):
        # Consumes nothing from the stream; only prints diagnostics (returns None).
        self.printout(stream, context, path)

    def _build(self, obj, stream, context, path):
        # Writes nothing to the stream; only prints diagnostics.
        self.printout(stream, context, path)

    def _sizeof(self, context, path):
        # No stream exists during sizeof, hence the None argument.
        self.printout(None, context, path)
        return 0

    def _emitparse(self, code):
        # Compiled form degrades to a plain print of the chosen expression.
        return f"print({self.into})" if self.into else "print(this)"

    def _emitbuild(self, code):
        return f"print({self.into})" if self.into else "print(this)"

    def printout(self, stream, context, path):
        """Print a separator-framed dump: an optional stream peek, then the context (or the `into` sub-expression)."""
        print("--------------------------------------------------")
        print("Probe, path is %s, into is %r" % (path, self.into, ))

        if self.lookahead and stream is not None:
            # Peek: read ahead, then restore the position so parsing is unaffected.
            fallback = stream.tell()
            datafollows = stream.read(self.lookahead)
            stream.seek(fallback)
            if datafollows:
                print("Stream peek: (hexlified) %s..." % (hexlify(datafollows), ))
            else:
                print("Stream peek: EOF reached")

        if context is not None:
            if self.into:
                try:
                    subcontext = self.into(context)
                    print(subcontext)
                except Exception:
                    # `into` is a user-provided lambda; report the failure rather than propagate.
                    print("Failed to compute %r on the context %r" % (self.into, context, ))
            else:
                print(context)
        print("--------------------------------------------------")


class Debugger(Subconstruct):
    r"""
    PDB-based debugger. When an exception occurs in the subcon, a debugger will appear and allow you to debug the error (and even fix it on-the-fly).

    :param subcon: Construct instance, subcon to debug
    """

    def _parse(self, stream, context, path):
        try:
            return self.subcon._parse(stream, context, path)
        except Exception:
            # The user may assign self.retval inside the pdb session to recover
            # from the error; the sentinel distinguishes "not assigned".
            self.retval = NotImplemented
            self.handle_exc(path, msg="(you can set self.retval, which will be returned from method)")
            if self.retval is NotImplemented:
                raise
            else:
                return self.retval

    def _build(self, obj, stream, context, path):
        try:
            return self.subcon._build(obj, stream, context, path)
        except Exception:
            # NOTE(review): unlike _parse, the exception is swallowed here and
            # None is returned after the pdb session ends.
            self.handle_exc(path)

    def _sizeof(self, context, path):
        try:
            return self.subcon._sizeof(context, path)
        except Exception:
            # NOTE(review): swallows the exception and returns None (see _build).
            self.handle_exc(path)

    def _emitparse(self, code):
        # Compiled code skips the debugger entirely and defers to the subcon.
        return self.subcon._compileparse(code)

    def _emitbuild(self, code):
        return self.subcon._compilebuild(code)

    def handle_exc(self, path, msg=None):
        """Print the traceback (minus the Debugger frame) and drop into pdb post-mortem."""
        print("--------------------------------------------------")
        print("Debugging exception of %r" % (self.subcon, ))
        print("path is %s" % (path, ))
        # [1:] drops the "Traceback (most recent call last)" header line.
        print("".join(traceback.format_exception(*sys.exc_info())[1:]))
        if msg:
            print(msg)
        pdb.post_mortem(sys.exc_info()[2])
        print("--------------------------------------------------")
import operator
# Python 3 removed operator.div; alias it to truediv so the opnames table and
# the mixin below can keep a single "div" entry.
if not hasattr(operator, "div"):
    operator.div = operator.truediv


# Maps operator functions to their printable symbols; used only by the
# __repr__/__str__ of the expression nodes below.
opnames = {
    operator.add : "+",
    operator.sub : "-",
    operator.mul : "*",
    operator.div : "/",
    operator.floordiv : "//",
    operator.mod : "%",
    operator.pow : "**",
    operator.xor : "^",
    operator.lshift : "<<",
    operator.rshift : ">>",
    operator.and_ : "and",
    operator.or_ : "or",
    operator.not_ : "not",
    operator.neg : "-",
    operator.pos : "+",
    operator.contains : "in",
    operator.gt : ">",
    operator.ge : ">=",
    operator.lt : "<",
    operator.le : "<=",
    operator.eq : "==",
    operator.ne : "!=",
}


class ExprMixin(object):
    """
    Mixin that overloads (nearly) every operator to build a lazy expression
    tree (UniExpr/BinExpr nodes) instead of computing a value eagerly. The
    resulting objects are callables, evaluated later against a context.
    """

    # --- forward binary operators ---
    def __add__(self, other):
        return BinExpr(operator.add, self, other)
    def __sub__(self, other):
        return BinExpr(operator.sub, self, other)
    def __mul__(self, other):
        return BinExpr(operator.mul, self, other)
    def __floordiv__(self, other):
        return BinExpr(operator.floordiv, self, other)
    def __truediv__(self, other):
        return BinExpr(operator.div, self, other)
    # NOTE(review): legacy PY2 alias maps "/" to floordiv, not truediv.
    __div__ = __floordiv__
    def __mod__(self, other):
        return BinExpr(operator.mod, self, other)
    def __pow__(self, other):
        return BinExpr(operator.pow, self, other)
    def __xor__(self, other):
        return BinExpr(operator.xor, self, other)
    def __rshift__(self, other):
        return BinExpr(operator.rshift, self, other)
    def __lshift__(self, other):
        return BinExpr(operator.lshift, self, other)
    def __and__(self, other):
        return BinExpr(operator.and_, self, other)
    def __or__(self, other):
        return BinExpr(operator.or_, self, other)

    # --- reflected binary operators (other <op> self) ---
    def __radd__(self, other):
        return BinExpr(operator.add, other, self)
    def __rsub__(self, other):
        return BinExpr(operator.sub, other, self)
    def __rmul__(self, other):
        return BinExpr(operator.mul, other, self)
    def __rfloordiv__(self, other):
        return BinExpr(operator.floordiv, other, self)
    def __rtruediv__(self, other):
        return BinExpr(operator.div, other, self)
    # NOTE(review): same legacy aliasing as __div__ above.
    __rdiv__ = __rfloordiv__
    def __rmod__(self, other):
        return BinExpr(operator.mod, other, self)
    def __rpow__(self, other):
        return BinExpr(operator.pow, other, self)
    def __rxor__(self, other):
        return BinExpr(operator.xor, other, self)
    def __rrshift__(self, other):
        return BinExpr(operator.rshift, other, self)
    def __rlshift__(self, other):
        return BinExpr(operator.lshift, other, self)
    def __rand__(self, other):
        return BinExpr(operator.and_, other, self)
    def __ror__(self, other):
        return BinExpr(operator.or_, other, self)

    # --- unary operators ---
    def __neg__(self):
        return UniExpr(operator.neg, self)
    def __pos__(self):
        return UniExpr(operator.pos, self)
    def __invert__(self):
        # ~x builds a logical not, not a bitwise invert.
        return UniExpr(operator.not_, self)
    __inv__ = __invert__

    # --- comparisons (also lazy: == returns an expression, not a bool) ---
    def __contains__(self, other):
        return BinExpr(operator.contains, self, other)
    def __gt__(self, other):
        return BinExpr(operator.gt, self, other)
    def __ge__(self, other):
        return BinExpr(operator.ge, self, other)
    def __lt__(self, other):
        return BinExpr(operator.lt, self, other)
    def __le__(self, other):
        return BinExpr(operator.le, self, other)
    def __eq__(self, other):
        return BinExpr(operator.eq, self, other)
    def __ne__(self, other):
        return BinExpr(operator.ne, self, other)

    def __getstate__(self):
        # Pickling support: collect attributes from both __dict__ and any
        # __slots__ declared along the class hierarchy.
        attrs = {}
        if hasattr(self, "__dict__"):
            attrs.update(self.__dict__)
        slots = []
        c = self.__class__
        while c is not None:
            if hasattr(c, "__slots__"):
                slots.extend(c.__slots__)
            c = c.__base__
        for name in slots:
            if hasattr(self, name):
                attrs[name] = getattr(self, name)
        return attrs

    def __setstate__(self, attrs):
        for name, value in attrs.items():
            setattr(self, name, value)


class UniExpr(ExprMixin):
    """Unary expression node: applies `op` to a (possibly lazy) operand."""

    def __init__(self, op, operand):
        self.op = op
        self.operand = operand

    def __repr__(self):
        return "%s %r" % (opnames[self.op], self.operand)

    def __str__(self):
        return "%s %s" % (opnames[self.op], self.operand)

    def __call__(self, obj, *args):
        # Lazy operands are themselves callables, evaluated against obj first.
        operand = self.operand(obj) if callable(self.operand) else self.operand
        return self.op(operand)


class BinExpr(ExprMixin):
    """Binary expression node: applies `op` to two (possibly lazy) operands."""

    def __init__(self, op, lhs, rhs):
        self.op = op
        self.lhs = lhs
        self.rhs = rhs

    def __repr__(self):
        return "(%r %s %r)" % (self.lhs, opnames[self.op], self.rhs)

    def __str__(self):
        return "(%s %s %s)" % (self.lhs, opnames[self.op], self.rhs)

    def __call__(self, obj, *args):
        lhs = self.lhs(obj) if callable(self.lhs) else self.lhs
        rhs = self.rhs(obj) if callable(self.rhs) else self.rhs
        return self.op(lhs, rhs)


class Path(ExprMixin):
    """
    Lazy attribute/key path rooted at a name (e.g. `this`). Each attribute or
    item access returns a new, longer Path; calling it walks the stored chain
    of lookups on the given object.
    """

    def __init__(self, name, field=None, parent=None):
        # Double-underscore names are intentionally name-mangled so that
        # __getattr__ below can intercept every "public" attribute access.
        self.__name = name
        self.__field = field
        self.__parent = parent

    def __repr__(self):
        if self.__parent is None:
            return self.__name
        else:
            return "%s[%r]" % (self.__parent, self.__field)

    def __str__(self):
        if self.__parent is None:
            return self.__name
        else:
            return "%s[%r]" % (self.__parent, self.__field)

    def __call__(self, obj, *args):
        # The root path evaluates to the object itself; longer paths index
        # into the parent's result.
        if self.__parent is None:
            return obj
        else:
            return self.__parent(obj)[self.__field]

    def __getfield__(self):
        return self.__field

    def __getattr__(self, name):
        return Path(self.__name, name, self)

    def __getitem__(self, name):
        return Path(self.__name, name, self)


class Path2(ExprMixin):
    """Like Path but indexes args[1] when called (used for `list_`); supports item access only."""

    def __init__(self, name, index=None, parent=None):
        self.__name = name
        self.__index = index
        self.__parent = parent

    def __repr__(self):
        if self.__parent is None:
            return self.__name
        else:
            return "%r[%r]" % (self.__parent, self.__index)

    def __call__(self, *args):
        if self.__parent is None:
            return args[1]
        else:
            return self.__parent(*args)[self.__index]

    def __getitem__(self, index):
        return Path2(self.__name, index, self)


class FuncPath(ExprMixin):
    """
    Lazy wrapper around a plain function (len, sum, ...). First call binds an
    operand; the second (deferred) call applies the function to it.
    """

    def __init__(self, func, operand=None):
        self.__func = func
        self.__operand = operand

    def __repr__(self):
        if self.__operand is None:
            return "%s_" % (self.__func.__name__)
        else:
            return "%s_(%r)" % (self.__func.__name__, self.__operand)

    def __str__(self):
        if self.__operand is None:
            return "%s_" % (self.__func.__name__)
        else:
            return "%s_(%s)" % (self.__func.__name__, self.__operand)

    def __call__(self, operand, *args):
        if self.__operand is None:
            # Unbound: capture a lazy operand, or apply-to-value immediately.
            return FuncPath(self.__func, operand) if callable(operand) else operand
        else:
            # Bound: evaluate the stored operand (if lazy), then apply func.
            return self.__func(self.__operand(operand) if callable(self.__operand) else self.__operand)


# Public lazy-expression singletons used throughout construct declarations.
this = Path("this")
obj_ = Path("obj_")
list_ = Path2("list_")

len_ = FuncPath(len)
sum_ = FuncPath(sum)
min_ = FuncPath(min)
max_ = FuncPath(max)
abs_ = FuncPath(abs)
import binascii


def integer2bits(number, width, signed=False):
    r"""
    Converts an integer into its binary representation in a bit-string. Width is the amount of bits to generate. Each bit is represented as either \\x00 or \\x01. The most significant bit is first, big-endian. This is reverse to `bits2integer`.

    :param number: integer to convert
    :param width: number of bits to emit, must be non-negative
    :param signed: whether negative numbers are allowed (two's complement range)
    :raises ValueError: when width is negative or number does not fit in width bits

    Examples:

        >>> integer2bits(19, 8)
        b'\x00\x00\x00\x01\x00\x00\x01\x01'
    """
    if width < 0:
        raise ValueError(f"width {width} must be non-negative")
    if width == 0:
        return b""

    # Bounds mirror two's complement for the requested width.
    if signed:
        minval = -(2 ** width // 2)
        maxval = 2 ** width // 2 - 1
    else:
        minval = 0
        maxval = 2 ** width - 1
    if not minval <= number <= maxval:
        raise ValueError(f"number {number} is out of range (min={minval}, max={maxval})")

    if number < 0:
        # Two's complement encoding of negative values.
        number += 1 << width
    bits = bytearray(width)
    i = width - 1
    while number and i >= 0:
        bits[i] = number & 1
        number >>= 1
        i -= 1
    return bytes(bits)


def integer2bytes(number, width, signed=False):
    r"""
    Converts an integer into a byte-string. This is reverse to `bytes2integer`.

    :raises ValueError: when width is negative or number does not fit

    Examples:

        >>> integer2bytes(19, 4)
        b'\x00\x00\x00\x13'
    """
    # int.to_bytes does not validate a negative width on all interpreters
    # (notably PyPy), so check explicitly.
    if width < 0:
        raise ValueError(f"width {width} must be non-negative")

    try:
        return int.to_bytes(number, width, 'big', signed=signed)
    except OverflowError:
        raise ValueError(f"number {number} does not fit width {width} signed {signed}")


def bits2integer(data, signed=False):
    r"""
    Converts a bit-string into an integer. Set signed to interpret the number as a 2-s complement signed integer. This is reverse to `integer2bits`.

    Examples:

        >>> bits2integer(b"\x01\x00\x00\x01\x01")
        19
    """
    if data == b"":
        return 0

    number = 0
    for b in data:
        number = (number << 1) | b

    if signed and data[0]:
        # Leading \x01 marks a negative two's complement value.
        bias = 1 << len(data)
        return number - bias
    else:
        return number


def bytes2integer(data, signed=False):
    r"""
    Converts a byte-string into an integer. This is reverse to `integer2bytes`.

    Examples:

        >>> bytes2integer(b'\x00\x00\x00\x13')
        19
    """
    return int.from_bytes(data, 'big', signed=signed)


# Precomputed per-byte expansion: avoids recomputing 8 bits per byte.
BYTES2BITS_CACHE = {i: integer2bits(i, 8) for i in range(256)}
def bytes2bits(data):
    r"""
    Converts between bit-string and byte-string representations, both as bytes type.

    Example:

        >>> bytes2bits(b'ab')
        b"\x00\x01\x01\x00\x00\x00\x00\x01\x00\x01\x01\x00\x00\x00\x01\x00"
    """
    return b"".join(BYTES2BITS_CACHE[b] for b in data)


# Inverse lookup of BYTES2BITS_CACHE, keyed by 8-byte bit-strings.
BITS2BYTES_CACHE = {bytes2bits(bytes([i])): i for i in range(256)}
def bits2bytes(data):
    r"""
    Converts between bit-string and byte-string representations, both as bytes type. Its length must be multiple of 8.

    :raises ValueError: when the length is not a multiple of 8

    Example:

        >>> bits2bytes(b"\x00\x01\x01\x00\x00\x00\x00\x01\x00\x01\x01\x00\x00\x00\x01\x00")
        b'ab'
    """
    if len(data) % 8:
        raise ValueError(f"data length {len(data)} must be a multiple of 8")
    return bytes(BITS2BYTES_CACHE[data[i:i+8]] for i in range(0, len(data), 8))


def swapbytes(data):
    r"""
    Performs an endianness swap on byte-string.

    Example:

        >>> swapbytes(b'abcd')
        b'dcba'
    """
    return data[::-1]


def swapbytesinbits(data):
    r"""
    Performs a byte-swap within a bit-string. Its length must be multiple of 8.

    :raises ValueError: when the length is not a multiple of 8

    Example:

        >>> swapbytesinbits(b'0000000011111111')
        b'1111111100000000'
    """
    if len(data) % 8:
        raise ValueError(f"data length {len(data)} must be a multiple of 8")
    return b"".join(data[i:i+8] for i in reversed(range(0, len(data), 8)))


# Precomputed per-byte bit reversal (byte value -> bit-reversed byte value).
SWAPBITSINBYTES_CACHE = {i: bits2bytes(swapbytes(bytes2bits(bytes([i]))))[0] for i in range(256)}
def swapbitsinbytes(data):
    r"""
    Performs a bit-reversal on each byte within a byte-string.

    Example:

        >>> swapbitsinbytes(b"\xf0\x00")
        b"\x0f\x00"
    """
    return bytes(SWAPBITSINBYTES_CACHE[b] for b in data)


def hexlify(data):
    """Returns binascii.hexlify(data)."""
    return binascii.hexlify(data)


def unhexlify(data):
    """Returns binascii.unhexlify(data)."""
    return binascii.unhexlify(data)
class RebufferedBytesIO(object):
    """
    Wraps a non-seekable stream and buffers all traffic so that limited
    seeking (whence 0 and 1 only) becomes possible. Optionally trims the
    buffered tail to bound memory usage.

    :param substream: underlying stream; only its read/write are used
    :param tailcutoff: optional int, keep at most this many bytes behind the
        current offset buffered (older data is discarded)
    """

    def __init__(self, substream, tailcutoff=None):
        self.substream = substream
        self.offset = 0       # current logical position in the stream
        self.rwbuffer = b""   # buffered window of the stream
        self.moved = 0        # absolute stream offset of rwbuffer[0]
        self.tailcutoff = tailcutoff

    def read(self, count=None):
        """Read exactly `count` bytes, blocking (busy-waiting) until the substream delivers them.

        :raises ValueError: when count is None (read-to-EOF is unsupported)
        :raises IOError: when the requested range was already cut off, or the substream ran dry
        """
        if count is None:
            raise ValueError("count must be integer, reading until EOF not supported")
        startsat = self.offset
        endsat = startsat + count
        if startsat < self.moved:
            raise IOError("could not read because tail was cut off")
        # Pull from the substream until the buffer covers [startsat, endsat).
        while self.moved + len(self.rwbuffer) < endsat:
            try:
                newdata = self.substream.read(128*1024)
            except BlockingIOError:
                newdata = None
            if not newdata:
                # Yield the CPU and retry; substream may be slow/non-blocking.
                sleep(0)
                continue
            self.rwbuffer += newdata
        data = self.rwbuffer[startsat-self.moved:endsat-self.moved]
        self.offset += count
        # Discard buffered data older than offset - tailcutoff.
        if self.tailcutoff is not None and self.moved < self.offset - self.tailcutoff:
            removed = self.offset - self.tailcutoff - self.moved
            self.moved += removed
            self.rwbuffer = self.rwbuffer[removed:]
        if len(data) < count:
            raise IOError("could not read enough bytes, something went wrong")
        return data

    def write(self, data):
        """Splice `data` into the buffer at the current offset, reading from the substream first if there is a gap."""
        startsat = self.offset
        endsat = startsat + len(data)
        # Fill the buffer up to the write position before overwriting.
        while self.moved + len(self.rwbuffer) < startsat:
            newdata = self.substream.read(128*1024)
            self.rwbuffer += newdata
            if not newdata:
                sleep(0)
        self.rwbuffer = self.rwbuffer[:startsat-self.moved] + data + self.rwbuffer[endsat-self.moved:]
        self.offset = endsat
        if self.tailcutoff is not None and self.moved < self.offset - self.tailcutoff:
            removed = self.offset - self.tailcutoff - self.moved
            self.moved += removed
            self.rwbuffer = self.rwbuffer[removed:]
        return len(data)

    def seek(self, at, whence=0):
        # Only absolute (0) and relative (1) seeks are meaningful without
        # knowing the stream length; whence=2 is rejected.
        if whence == 0:
            self.offset = at
            return self.offset
        elif whence == 1:
            self.offset += at
            return self.offset
        else:
            raise ValueError("this class seeks only with whence: 0 and 1 (excluded 2)")

    def seekable(self):
        return True

    def tell(self):
        return self.offset

    def tellable(self):
        return True

    def cachedfrom(self):
        # Absolute offset of the oldest byte still buffered.
        return self.moved

    def cachedto(self):
        # Absolute offset one past the newest byte buffered.
        return self.moved + len(self.rwbuffer)
def setGlobalPrintFalseFlags(enabled=False):
    r"""
    When enabled, Container __str__ that was produced by FlagsEnum parsing prints all values, otherwise and by default, it prints only the values that are True.

    :param enabled: bool
    """
    global globalPrintFalseFlags
    globalPrintFalseFlags = enabled


def setGlobalPrintPrivateEntries(enabled=False):
    r"""
    When enabled, Container __str__ shows keys like _ _index _etc, otherwise and by default, it hides those keys. __repr__ never shows private entries.

    :param enabled: bool
    """
    global globalPrintPrivateEntries
    globalPrintPrivateEntries = enabled


def recursion_lock(retval="", lock_name="__recursion_lock__"):
    """Used internally. Decorator that returns `retval` on re-entrant calls,
    guarding __repr__/__str__ of self-referential containers against
    infinite recursion."""
    def decorator(func):
        def wrapper(self, *args, **kw):
            if getattr(self, lock_name, False):
                return retval
            setattr(self, lock_name, True)
            try:
                return func(self, *args, **kw)
            finally:
                delattr(self, lock_name)

        wrapper.__name__ = func.__name__
        return wrapper

    return decorator


class Container(collections.OrderedDict):
    r"""
    Generic ordered dictionary that allows both key and attribute access, and preserves key order by insertion. Adding keys is preferred using \*\*entrieskw (requires Python 3.6). Equality does NOT check item order. Also provides regex searching.

    Example::

        # empty dict
        >>> Container()
        # list of pairs, not recommended
        >>> Container([ ("name","anonymous"), ("age",21) ])
        # This syntax requires Python 3.6
        >>> Container(name="anonymous", age=21)
        # copies another dict
        >>> Container(dict2)
        >>> Container(container2)
    """
    # Only the recursion-lock flag lives outside the dict storage.
    __slots__ = ["__recursion_lock__"]

    def __getattr__(self, name):
        # Attribute access falls through to key access, except for slots.
        try:
            if name in self.__slots__:
                return object.__getattribute__(self, name)
            else:
                return self[name]
        except KeyError:
            raise AttributeError(name)

    def __setattr__(self, name, value):
        try:
            if name in self.__slots__:
                return object.__setattr__(self, name, value)
            else:
                self[name] = value
        except KeyError:
            raise AttributeError(name)

    def __delattr__(self, name):
        try:
            if name in self.__slots__:
                return object.__delattr__(self, name)
            else:
                del self[name]
        except KeyError:
            raise AttributeError(name)

    def update(self, seqordict):
        """Appends items from another dict/Container or list-of-tuples."""
        if isinstance(seqordict, dict):
            seqordict = seqordict.items()
        for k, v in seqordict:
            self[k] = v

    def copy(self):
        return Container(self)

    __update__ = update

    __copy__ = copy

    def __dir__(self):
        """For auto completion of attributes based on container values."""
        return list(self.keys()) + list(self.__class__.__dict__) + dir(super(Container, self))

    def __eq__(self, other):
        # Order-insensitive equality that ignores underscore-prefixed
        # (private/bookkeeping) keys on both sides.
        if self is other:
            return True
        if not isinstance(other, dict):
            return False
        def isequal(v1, v2):
            # numpy arrays do not support plain ==-as-bool; compare elementwise.
            if v1.__class__.__name__ == "ndarray" or v2.__class__.__name__ == "ndarray":
                import numpy
                return numpy.array_equal(v1, v2)
            return v1 == v2
        for k, v in self.items():
            if isinstance(k, unicodestringtype) and k.startswith(u"_"):
                continue
            if isinstance(k, bytestringtype) and k.startswith(b"_"):
                continue
            if k not in other or not isequal(v, other[k]):
                return False
        for k, v in other.items():
            if isinstance(k, unicodestringtype) and k.startswith(u"_"):
                continue
            if isinstance(k, bytestringtype) and k.startswith(b"_"):
                continue
            if k not in self or not isequal(v, self[k]):
                return False
        return True

    def __ne__(self, other):
        return not self == other

    @recursion_lock()
    def __repr__(self):
        # Compact single-line form; hides underscore-prefixed keys.
        parts = []
        for k, v in self.items():
            if isinstance(k, str) and k.startswith("_"):
                continue
            if isinstance(v, stringtypes):
                parts.append(str(k) + "=" + reprstring(v))
            else:
                parts.append(str(k) + "=" + repr(v))
        return "Container(%s)" % ", ".join(parts)

    @recursion_lock()
    def __str__(self):
        # Multi-line pretty form; honors the global print flags and truncates
        # long byte/unicode strings unless globalPrintFullStrings is set.
        indentation = "\n    "
        text = ["Container: "]
        isflags = getattr(self, "_flagsenum", False)
        for k, v in self.items():
            if isinstance(k, str) and k.startswith("_") and not globalPrintPrivateEntries:
                continue
            if isflags and not v and not globalPrintFalseFlags:
                continue
            text.extend([indentation, str(k), " = "])
            if v.__class__.__name__ == "EnumInteger":
                text.append("(enum) (unknown) %s" % (v, ))
            elif v.__class__.__name__ == "EnumIntegerString":
                text.append("(enum) %s %s" % (v, v.intvalue, ))
            elif v.__class__.__name__ in ["HexDisplayedBytes", "HexDumpDisplayedBytes"]:
                # Multiline hex dumps get re-indented line by line.
                text.append(indentation.join(str(v).split("\n")))
            elif isinstance(v, bytestringtype):
                printingcap = 16
                if len(v) <= printingcap or globalPrintFullStrings:
                    text.append("%s (total %d)" % (reprstring(v), len(v)))
                else:
                    text.append("%s... (truncated, total %d)" % (reprstring(v[:printingcap]), len(v)))
            elif isinstance(v, unicodestringtype):
                printingcap = 32
                if len(v) <= printingcap or globalPrintFullStrings:
                    text.append("%s (total %d)" % (reprstring(v), len(v)))
                else:
                    text.append("%s... (truncated, total %d)" % (reprstring(v[:printingcap]), len(v)))
            else:
                text.append(indentation.join(str(v).split("\n")))
        return "".join(text)

    def _search(self, compiled_pattern, search_all):
        # Shared engine for search/search_all; recurses into nested
        # Containers/ListContainers before matching own keys.
        items = []
        for key in self.keys():
            try:
                if isinstance(self[key], (Container, ListContainer)):
                    ret = self[key]._search(compiled_pattern, search_all)
                    if ret is not None:
                        if search_all:
                            items.extend(ret)
                        else:
                            return ret
                elif compiled_pattern.match(key):
                    if search_all:
                        items.append(self[key])
                    else:
                        return self[key]
            except:
                # NOTE(review): bare except silently skips entries that fail
                # (e.g. non-string keys); errors are indistinguishable from
                # non-matches.
                pass
        if search_all:
            return items
        else:
            return None

    def search(self, pattern):
        """
        Searches a container (non-recursively) using regex.
        """
        compiled_pattern = re.compile(pattern)
        return self._search(compiled_pattern, False)

    def search_all(self, pattern):
        """
        Searches a container (recursively) using regex.
        """
        compiled_pattern = re.compile(pattern)
        return self._search(compiled_pattern, True)


class ListContainer(list):
    r"""
    Generic container like list. Provides pretty-printing. Also provides regex searching.

    Example::

        >>> ListContainer()
        >>> ListContainer([1, 2, 3])
    """

    @recursion_lock()
    def __repr__(self):
        return "ListContainer(%s)" % (list.__repr__(self), )

    @recursion_lock()
    def __str__(self):
        indentation = "\n    "
        text = ["ListContainer: "]
        for k in self:
            text.append(indentation)
            lines = str(k).split("\n")
            text.append(indentation.join(lines))
        return "".join(text)

    def _search(self, compiled_pattern, search_all):
        # Only nested containers can match; plain items without _search are
        # skipped via the except clause.
        items = []
        for item in self:
            try:
                ret = item._search(compiled_pattern, search_all)
            except:
                continue
            if ret is not None:
                if search_all:
                    items.extend(ret)
                else:
                    return ret
        if search_all:
            return items
        else:
            return None

    def search(self, pattern):
        """
        Searches a container (non-recursively) using regex.
        """
        compiled_pattern = re.compile(pattern)
        return self._search(compiled_pattern, False)

    def search_all(self, pattern):
        """
        Searches a container (recursively) using regex.
        """
        compiled_pattern = re.compile(pattern)
        return self._search(compiled_pattern, True)
# Map an integer in the inclusive range 0-255 to its printable ASCII character
# ('.' for non-printable bytes) and to its two-digit uppercase hex form.
PRINTABLE = [chr(i) if 32 <= i < 128 else '.' for i in range(256)]
HEXPRINT = [format(i, '02X') for i in range(256)]


def hexdump(data, linesize):
    r"""
    Turns bytes into a unicode string in classic hexdump layout: an offset
    column, a hex column, and a printable-ASCII column, wrapped in a
    ``hexundump(...)`` shell so the output can be round-tripped.

    :param data: bytes to dump
    :param linesize: integer, bytes shown per row
    :raises ValueError: when data is 16**8 (4294967296) bytes or longer

    ::

        >>> print(hexdump(b'0' * 100, 16))
        hexundump(\"\"\"
        0000   30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30   0000000000000000
        ...
        \"\"\")
    """
    # Offset column width depends on total size: 4 hex digits up to 64 KiB,
    # 8 digits up to 4 GiB; larger inputs are rejected.
    if len(data) < 16**4:
        fmt = "%%04X   %%-%ds   %%s" % (3*linesize-1,)
    elif len(data) < 16**8:
        fmt = "%%08X   %%-%ds   %%s" % (3*linesize-1,)
    else:
        raise ValueError("hexdump cannot process more than 16**8 or 4294967296 bytes")
    prettylines = []
    prettylines.append('hexundump("""')
    for i in range(0, len(data), linesize):
        line = data[i:i+linesize]
        hextext = " ".join(HEXPRINT[b] for b in line)
        rawtext = "".join(PRINTABLE[b] for b in line)
        prettylines.append(fmt % (i, str(hextext), str(rawtext)))
    prettylines.append('""")')
    prettylines.append("")
    return "\n".join(prettylines)


def hexundump(data, linesize):
    r"""
    Reverse of `hexdump`: parses the hex column of each row back into bytes.

    :param data: string produced by `hexdump` (or same layout)
    :param linesize: integer, must match the value used by `hexdump`
    """
    raw = []
    # [1:-2] drops the 'hexundump("""' header and the '""")' + '' trailer.
    for line in data.split("\n")[1:-2]:
        # Drop the offset column, then parse only the hex column; the raw
        # ASCII column lies beyond 3*linesize characters and is ignored.
        line = line[line.find(" "):].lstrip()
        # Renamed from `bytes`, which shadowed the builtin.
        chunk = [bytes([int(s, 16)]) for s in line[:3*linesize].split()]
        raw.extend(chunk)
    return b"".join(raw)
def reprstring(data):
    """Render data as a repr that always carries an explicit b- or u- prefix."""
    rendered = repr(data)
    if isinstance(data, bytes):
        return rendered
    if isinstance(data, str):
        return "u" + rendered

def trimstring(data):
    """Render data as a repr with the b- or u- prefix stripped off."""
    rendered = repr(data)
    if isinstance(data, bytes):
        return rendered[1:]
    if isinstance(data, str):
        return rendered

def integers2bytes(ints):
    """Collect an iterable of integers (0..255) into a bytes object."""
    return bytes(bytearray(ints))

def bytes2integers(data):
    """Expand a bytes object into a list of integers, one per byte."""
    return [octet for octet in data]
a/.venv/lib/python3.9/site-packages/construct_typed/dataclass_struct.py b/.venv/lib/python3.9/site-packages/construct_typed/dataclass_struct.py new file mode 100644 index 0000000..78d2e59 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/construct_typed/dataclass_struct.py @@ -0,0 +1,272 @@ +# -*- coding: utf-8 -*- +# pyright: strict +import dataclasses +import textwrap +import typing as t + +import construct as cs +from construct.lib.containers import ( + globalPrintFullStrings, + globalPrintPrivateEntries, + recursion_lock, +) +from construct.lib.py3compat import bytestringtype, reprstring, unicodestringtype + +from .generic_wrapper import Adapter, Construct, Context, ParsedType, PathType + + +class DataclassMixin: + """ + Mixin for the dataclasses which are passed to "DataclassStruct" and "DataclassBitStruct". + + Note: This implementation is different to the 'cs.Container' of the original 'construct' + library. In the original 'cs.Container' some names like "update", "keys", "items", ... can + only accessed via key access (square brackets) and not via attribute access (dot operator), + because they are also method names. This implementation is based on "dataclasses.dataclass" + which only uses modul-level instead of instance-level helper methods.So no instance-level + methods exists and every name can be used. 
+ """ + + def __getitem__(self, key: str) -> t.Any: + return getattr(self, key) + + def __setitem__(self, key: str, value: t.Any) -> None: + setattr(self, key, value) + + @recursion_lock() + def __str__(self) -> str: + indentation = "\n " + text = [f"{self.__class__.__name__}: "] + + for field in dataclasses.fields(self): + k = field.name + v = getattr(self, field.name) + if k.startswith("_") and not globalPrintPrivateEntries: + continue + text.extend([indentation, str(k), " = "]) + if v.__class__.__name__ == "EnumInteger": + text.append("(enum) (unknown) %s" % (v,)) + elif v.__class__.__name__ == "EnumIntegerString": + text.append("(enum) %s %s" % (v, v.intvalue)) + elif v.__class__.__name__ in ["HexDisplayedBytes", "HexDumpDisplayedBytes"]: + text.append(indentation.join(str(v).split("\n"))) + elif isinstance(v, bytestringtype): + printingcap = 16 + if len(v) <= printingcap or globalPrintFullStrings: + text.append("%s (total %d)" % (reprstring(v), len(v))) + else: + text.append( + "%s... (truncated, total %d)" + % (reprstring(v[:printingcap]), len(v)) + ) + elif isinstance(v, unicodestringtype): + printingcap = 32 + if len(v) <= printingcap or globalPrintFullStrings: + text.append("%s (total %d)" % (reprstring(v), len(v))) + else: + text.append( + "%s... (truncated, total %d)" + % (reprstring(v[:printingcap]), len(v)) + ) + else: + text.append(indentation.join(str(v).split("\n"))) + return "".join(text) + + +def csfield( + subcon: Construct[ParsedType, t.Any], + doc: t.Optional[str] = None, + parsed: t.Optional[t.Callable[[t.Any, Context], None]] = None, +) -> ParsedType: + """ + Helper method for "DataclassStruct" and "DataclassBitStruct" to create the dataclass fields. + + This method also processes Const and Default, to pass these values als default values to the dataclass. 
+ """ + orig_subcon = subcon + + # Rename subcon, if doc or parsed are available + if (doc is not None) or (parsed is not None): + if doc is not None: + doc = textwrap.dedent(doc).strip("\n") + subcon = cs.Renamed(subcon, newdocs=doc, newparsed=parsed) + + if orig_subcon.flagbuildnone is True: + init = False + default = None + else: + init = True + default = dataclasses.MISSING + + # Set default values in case of special sucons + if isinstance(orig_subcon, cs.Const): + const_subcon: "cs.Const[t.Any, t.Any, t.Any, t.Any]" = orig_subcon + default = const_subcon.value + elif isinstance(orig_subcon, cs.Default): + default_subcon: "cs.Default[t.Any, t.Any, t.Any, t.Any]" = orig_subcon + if callable(default_subcon.value): + default = None # context lambda is only defined at parsing/building + else: + default = default_subcon.value + + return t.cast( + ParsedType, + dataclasses.field( + default=default, + init=init, + metadata={"subcon": subcon}, + ), + ) + + +DataclassType = t.TypeVar("DataclassType", bound=DataclassMixin) + + +class DataclassStruct(Adapter[t.Any, t.Any, DataclassType, DataclassType]): + """ + Adapter for a dataclasses for optimised type hints / static autocompletion in comparision to the original Struct. + + Before this construct can be created a dataclasses.dataclass type must be created, which must also derive from DataclassMixin. In this dataclass all fields must be assigned to a construct type using csfield. + + Internally, all fields are converted to a Struct, which does the actual parsing/building. + + Parses to a dataclasses.dataclass instance, and builds from such instance. Size is the sum of all subcon sizes, unless any subcon raises SizeofError. 
+ + :param dc_type: Type of the dataclass, which also inherits from DataclassMixin + :param reverse: Flag if the fields of the dataclass should be reversed + + Example:: + + >>> import dataclasses + >>> from construct import Bytes, Int8ub, this + >>> from construct_typed import DataclassMixin, DataclassStruct, csfield + >>> @dataclasses.dataclass + ... class Image(DataclassMixin): + ... width: int = csfield(Int8ub) + ... height: int = csfield(Int8ub) + ... pixels: bytes = csfield(Bytes(this.height * this.width)) + >>> d = DataclassStruct(Image) + >>> d.parse(b"\x01\x0212") + Image(width=1, height=2, pixels=b'12') + """ + + subcon: "cs.Struct[t.Any, t.Any]" + if t.TYPE_CHECKING: + + def __new__( + cls, + dc_type: t.Type[DataclassType], + reverse: bool = False, + ) -> "DataclassStruct[DataclassType]": + ... + + def __init__( + self, + dc_type: t.Type[DataclassType], + reverse: bool = False, + ) -> None: + if not issubclass(dc_type, DataclassMixin): + raise TypeError(f"'{repr(dc_type)}' has to be a '{repr(DataclassMixin)}'") + if not dataclasses.is_dataclass(dc_type): + raise TypeError(f"'{repr(dc_type)}' has to be a 'dataclasses.dataclass'") + self.dc_type = dc_type + self.reverse = reverse + + # get all fields from the dataclass + fields = dataclasses.fields(self.dc_type) + if self.reverse: + fields = tuple(reversed(fields)) + + # extract the construct formats from the struct_type + subcon_fields = {} + for field in fields: + subcon_fields[field.name] = field.metadata["subcon"] + + # init adatper + super().__init__(cs.Struct(**subcon_fields)) # type: ignore + + def __getattr__(self, name: str) -> t.Any: + return getattr(self.subcon, name) + + def _decode( + self, obj: "cs.Container[t.Any]", context: Context, path: PathType + ) -> DataclassType: + # get all fields from the dataclass + fields = dataclasses.fields(self.dc_type) + + # extract all fields from the container, that are used for create the dataclass object + dc_init = {} + for field in fields: + if 
field.init: + value = obj[field.name] + dc_init[field.name] = value + + # create object of dataclass + dc = self.dc_type(**dc_init) # type: ignore + + # extract all other values from the container, an pass it to the dataclass + for field in fields: + if not field.init: + value = obj[field.name] + setattr(dc, field.name, value) + + return dc + + def _encode( + self, obj: DataclassType, context: Context, path: PathType + ) -> t.Dict[str, t.Any]: + if not isinstance(obj, self.dc_type): + raise TypeError(f"'{repr(obj)}' has to be of type {repr(self.dc_type)}") + + # get all fields from the dataclass + fields = dataclasses.fields(self.dc_type) + + # extract all fields from the container, that are used for create the dataclass object + ret_dict: t.Dict[str, t.Any] = {} + for field in fields: + value = getattr(obj, field.name) + ret_dict[field.name] = value + + return ret_dict + + +def DataclassBitStruct( + dc_type: t.Type[DataclassType], reverse: bool = False +) -> t.Union[ + "cs.Transformed[DataclassType, DataclassType]", + "cs.Restreamed[DataclassType, DataclassType]", +]: + r""" + Makes a DataclassStruct inside a Bitwise. + + See :class:`~construct.core.Bitwise` and :class:`~construct_typed.dataclass_struct.DatclassStruct` for semantics and raisable exceptions. + + :param dc_type: Type of the dataclass, which also inherits from DataclassMixin + :param reverse: Flag if the fields of the dataclass should be reversed + + Example:: + + DataclassBitStruct <--> Bitwise(DataclassStruct(...)) + >>> import dataclasses + >>> from construct import BitsInteger, Flag, Nibble, Padding + >>> from construct_typed import DataclassBitStruct, DataclassMixin, csfield + >>> @dataclasses.dataclass + ... class TestDataclass(DataclassMixin): + ... a: int = csfield(Flag) + ... b: int = csfield(Nibble) + ... c: int = csfield(BitsInteger(10)) + ... 
d: None = csfield(Padding(1)) + >>> d = DataclassBitStruct(TestDataclass) + >>> d.parse(b"\x01\x02") + TestDataclass(a=False, b=0, c=129, d=None) + """ + return cs.Bitwise(DataclassStruct(dc_type, reverse)) + + +# support legacy names +TStruct = DataclassStruct +TBitStruct = DataclassBitStruct +TContainerMixin = DataclassMixin +TContainerBase = DataclassMixin +TStructField = csfield +sfield = csfield \ No newline at end of file diff --git a/.venv/lib/python3.9/site-packages/construct_typed/generic_wrapper.py b/.venv/lib/python3.9/site-packages/construct_typed/generic_wrapper.py new file mode 100644 index 0000000..aa4af4c --- /dev/null +++ b/.venv/lib/python3.9/site-packages/construct_typed/generic_wrapper.py @@ -0,0 +1,41 @@ +import typing as t + +ParsedType = t.TypeVar("ParsedType", covariant=True) +BuildTypes = t.TypeVar("BuildTypes", contravariant=True) +SubconParsedType = t.TypeVar("SubconParsedType", covariant=True) +SubconBuildTypes = t.TypeVar("SubconBuildTypes", contravariant=True) +ListType = t.TypeVar("ListType") +ValueType = t.TypeVar("ValueType") + + +if t.TYPE_CHECKING: + # while type checking, the original classes are already generics, because they are defined like this in the stubs. 
+ from construct import Adapter as Adapter + from construct import ConstantOrContextLambda as ConstantOrContextLambda + from construct import Construct as Construct + from construct import Context as Context + from construct import ListContainer as ListContainer + from construct import PathType as PathType + + +else: + import construct as cs + + # at runtime, the original classes are no generics, so whe have to make new classes with generics support + class Construct(t.Generic[ParsedType, BuildTypes], cs.Construct): + pass + + class Adapter( + t.Generic[SubconParsedType, SubconBuildTypes, ParsedType, BuildTypes], + cs.Adapter, + ): + pass + + class ListContainer(t.Generic[ListType], cs.ListContainer): + pass + + class Context: + pass + + ConstantOrContextLambda = t.Union[ValueType, t.Callable[[Context], t.Any]] + PathType = str diff --git a/.venv/lib/python3.9/site-packages/construct_typed/py.typed b/.venv/lib/python3.9/site-packages/construct_typed/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.9/site-packages/construct_typed/tenum.py b/.venv/lib/python3.9/site-packages/construct_typed/tenum.py new file mode 100644 index 0000000..7c93e33 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/construct_typed/tenum.py @@ -0,0 +1,125 @@ +import enum +import typing as t + +from .generic_wrapper import * + + +# ## TEnum ############################################################################################################ +class EnumBase(enum.IntEnum): + """ + Base class for an Enum used in `construct_typed.TEnum`. + + This class extends the standard `enum.IntEnum`, so that missing values are automatically generated. + """ + + # Extend the enum type with __missing__ method. So if a enum value + # not found in the enum, a new pseudo member is created. 
+ # The idea is taken from: https://stackoverflow.com/a/57179436 + @classmethod + def _missing_(cls, value: t.Any) -> t.Optional["EnumBase"]: + if isinstance(value, int): + return cls._create_pseudo_member_(value) + return None # will raise the ValueError in Enum.__new__ + + @classmethod + def _create_pseudo_member_(cls, value: int) -> "EnumBase": + pseudo_member = cls._value2member_map_.get(value, None) # type: ignore + if pseudo_member is None: + new_member = int.__new__(cls, value) + # I expect a name attribute to hold a string, hence str(value) + # However, new_member._name_ = value works, too + new_member._name_ = str(value) + new_member._value_ = value + pseudo_member = cls._value2member_map_.setdefault(value, new_member) # type: ignore + return pseudo_member # type: ignore + + +EnumType = t.TypeVar("EnumType", bound=EnumBase) + + +class TEnum(Adapter[int, int, EnumType, EnumType]): + """ + Typed enum. + """ + + if t.TYPE_CHECKING: + + def __new__( + cls, subcon: Construct[int, int], enum_type: t.Type[EnumType] + ) -> "TEnum[EnumType]": + ... 
+ + def __init__(self, subcon: Construct[int, int], enum_type: t.Type[EnumType]): + if not issubclass(enum_type, EnumBase): + raise TypeError( + "'{}' has to be a '{}'".format(repr(enum_type), repr(EnumBase)) + ) + + # save enum type + self.enum_type = t.cast(t.Type[EnumType], enum_type) # type: ignore + + # init adatper + super(TEnum, self).__init__(subcon) # type: ignore + + def _decode(self, obj: int, context: Context, path: PathType) -> EnumType: + return self.enum_type(obj) + + def _encode( + self, + obj: EnumType, + context: Context, + path: PathType, + ) -> int: + if isinstance(obj, self.enum_type): + return int(obj) + raise TypeError( + "'{}' has to be of type {}".format(repr(obj), repr(self.enum_type)) + ) + + +# ## TFlagsEnum ####################################################################################################### +class FlagsEnumBase(enum.IntFlag): + pass + + +FlagsEnumType = t.TypeVar("FlagsEnumType", bound=FlagsEnumBase) + + +class TFlagsEnum(Adapter[int, int, FlagsEnumType, FlagsEnumType]): + """ + Typed enum. + """ + + if t.TYPE_CHECKING: + + def __new__( + cls, subcon: Construct[int, int], enum_type: t.Type[FlagsEnumType] + ) -> "TFlagsEnum[FlagsEnumType]": + ... 
+ + def __init__(self, subcon: Construct[int, int], enum_type: t.Type[FlagsEnumType]): + if not issubclass(enum_type, FlagsEnumBase): + raise TypeError( + "'{}' has to be a '{}'".format(repr(enum_type), repr(FlagsEnumBase)) + ) + + # save enum type + self.enum_type = t.cast(t.Type[FlagsEnumType], enum_type) # type: ignore + + # init adatper + super(TFlagsEnum, self).__init__(subcon) # type: ignore + + def _decode(self, obj: int, context: Context, path: PathType) -> FlagsEnumType: + return self.enum_type(obj) + + def _encode( + self, + obj: FlagsEnumType, + context: Context, + path: PathType, + ) -> int: + if isinstance(obj, self.enum_type): + return int(obj) + raise TypeError( + "'{}' has to be of type {}".format(repr(obj), repr(self.enum_type)) + ) diff --git a/.venv/lib/python3.9/site-packages/construct_typed/version.py b/.venv/lib/python3.9/site-packages/construct_typed/version.py new file mode 100644 index 0000000..1a555cf --- /dev/null +++ b/.venv/lib/python3.9/site-packages/construct_typed/version.py @@ -0,0 +1,2 @@ +version = (0, 5, 2) +version_string = "0.5.2" \ No newline at end of file diff --git a/.venv/lib/python3.9/site-packages/construct_typing-0.5.2.dist-info/INSTALLER b/.venv/lib/python3.9/site-packages/construct_typing-0.5.2.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/construct_typing-0.5.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.9/site-packages/construct_typing-0.5.2.dist-info/LICENSE b/.venv/lib/python3.9/site-packages/construct_typing-0.5.2.dist-info/LICENSE new file mode 100644 index 0000000..fefaa1e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/construct_typing-0.5.2.dist-info/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 timrid + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without 
restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/.venv/lib/python3.9/site-packages/construct_typing-0.5.2.dist-info/METADATA b/.venv/lib/python3.9/site-packages/construct_typing-0.5.2.dist-info/METADATA new file mode 100644 index 0000000..ff2b638 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/construct_typing-0.5.2.dist-info/METADATA @@ -0,0 +1,141 @@ +Metadata-Version: 2.1 +Name: construct-typing +Version: 0.5.2 +Summary: Extension for the python package 'construct' that adds typing features +Home-page: https://github.com/timrid/construct-typing +Author: Tim Riddermann +License: MIT +Keywords: construct,kaitai,declarative,data structure,struct,binary,symmetric,parser,builder,parsing,building,pack,unpack,packer,unpacker,bitstring,bytestring,annotation,type hint,typing,typed,bitstruct,PEP 561 +Platform: POSIX +Platform: Windows +Classifier: Development Status :: 3 - Alpha +Classifier: License :: OSI Approved :: MIT License +Classifier: Intended Audience :: Developers +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Software Development :: Build Tools +Classifier: Topic :: Software Development :: Code 
Generators +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Typing :: Typed +Requires-Python: >=3.7 +Description-Content-Type: text/markdown +License-File: LICENSE +Requires-Dist: construct (==2.10.67) + +# construct-typing +[![PyPI](https://img.shields.io/pypi/v/construct-typing)](https://pypi.org/project/construct-typing/) +![PyPI - Implementation](https://img.shields.io/pypi/implementation/construct-typing) +![PyPI - Python Version](https://img.shields.io/pypi/pyversions/construct-typing) +![GitHub](https://img.shields.io/github/license/timrid/construct-typing) + +This project is an extension of the python package [*construct*](https://pypi.org/project/construct/), which is a powerful **declarative** and **symmetrical** parser and builder for binary data. This Repository consists of two packages: + +- **construct-stubs**: Adding .pyi for the whole *construct 2.10* package (according to [PEP 561 stub-only packages](https://www.python.org/dev/peps/pep-0561/#stub-only-packages)) +- **construct_typed**: Adding additional classes that help with autocompletion and additional type hints. + +## Installation +This package comply to [PEP 561](https://www.python.org/dev/peps/pep-0561/). So most of the static code analysers will recognise the stubs automatically. The installation only requires: +``` +pip install construct-typing +``` + +## Tests +The stubs are tested against the pytests of the *construct* package in a slightly modified form. Since the tests are relatively detailed I think most cases are covered. + +The new typed constructs have new written pytests, which also passes all pytests and the static type checkers. 
+ +The following static type checkers are fully supported: +- mypy +- pyright + +## Explanation +### Stubs +The **construct-stubs** package is used for creating type hints for the orignial *construct* package. In particular the `build` and `parse` methods get type hints. So the core of the stubs are the `TypeVar`'s `ParsedType` and `BuildTypes`: +- `Construct.build`: converts an object of one of the types defined by `BuildTypes` to a `bytes` object. +- `Construct.parse`: converts a `bytes` object to an object of type `ParsedType`. + +For each `Construct` the stub file defines to which type it parses to and from which it can be build. For example: + +| Construct | parses to (ParsedType) | builds from (BuildTypes) | +| -------------------- | ------------------------------ | ---------------------------------------- | +| `Int16ub` | `int` | `int` | +| `Bytes` | `bytes` | `bytes`, `bytearray` or `memoryview` | +| `Array(5, Int16ub)` | `ListContainer[int]` | `typing.List[int]` | +| `Struct("i" / Byte)` | `Container[typing.Any]` | `typing.Dict[str, typing.Any]` or `None` | + +The problem is to describe the more complex constructs like: + - `Sequence`, `FocusedSeq` which has heterogenous subcons in comparison to an `Array` with only homogenous subcons. + - `Struct`, `BitStruct`, `LazyStruct`, `Union` which has heterogenous and named subcons. + +Currently only the very unspecific type `typing.Any` can be used as type hint (maybe in the future it can be optimised a little, when [variadic generics](https://mail.python.org/archives/list/typing-sig@python.org/thread/SQVTQYWIOI4TIO7NNBTFFWFMSMS2TA4J/) become available). But the biggest disadvantage is that autocompletion for the named subcons is not available. + +Note: The stubs are based on *construct* in Version 2.10. + + +### Typed +**!!! 
EXPERIMENTAL VERSION !!!** + +To include autocompletion and further enhance the type hints for these complex constructs the **construct_typed** package is used as an extension to the original *construct* package. It is mainly a few Adapters with the focus on type hints. + +It implements the following new constructs: +- `DataclassStruct`: similar to `construct.Struct` but strictly tied to `DataclassMixin` and `@dataclasses.dataclass` +- `DataclassBitStruct`: similar to `construct.BitStruct` but strictly tied to `DataclassMixin` and `@dataclasses.dataclass` +- `TEnum`: similar to `construct.Enum` but strictly tied to a `TEnumBase` class +- `TFlagsEnum`: similar to `construct.FlagsEnum` but strictly tied to a `TFlagsEnumBase` class + +These types are strongly typed, which means that there is no difference between the `ParsedType` and the `BuildTypes`. So to build one of the constructs the correct type is enforced. The disadvantage is that the code will be a little bit longer, because you can not for example use a normal `dict` to build an `DataclassStruct`. But the big advantage is, that if you use the correct container type instead of a `dict`, the static code analyses can do its magic and find potential type errors and missing values without running the code itself. 
+ + +A short example: + +```python +import dataclasses +import typing as t +from construct import Array, Byte, Const, Int8ub, this +from construct_typed import DataclassMixin, DataclassStruct, EnumBase, TEnum, csfield + +class Orientation(EnumBase): + HORIZONTAL = 0 + VERTICAL = 1 + +@dataclasses.dataclass +class Image(DataclassMixin): + signature: bytes = csfield(Const(b"BMP")) + orientation: Orientation = csfield(TEnum(Int8ub, Orientation)) + width: int = csfield(Int8ub) + height: int = csfield(Int8ub) + pixels: t.List[int] = csfield(Array(this.width * this.height, Byte)) + +format = DataclassStruct(Image) +obj = Image( + orientation=Orientation.VERTICAL, + width=3, + height=2, + pixels=[7, 8, 9, 11, 12, 13], +) +print(format.build(obj)) +print(format.parse(b"BMP\x01\x03\x02\x07\x08\t\x0b\x0c\r")) +``` +Output: +``` +b'BMP\x01\x03\x02\x07\x08\t\x0b\x0c\r' +Image: + signature = b'BMP' (total 3) + orientation = Orientation.VERTICAL + width = 3 + height = 2 + pixels = ListContainer: + 7 + 8 + 9 + 11 + 12 + 13 +``` + + + + diff --git a/.venv/lib/python3.9/site-packages/construct_typing-0.5.2.dist-info/RECORD b/.venv/lib/python3.9/site-packages/construct_typing-0.5.2.dist-info/RECORD new file mode 100644 index 0000000..efe6b5d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/construct_typing-0.5.2.dist-info/RECORD @@ -0,0 +1,28 @@ +construct-stubs/__init__.pyi,sha256=fZAAeMk7Q2IAz7A1DlOHXKDxHRzxMjx3kBTEq9NKIdg,3935 +construct-stubs/core.pyi,sha256=KNJ_BCmZzM6eMS0myK22fjzAH_o1K8-6iGDS2tMT41I,41574 +construct-stubs/debug.pyi,sha256=z98VOn11ILF51JARYGHz9ddC770sGpNPTIWb3glaqW4,333 +construct-stubs/expr.pyi,sha256=usjbWFoHg-SXysltiOrhrIPx-cmrhvg7dAr4AeRUaI8,28341 +construct-stubs/lib/__init__.pyi,sha256=Qe9PE7wG6WBTbCv2fBdJZB4hl-gMg3W0ZIECApDMN3s,1116 +construct-stubs/lib/binary.pyi,sha256=YiJzmVuRJXIAdN0xintsMb-Od8oVaHGNUKCW4SxfYSI,577 +construct-stubs/lib/bitstream.pyi,sha256=kCGYOZVr4_9mN3UESAXMkxIkbHOgXrhN2Aqkj9a34t0,1507 
+construct-stubs/lib/containers.pyi,sha256=3rBSgDD9sW7L83PqFHBOTzOebn0IC2CAl2y-qaSIE4w,1156 +construct-stubs/lib/hex.pyi,sha256=BxFiPdsldT3CaEh831nND-ZoQyrIMHKDE10yg5yiOlI,261 +construct-stubs/lib/py3compat.pyi,sha256=pQWCA2VG6kTzD2aDAQKsUccpL2ydV9Nbs1G_bY0_Xr4,596 +construct-stubs/version.pyi,sha256=jXTDom-6iCZfcAaf8qbnAs9dxvlp1JEnqE5kSLSt3YQ,90 +construct_typed/__init__.py,sha256=XpirgGDlfw1mqvpEqP4km1GRT73PBSYColrZ0ULyZy4,806 +construct_typed/__pycache__/__init__.cpython-39.pyc,, +construct_typed/__pycache__/dataclass_struct.cpython-39.pyc,, +construct_typed/__pycache__/generic_wrapper.cpython-39.pyc,, +construct_typed/__pycache__/tenum.cpython-39.pyc,, +construct_typed/__pycache__/version.cpython-39.pyc,, +construct_typed/dataclass_struct.py,sha256=RbE80SYtIU039PYGP6buCvUANpwPKJJtKFcCr7EZDLk,10061 +construct_typed/generic_wrapper.py,sha256=tOzPI376k-YuLdg00rBhWDDNGr7tkgeFb_4avhG-rlA,1371 +construct_typed/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +construct_typed/tenum.py,sha256=u9wdColwSOTHZ7hRqYG-btcUoi1XuMllmooMxZte-jo,4015 +construct_typed/version.py,sha256=cqJR7c0CsiyRgONYnbcSmEX8UaBts4lR5f24g8gp0ew,44 +construct_typing-0.5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +construct_typing-0.5.2.dist-info/LICENSE,sha256=2jsxntLd5gIKkh-pOhExtb_YxhjAm-GL1KHOdBoeSMY,1063 +construct_typing-0.5.2.dist-info/METADATA,sha256=uHp2Hqffi7eFul4_HMCrBSDryf1g4MneZ0CjKCNEh6c,6963 +construct_typing-0.5.2.dist-info/RECORD,, +construct_typing-0.5.2.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 +construct_typing-0.5.2.dist-info/top_level.txt,sha256=Nu1xIXFb7aYaqn7GQ3lAIyy2QiaB9U9XFDoITj6vZ-U,32 diff --git a/.venv/lib/python3.9/site-packages/construct_typing-0.5.2.dist-info/WHEEL b/.venv/lib/python3.9/site-packages/construct_typing-0.5.2.dist-info/WHEEL new file mode 100644 index 0000000..5bad85f --- /dev/null +++ b/.venv/lib/python3.9/site-packages/construct_typing-0.5.2.dist-info/WHEEL @@ -0,0 +1,5 @@ 
+Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/.venv/lib/python3.9/site-packages/construct_typing-0.5.2.dist-info/top_level.txt b/.venv/lib/python3.9/site-packages/construct_typing-0.5.2.dist-info/top_level.txt new file mode 100644 index 0000000..26d90c8 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/construct_typing-0.5.2.dist-info/top_level.txt @@ -0,0 +1,2 @@ +construct-stubs +construct_typed diff --git a/.venv/lib/python3.9/site-packages/distutils-precedence.pth b/.venv/lib/python3.9/site-packages/distutils-precedence.pth new file mode 100644 index 0000000..7f009fe --- /dev/null +++ b/.venv/lib/python3.9/site-packages/distutils-precedence.pth @@ -0,0 +1 @@ +import os; var = 'SETUPTOOLS_USE_DISTUTILS'; enabled = os.environ.get(var, 'local') == 'local'; enabled and __import__('_distutils_hack').add_shim(); diff --git a/.venv/lib/python3.9/site-packages/h11-0.12.0.dist-info/INSTALLER b/.venv/lib/python3.9/site-packages/h11-0.12.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/h11-0.12.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.9/site-packages/h11-0.12.0.dist-info/LICENSE.txt b/.venv/lib/python3.9/site-packages/h11-0.12.0.dist-info/LICENSE.txt new file mode 100644 index 0000000..8f080ea --- /dev/null +++ b/.venv/lib/python3.9/site-packages/h11-0.12.0.dist-info/LICENSE.txt @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2016 Nathaniel J. 
Smith and other contributors + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/.venv/lib/python3.9/site-packages/h11-0.12.0.dist-info/METADATA b/.venv/lib/python3.9/site-packages/h11-0.12.0.dist-info/METADATA new file mode 100644 index 0000000..5478c3c --- /dev/null +++ b/.venv/lib/python3.9/site-packages/h11-0.12.0.dist-info/METADATA @@ -0,0 +1,194 @@ +Metadata-Version: 2.1 +Name: h11 +Version: 0.12.0 +Summary: A pure-Python, bring-your-own-I/O implementation of HTTP/1.1 +Home-page: https://github.com/python-hyper/h11 +Author: Nathaniel J. 
Smith +Author-email: njs@pobox.com +License: MIT +Platform: UNKNOWN +Classifier: Development Status :: 3 - Alpha +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Topic :: Internet :: WWW/HTTP +Classifier: Topic :: System :: Networking +Requires-Python: >=3.6 + +h11 +=== + +.. image:: https://travis-ci.org/python-hyper/h11.svg?branch=master + :target: https://travis-ci.org/python-hyper/h11 + :alt: Automated test status + +.. image:: https://codecov.io/gh/python-hyper/h11/branch/master/graph/badge.svg + :target: https://codecov.io/gh/python-hyper/h11 + :alt: Test coverage + +.. image:: https://readthedocs.org/projects/h11/badge/?version=latest + :target: http://h11.readthedocs.io/en/latest/?badge=latest + :alt: Documentation Status + +This is a little HTTP/1.1 library written from scratch in Python, +heavily inspired by `hyper-h2 `_. + +It's a "bring-your-own-I/O" library; h11 contains no IO code +whatsoever. This means you can hook h11 up to your favorite network +API, and that could be anything you want: synchronous, threaded, +asynchronous, or your own implementation of `RFC 6214 +`_ -- h11 won't judge you. +(Compare this to the current state of the art, where every time a `new +network API `_ comes along then someone +gets to start over reimplementing the entire HTTP protocol from +scratch.) Cory Benfield made an `excellent blog post describing the +benefits of this approach +`_, or if you like video +then here's his `PyCon 2016 talk on the same theme +`_. 
+ +This also means that h11 is not immediately useful out of the box: +it's a toolkit for building programs that speak HTTP, not something +that could directly replace ``requests`` or ``twisted.web`` or +whatever. But h11 makes it much easier to implement something like +``requests`` or ``twisted.web``. + +At a high level, working with h11 goes like this: + +1) First, create an ``h11.Connection`` object to track the state of a + single HTTP/1.1 connection. + +2) When you read data off the network, pass it to + ``conn.receive_data(...)``; you'll get back a list of objects + representing high-level HTTP "events". + +3) When you want to send a high-level HTTP event, create the + corresponding "event" object and pass it to ``conn.send(...)``; + this will give you back some bytes that you can then push out + through the network. + +For example, a client might instantiate and then send a +``h11.Request`` object, then zero or more ``h11.Data`` objects for the +request body (e.g., if this is a POST), and then a +``h11.EndOfMessage`` to indicate the end of the message. Then the +server would then send back a ``h11.Response``, some ``h11.Data``, and +its own ``h11.EndOfMessage``. If either side violates the protocol, +you'll get a ``h11.ProtocolError`` exception. + +h11 is suitable for implementing both servers and clients, and has a +pleasantly symmetric API: the events you send as a client are exactly +the ones that you receive as a server and vice-versa. + +`Here's an example of a tiny HTTP client +`_ + +It also has `a fine manual `_. + +FAQ +--- + +*Whyyyyy?* + +I wanted to play with HTTP in `Curio +`__ and `Trio +`__, which at the time didn't have any +HTTP libraries. So I thought, no big deal, Python has, like, a dozen +different implementations of HTTP, surely I can find one that's +reusable. I didn't find one, but I did find Cory's call-to-arms +blog-post. 
So I figured, well, fine, if I have to implement HTTP from +scratch, at least I can make sure no-one *else* has to ever again. + +*Should I use it?* + +Maybe. You should be aware that it's a very young project. But, it's +feature complete and has an exhaustive test-suite and complete docs, +so the next step is for people to try using it and see how it goes +:-). If you do then please let us know -- if nothing else we'll want +to talk to you before making any incompatible changes! + +*What are the features/limitations?* + +Roughly speaking, it's trying to be a robust, complete, and non-hacky +implementation of the first "chapter" of the HTTP/1.1 spec: `RFC 7230: +HTTP/1.1 Message Syntax and Routing +`_. That is, it mostly focuses on +implementing HTTP at the level of taking bytes on and off the wire, +and the headers related to that, and tries to be anal about spec +conformance. It doesn't know about higher-level concerns like URL +routing, conditional GETs, cross-origin cookie policies, or content +negotiation. But it does know how to take care of framing, +cross-version differences in keep-alive handling, and the "obsolete +line folding" rule, so you can focus your energies on the hard / +interesting parts for your application, and it tries to support the +full specification in the sense that any useful HTTP/1.1 conformant +application should be able to use h11. + +It's pure Python, and has no dependencies outside of the standard +library. + +It has a test suite with 100.0% coverage for both statements and +branches. + +Currently it supports Python 3 (testing on 3.6-3.9) and PyPy 3. +The last Python 2-compatible version was h11 0.11.x. +(Originally it had a Cython wrapper for `http-parser +`_ and a beautiful nested state +machine implemented with ``yield from`` to postprocess the output. 
But +I had to take these out -- the new *parser* needs fewer lines-of-code +than the old *parser wrapper*, is written in pure Python, uses no +exotic language syntax, and has more features. It's sad, really; that +old state machine was really slick. I just need a few sentences here +to mourn that.) + +I don't know how fast it is. I haven't benchmarked or profiled it yet, +so it's probably got a few pointless hot spots, and I've been trying +to err on the side of simplicity and robustness instead of +micro-optimization. But at the architectural level I tried hard to +avoid fundamentally bad decisions, e.g., I believe that all the +parsing algorithms remain linear-time even in the face of pathological +input like slowloris, and there are no byte-by-byte loops. (I also +believe that it maintains bounded memory usage in the face of +arbitrary/pathological input.) + +The whole library is ~800 lines-of-code. You can read and understand +the whole thing in less than an hour. Most of the energy invested in +this so far has been spent on trying to keep things simple by +minimizing special-cases and ad hoc state manipulation; even though it +is now quite small and simple, I'm still annoyed that I haven't +figured out how to make it even smaller and simpler. (Unfortunately, +HTTP does not lend itself to simplicity.) + +The API is ~feature complete and I don't expect the general outlines +to change much, but you can't judge an API's ergonomics until you +actually document and use it, so I'd expect some changes in the +details. + +*How do I try it?* + +.. code-block:: sh + + $ pip install h11 + $ git clone git@github.com:python-hyper/h11 + $ cd h11/examples + $ python basic-client.py + +and go from there. + +*License?* + +MIT + +*Code of conduct?* + +Contributors are requested to follow our `code of conduct +`_ in +all project spaces. 
+ + diff --git a/.venv/lib/python3.9/site-packages/h11-0.12.0.dist-info/RECORD b/.venv/lib/python3.9/site-packages/h11-0.12.0.dist-info/RECORD new file mode 100644 index 0000000..693c50a --- /dev/null +++ b/.venv/lib/python3.9/site-packages/h11-0.12.0.dist-info/RECORD @@ -0,0 +1,51 @@ +h11-0.12.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +h11-0.12.0.dist-info/LICENSE.txt,sha256=N9tbuFkm2yikJ6JYZ_ELEjIAOuob5pzLhRE4rbjm82E,1124 +h11-0.12.0.dist-info/METADATA,sha256=_X-4TWqWCxSJ_mDyAbZPzdxHqP290_yVu09nelJOk04,8109 +h11-0.12.0.dist-info/RECORD,, +h11-0.12.0.dist-info/WHEEL,sha256=OqRkF0eY5GHssMorFjlbTIq072vpHpF60fIQA6lS9xA,92 +h11-0.12.0.dist-info/top_level.txt,sha256=F7dC4jl3zeh8TGHEPaWJrMbeuoWbS379Gwdi-Yvdcis,4 +h11/__init__.py,sha256=3gYpvQiX8_6-dyXaAxQt_sIYREVTz1T-zB5Lf4hjKt0,909 +h11/__pycache__/__init__.cpython-39.pyc,, +h11/__pycache__/_abnf.cpython-39.pyc,, +h11/__pycache__/_connection.cpython-39.pyc,, +h11/__pycache__/_events.cpython-39.pyc,, +h11/__pycache__/_headers.cpython-39.pyc,, +h11/__pycache__/_readers.cpython-39.pyc,, +h11/__pycache__/_receivebuffer.cpython-39.pyc,, +h11/__pycache__/_state.cpython-39.pyc,, +h11/__pycache__/_util.cpython-39.pyc,, +h11/__pycache__/_version.cpython-39.pyc,, +h11/__pycache__/_writers.cpython-39.pyc,, +h11/_abnf.py,sha256=tMKqgOEkTHHp8sPd_gmU9Qowe_yXXrihct63RX2zJsg,4637 +h11/_connection.py,sha256=XFZ-LPb3C2vgF4v5ifmcJqX-a2tHkItucJ7uIGvPYZA,24964 +h11/_events.py,sha256=IJtM7i2TxKv0S-givq2b-oehPVsmsbsIelTW6NHcIvg,9834 +h11/_headers.py,sha256=P2h8Q39SIFiRS9CpYjAwo_99XKJUvLHjn0U3tnm4qHE,9130 +h11/_readers.py,sha256=DmJKQwH9Iu7U3WNljKB09d6iJIO6P2_WtylJEY3HvPY,7280 +h11/_receivebuffer.py,sha256=pMOLWjS53haaCm73O6tSWKFD_6BQQWzVLqLCm2ouvcE,5029 +h11/_state.py,sha256=Upg0_uiO_C_QNXHxLB4YUprEeoeso0i_ma12SOrrA54,12167 +h11/_util.py,sha256=Lw_CoIUMR8wjnvgKwo94FCdmFcIbRQsokmxpBV7LcTI,4387 +h11/_version.py,sha256=14wRZqPo0n2t5kFKCQLsldnyZAfOZoKPJbbwJnbGPcc,686 
+h11/_writers.py,sha256=dj8HQ4Pnzq5SjkUZrgh3RKQ6-8Ecy9RKC1MjSo27y4s,4173 +h11/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +h11/tests/__pycache__/__init__.cpython-39.pyc,, +h11/tests/__pycache__/helpers.cpython-39.pyc,, +h11/tests/__pycache__/test_against_stdlib_http.cpython-39.pyc,, +h11/tests/__pycache__/test_connection.cpython-39.pyc,, +h11/tests/__pycache__/test_events.cpython-39.pyc,, +h11/tests/__pycache__/test_headers.cpython-39.pyc,, +h11/tests/__pycache__/test_helpers.cpython-39.pyc,, +h11/tests/__pycache__/test_io.cpython-39.pyc,, +h11/tests/__pycache__/test_receivebuffer.cpython-39.pyc,, +h11/tests/__pycache__/test_state.cpython-39.pyc,, +h11/tests/__pycache__/test_util.cpython-39.pyc,, +h11/tests/data/test-file,sha256=ZJ03Rqs98oJw29OHzJg7LlMzyGQaRAY0r3AqBeM2wVU,65 +h11/tests/helpers.py,sha256=nKheRzldPf278C81d_9_Mb9yWsYJ5udwKg_oq-fAz-U,2528 +h11/tests/test_against_stdlib_http.py,sha256=aA4oDd3_jXkapvW0ER9dbGxIiNt6Ytsfs3U2Rd5XtUc,3700 +h11/tests/test_connection.py,sha256=1WybI9IQROZ0QPtR2wQjetPIR_Jwsvw5i5j2fO7XtcI,36375 +h11/tests/test_events.py,sha256=RTPFBIg81Muc7ZoDhsLwaZxthD76R1UCzHF5nzsbM-Q,5182 +h11/tests/test_headers.py,sha256=pa-WMjCk8ZXJFABkojr2db7ZKrgNKiwl-D-hjjt6-Eg,5390 +h11/tests/test_helpers.py,sha256=mPOAiv4HtyG0_T23K_ihh1JUs0y71ykD47c9r3iVtz0,573 +h11/tests/test_io.py,sha256=oaIEAy3ktA_e1xuyP09fX_GiSlS7GKMlFhQIdkg-EhI,15494 +h11/tests/test_receivebuffer.py,sha256=nZ9_LXj3wfyOn4dkgvjnDjZeNTEtxO8-lNphAB0FVF0,3399 +h11/tests/test_state.py,sha256=JMKqA2d2wtskf7FbsAr1s9qsIul4WtwdXVAOCUJgalk,8551 +h11/tests/test_util.py,sha256=j28tMloUSuhlpUxmgvS1PRurRFSbyzWb7yCTp6qy9_Q,2710 diff --git a/.venv/lib/python3.9/site-packages/h11-0.12.0.dist-info/WHEEL b/.venv/lib/python3.9/site-packages/h11-0.12.0.dist-info/WHEEL new file mode 100644 index 0000000..385faab --- /dev/null +++ b/.venv/lib/python3.9/site-packages/h11-0.12.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.36.2) +Root-Is-Purelib: 
true +Tag: py3-none-any + diff --git a/.venv/lib/python3.9/site-packages/h11-0.12.0.dist-info/top_level.txt b/.venv/lib/python3.9/site-packages/h11-0.12.0.dist-info/top_level.txt new file mode 100644 index 0000000..0d24def --- /dev/null +++ b/.venv/lib/python3.9/site-packages/h11-0.12.0.dist-info/top_level.txt @@ -0,0 +1 @@ +h11 diff --git a/.venv/lib/python3.9/site-packages/h11/__init__.py b/.venv/lib/python3.9/site-packages/h11/__init__.py new file mode 100644 index 0000000..ae39e01 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/h11/__init__.py @@ -0,0 +1,21 @@ +# A highish-level implementation of the HTTP/1.1 wire protocol (RFC 7230), +# containing no networking code at all, loosely modelled on hyper-h2's generic +# implementation of HTTP/2 (and in particular the h2.connection.H2Connection +# class). There's still a bunch of subtle details you need to get right if you +# want to make this actually useful, because it doesn't implement all the +# semantics to check that what you're asking to write to the wire is sensible, +# but at least it gets you out of dealing with the wire itself. + +from ._connection import * +from ._events import * +from ._state import * +from ._util import LocalProtocolError, ProtocolError, RemoteProtocolError +from ._version import __version__ + +PRODUCT_ID = "python-h11/" + __version__ + + +__all__ = ["ProtocolError", "LocalProtocolError", "RemoteProtocolError"] +__all__ += _events.__all__ +__all__ += _connection.__all__ +__all__ += _state.__all__ diff --git a/.venv/lib/python3.9/site-packages/h11/_abnf.py b/.venv/lib/python3.9/site-packages/h11/_abnf.py new file mode 100644 index 0000000..e6d49e1 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/h11/_abnf.py @@ -0,0 +1,129 @@ +# We use native strings for all the re patterns, to take advantage of string +# formatting, and then convert to bytestrings when compiling the final re +# objects. 
+ +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#whitespace +# OWS = *( SP / HTAB ) +# ; optional whitespace +OWS = r"[ \t]*" + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#rule.token.separators +# token = 1*tchar +# +# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" +# / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~" +# / DIGIT / ALPHA +# ; any VCHAR, except delimiters +token = r"[-!#$%&'*+.^_`|~0-9a-zA-Z]+" + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#header.fields +# field-name = token +field_name = token + +# The standard says: +# +# field-value = *( field-content / obs-fold ) +# field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ] +# field-vchar = VCHAR / obs-text +# obs-fold = CRLF 1*( SP / HTAB ) +# ; obsolete line folding +# ; see Section 3.2.4 +# +# https://tools.ietf.org/html/rfc5234#appendix-B.1 +# +# VCHAR = %x21-7E +# ; visible (printing) characters +# +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#rule.quoted-string +# obs-text = %x80-FF +# +# However, the standard definition of field-content is WRONG! It disallows +# fields containing a single visible character surrounded by whitespace, +# e.g. "foo a bar". +# +# See: https://www.rfc-editor.org/errata_search.php?rfc=7230&eid=4189 +# +# So our definition of field_content attempts to fix it up... +# +# Also, we allow lots of control characters, because apparently people assume +# that they're legal in practice (e.g., google analytics makes cookies with +# \x01 in them!): +# https://github.com/python-hyper/h11/issues/57 +# We still don't allow NUL or whitespace, because those are often treated as +# meta-characters and letting them through can lead to nasty issues like SSRF. 
+vchar = r"[\x21-\x7e]" +vchar_or_obs_text = r"[^\x00\s]" +field_vchar = vchar_or_obs_text +field_content = r"{field_vchar}+(?:[ \t]+{field_vchar}+)*".format(**globals()) + +# We handle obs-fold at a different level, and our fixed-up field_content +# already grows to swallow the whole value, so ? instead of * +field_value = r"({field_content})?".format(**globals()) + +# header-field = field-name ":" OWS field-value OWS +header_field = ( + r"(?P{field_name})" + r":" + r"{OWS}" + r"(?P{field_value})" + r"{OWS}".format(**globals()) +) + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#request.line +# +# request-line = method SP request-target SP HTTP-version CRLF +# method = token +# HTTP-version = HTTP-name "/" DIGIT "." DIGIT +# HTTP-name = %x48.54.54.50 ; "HTTP", case-sensitive +# +# request-target is complicated (see RFC 7230 sec 5.3) -- could be path, full +# URL, host+port (for connect), or even "*", but in any case we are guaranteed +# that it contists of the visible printing characters. +method = token +request_target = r"{vchar}+".format(**globals()) +http_version = r"HTTP/(?P[0-9]\.[0-9])" +request_line = ( + r"(?P{method})" + r" " + r"(?P{request_target})" + r" " + r"{http_version}".format(**globals()) +) + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#status.line +# +# status-line = HTTP-version SP status-code SP reason-phrase CRLF +# status-code = 3DIGIT +# reason-phrase = *( HTAB / SP / VCHAR / obs-text ) +status_code = r"[0-9]{3}" +reason_phrase = r"([ \t]|{vchar_or_obs_text})*".format(**globals()) +status_line = ( + r"{http_version}" + r" " + r"(?P{status_code})" + # However, there are apparently a few too many servers out there that just + # leave out the reason phrase: + # https://github.com/scrapy/scrapy/issues/345#issuecomment-281756036 + # https://github.com/seanmonstar/httparse/issues/29 + # so make it optional. ?: is a non-capturing group. 
+ r"(?: (?P{reason_phrase}))?".format(**globals()) +) + +HEXDIG = r"[0-9A-Fa-f]" +# Actually +# +# chunk-size = 1*HEXDIG +# +# but we impose an upper-limit to avoid ridiculosity. len(str(2**64)) == 20 +chunk_size = r"({HEXDIG}){{1,20}}".format(**globals()) +# Actually +# +# chunk-ext = *( ";" chunk-ext-name [ "=" chunk-ext-val ] ) +# +# but we aren't parsing the things so we don't really care. +chunk_ext = r";.*" +chunk_header = ( + r"(?P{chunk_size})" + r"(?P{chunk_ext})?" + r"\r\n".format(**globals()) +) diff --git a/.venv/lib/python3.9/site-packages/h11/_connection.py b/.venv/lib/python3.9/site-packages/h11/_connection.py new file mode 100644 index 0000000..6f796ef --- /dev/null +++ b/.venv/lib/python3.9/site-packages/h11/_connection.py @@ -0,0 +1,585 @@ +# This contains the main Connection class. Everything in h11 revolves around +# this. + +from ._events import * # Import all event types +from ._headers import get_comma_header, has_expect_100_continue, set_comma_header +from ._readers import READERS +from ._receivebuffer import ReceiveBuffer +from ._state import * # Import all state sentinels +from ._state import _SWITCH_CONNECT, _SWITCH_UPGRADE, ConnectionState +from ._util import ( # Import the internal things we need + LocalProtocolError, + make_sentinel, + RemoteProtocolError, +) +from ._writers import WRITERS + +# Everything in __all__ gets re-exported as part of the h11 public API. +__all__ = ["Connection", "NEED_DATA", "PAUSED"] + +NEED_DATA = make_sentinel("NEED_DATA") +PAUSED = make_sentinel("PAUSED") + +# If we ever have this much buffered without it making a complete parseable +# event, we error out. The only time we really buffer is when reading the +# request/reponse line + headers together, so this is effectively the limit on +# the size of that. 
+# +# Some precedents for defaults: +# - node.js: 80 * 1024 +# - tomcat: 8 * 1024 +# - IIS: 16 * 1024 +# - Apache: <8 KiB per line> +DEFAULT_MAX_INCOMPLETE_EVENT_SIZE = 16 * 1024 + +# RFC 7230's rules for connection lifecycles: +# - If either side says they want to close the connection, then the connection +# must close. +# - HTTP/1.1 defaults to keep-alive unless someone says Connection: close +# - HTTP/1.0 defaults to close unless both sides say Connection: keep-alive +# (and even this is a mess -- e.g. if you're implementing a proxy then +# sending Connection: keep-alive is forbidden). +# +# We simplify life by simply not supporting keep-alive with HTTP/1.0 peers. So +# our rule is: +# - If someone says Connection: close, we will close +# - If someone uses HTTP/1.0, we will close. +def _keep_alive(event): + connection = get_comma_header(event.headers, b"connection") + if b"close" in connection: + return False + if getattr(event, "http_version", b"1.1") < b"1.1": + return False + return True + + +def _body_framing(request_method, event): + # Called when we enter SEND_BODY to figure out framing information for + # this body. + # + # These are the only two events that can trigger a SEND_BODY state: + assert type(event) in (Request, Response) + # Returns one of: + # + # ("content-length", count) + # ("chunked", ()) + # ("http/1.0", ()) + # + # which are (lookup key, *args) for constructing body reader/writer + # objects. + # + # Reference: https://tools.ietf.org/html/rfc7230#section-3.3.3 + # + # Step 1: some responses always have an empty body, regardless of what the + # headers say. + if type(event) is Response: + if ( + event.status_code in (204, 304) + or request_method == b"HEAD" + or (request_method == b"CONNECT" and 200 <= event.status_code < 300) + ): + return ("content-length", (0,)) + # Section 3.3.3 also lists another case -- responses with status_code + # < 200. 
For us these are InformationalResponses, not Responses, so + # they can't get into this function in the first place. + assert event.status_code >= 200 + + # Step 2: check for Transfer-Encoding (T-E beats C-L): + transfer_encodings = get_comma_header(event.headers, b"transfer-encoding") + if transfer_encodings: + assert transfer_encodings == [b"chunked"] + return ("chunked", ()) + + # Step 3: check for Content-Length + content_lengths = get_comma_header(event.headers, b"content-length") + if content_lengths: + return ("content-length", (int(content_lengths[0]),)) + + # Step 4: no applicable headers; fallback/default depends on type + if type(event) is Request: + return ("content-length", (0,)) + else: + return ("http/1.0", ()) + + +################################################################ +# +# The main Connection class +# +################################################################ + + +class Connection: + """An object encapsulating the state of an HTTP connection. + + Args: + our_role: If you're implementing a client, pass :data:`h11.CLIENT`. If + you're implementing a server, pass :data:`h11.SERVER`. + + max_incomplete_event_size (int): + The maximum number of bytes we're willing to buffer of an + incomplete event. In practice this mostly sets a limit on the + maximum size of the request/response line + headers. If this is + exceeded, then :meth:`next_event` will raise + :exc:`RemoteProtocolError`. 
+ + """ + + def __init__( + self, our_role, max_incomplete_event_size=DEFAULT_MAX_INCOMPLETE_EVENT_SIZE + ): + self._max_incomplete_event_size = max_incomplete_event_size + # State and role tracking + if our_role not in (CLIENT, SERVER): + raise ValueError("expected CLIENT or SERVER, not {!r}".format(our_role)) + self.our_role = our_role + if our_role is CLIENT: + self.their_role = SERVER + else: + self.their_role = CLIENT + self._cstate = ConnectionState() + + # Callables for converting data->events or vice-versa given the + # current state + self._writer = self._get_io_object(self.our_role, None, WRITERS) + self._reader = self._get_io_object(self.their_role, None, READERS) + + # Holds any unprocessed received data + self._receive_buffer = ReceiveBuffer() + # If this is true, then it indicates that the incoming connection was + # closed *after* the end of whatever's in self._receive_buffer: + self._receive_buffer_closed = False + + # Extra bits of state that don't fit into the state machine. + # + # These two are only used to interpret framing headers for figuring + # out how to read/write response bodies. their_http_version is also + # made available as a convenient public API. + self.their_http_version = None + self._request_method = None + # This is pure flow-control and doesn't at all affect the set of legal + # transitions, so no need to bother ConnectionState with it: + self.client_is_waiting_for_100_continue = False + + @property + def states(self): + """A dictionary like:: + + {CLIENT: , SERVER: } + + See :ref:`state-machine` for details. + + """ + return dict(self._cstate.states) + + @property + def our_state(self): + """The current state of whichever role we are playing. See + :ref:`state-machine` for details. + """ + return self._cstate.states[self.our_role] + + @property + def their_state(self): + """The current state of whichever role we are NOT playing. See + :ref:`state-machine` for details. 
+ """ + return self._cstate.states[self.their_role] + + @property + def they_are_waiting_for_100_continue(self): + return self.their_role is CLIENT and self.client_is_waiting_for_100_continue + + def start_next_cycle(self): + """Attempt to reset our connection state for a new request/response + cycle. + + If both client and server are in :data:`DONE` state, then resets them + both to :data:`IDLE` state in preparation for a new request/response + cycle on this same connection. Otherwise, raises a + :exc:`LocalProtocolError`. + + See :ref:`keepalive-and-pipelining`. + + """ + old_states = dict(self._cstate.states) + self._cstate.start_next_cycle() + self._request_method = None + # self.their_http_version gets left alone, since it presumably lasts + # beyond a single request/response cycle + assert not self.client_is_waiting_for_100_continue + self._respond_to_state_changes(old_states) + + def _process_error(self, role): + old_states = dict(self._cstate.states) + self._cstate.process_error(role) + self._respond_to_state_changes(old_states) + + def _server_switch_event(self, event): + if type(event) is InformationalResponse and event.status_code == 101: + return _SWITCH_UPGRADE + if type(event) is Response: + if ( + _SWITCH_CONNECT in self._cstate.pending_switch_proposals + and 200 <= event.status_code < 300 + ): + return _SWITCH_CONNECT + return None + + # All events go through here + def _process_event(self, role, event): + # First, pass the event through the state machine to make sure it + # succeeds. 
+ old_states = dict(self._cstate.states) + if role is CLIENT and type(event) is Request: + if event.method == b"CONNECT": + self._cstate.process_client_switch_proposal(_SWITCH_CONNECT) + if get_comma_header(event.headers, b"upgrade"): + self._cstate.process_client_switch_proposal(_SWITCH_UPGRADE) + server_switch_event = None + if role is SERVER: + server_switch_event = self._server_switch_event(event) + self._cstate.process_event(role, type(event), server_switch_event) + + # Then perform the updates triggered by it. + + # self._request_method + if type(event) is Request: + self._request_method = event.method + + # self.their_http_version + if role is self.their_role and type(event) in ( + Request, + Response, + InformationalResponse, + ): + self.their_http_version = event.http_version + + # Keep alive handling + # + # RFC 7230 doesn't really say what one should do if Connection: close + # shows up on a 1xx InformationalResponse. I think the idea is that + # this is not supposed to happen. In any case, if it does happen, we + # ignore it. + if type(event) in (Request, Response) and not _keep_alive(event): + self._cstate.process_keep_alive_disabled() + + # 100-continue + if type(event) is Request and has_expect_100_continue(event): + self.client_is_waiting_for_100_continue = True + if type(event) in (InformationalResponse, Response): + self.client_is_waiting_for_100_continue = False + if role is CLIENT and type(event) in (Data, EndOfMessage): + self.client_is_waiting_for_100_continue = False + + self._respond_to_state_changes(old_states, event) + + def _get_io_object(self, role, event, io_dict): + # event may be None; it's only used when entering SEND_BODY + state = self._cstate.states[role] + if state is SEND_BODY: + # Special case: the io_dict has a dict of reader/writer factories + # that depend on the request/response framing. 
+ framing_type, args = _body_framing(self._request_method, event) + return io_dict[SEND_BODY][framing_type](*args) + else: + # General case: the io_dict just has the appropriate reader/writer + # for this state + return io_dict.get((role, state)) + + # This must be called after any action that might have caused + # self._cstate.states to change. + def _respond_to_state_changes(self, old_states, event=None): + # Update reader/writer + if self.our_state != old_states[self.our_role]: + self._writer = self._get_io_object(self.our_role, event, WRITERS) + if self.their_state != old_states[self.their_role]: + self._reader = self._get_io_object(self.their_role, event, READERS) + + @property + def trailing_data(self): + """Data that has been received, but not yet processed, represented as + a tuple with two elements, where the first is a byte-string containing + the unprocessed data itself, and the second is a bool that is True if + the receive connection was closed. + + See :ref:`switching-protocols` for discussion of why you'd want this. + """ + return (bytes(self._receive_buffer), self._receive_buffer_closed) + + def receive_data(self, data): + """Add data to our internal receive buffer. + + This does not actually do any processing on the data, just stores + it. To trigger processing, you have to call :meth:`next_event`. + + Args: + data (:term:`bytes-like object`): + The new data that was just received. + + Special case: If *data* is an empty byte-string like ``b""``, + then this indicates that the remote side has closed the + connection (end of file). Normally this is convenient, because + standard Python APIs like :meth:`file.read` or + :meth:`socket.recv` use ``b""`` to indicate end-of-file, while + other failures to read are indicated using other mechanisms + like raising :exc:`TimeoutError`. When using such an API you + can just blindly pass through whatever you get from ``read`` + to :meth:`receive_data`, and everything will work. 
+ + But, if you have an API where reading an empty string is a + valid non-EOF condition, then you need to be aware of this and + make sure to check for such strings and avoid passing them to + :meth:`receive_data`. + + Returns: + Nothing, but after calling this you should call :meth:`next_event` + to parse the newly received data. + + Raises: + RuntimeError: + Raised if you pass an empty *data*, indicating EOF, and then + pass a non-empty *data*, indicating more data that somehow + arrived after the EOF. + + (Calling ``receive_data(b"")`` multiple times is fine, + and equivalent to calling it once.) + + """ + if data: + if self._receive_buffer_closed: + raise RuntimeError("received close, then received more data?") + self._receive_buffer += data + else: + self._receive_buffer_closed = True + + def _extract_next_receive_event(self): + state = self.their_state + # We don't pause immediately when they enter DONE, because even in + # DONE state we can still process a ConnectionClosed() event. But + # if we have data in our buffer, then we definitely aren't getting + # a ConnectionClosed() immediately and we need to pause. + if state is DONE and self._receive_buffer: + return PAUSED + if state is MIGHT_SWITCH_PROTOCOL or state is SWITCHED_PROTOCOL: + return PAUSED + assert self._reader is not None + event = self._reader(self._receive_buffer) + if event is None: + if not self._receive_buffer and self._receive_buffer_closed: + # In some unusual cases (basically just HTTP/1.0 bodies), EOF + # triggers an actual protocol event; in that case, we want to + # return that event, and then the state will change and we'll + # get called again to generate the actual ConnectionClosed(). + if hasattr(self._reader, "read_eof"): + event = self._reader.read_eof() + else: + event = ConnectionClosed() + if event is None: + event = NEED_DATA + return event + + def next_event(self): + """Parse the next event out of our receive buffer, update our internal + state, and return it. 
+ + This is a mutating operation -- think of it like calling :func:`next` + on an iterator. + + Returns: + : One of three things: + + 1) An event object -- see :ref:`events`. + + 2) The special constant :data:`NEED_DATA`, which indicates that + you need to read more data from your socket and pass it to + :meth:`receive_data` before this method will be able to return + any more events. + + 3) The special constant :data:`PAUSED`, which indicates that we + are not in a state where we can process incoming data (usually + because the peer has finished their part of the current + request/response cycle, and you have not yet called + :meth:`start_next_cycle`). See :ref:`flow-control` for details. + + Raises: + RemoteProtocolError: + The peer has misbehaved. You should close the connection + (possibly after sending some kind of 4xx response). + + Once this method returns :class:`ConnectionClosed` once, then all + subsequent calls will also return :class:`ConnectionClosed`. + + If this method raises any exception besides :exc:`RemoteProtocolError` + then that's a bug -- if it happens please file a bug report! + + If this method raises any exception then it also sets + :attr:`Connection.their_state` to :data:`ERROR` -- see + :ref:`error-handling` for discussion. 
+ + """ + + if self.their_state is ERROR: + raise RemoteProtocolError("Can't receive data when peer state is ERROR") + try: + event = self._extract_next_receive_event() + if event not in [NEED_DATA, PAUSED]: + self._process_event(self.their_role, event) + if event is NEED_DATA: + if len(self._receive_buffer) > self._max_incomplete_event_size: + # 431 is "Request header fields too large" which is pretty + # much the only situation where we can get here + raise RemoteProtocolError( + "Receive buffer too long", error_status_hint=431 + ) + if self._receive_buffer_closed: + # We're still trying to complete some event, but that's + # never going to happen because no more data is coming + raise RemoteProtocolError("peer unexpectedly closed connection") + return event + except BaseException as exc: + self._process_error(self.their_role) + if isinstance(exc, LocalProtocolError): + exc._reraise_as_remote_protocol_error() + else: + raise + + def send(self, event): + """Convert a high-level event into bytes that can be sent to the peer, + while updating our internal state machine. + + Args: + event: The :ref:`event ` to send. + + Returns: + If ``type(event) is ConnectionClosed``, then returns + ``None``. Otherwise, returns a :term:`bytes-like object`. + + Raises: + LocalProtocolError: + Sending this event at this time would violate our + understanding of the HTTP/1.1 protocol. + + If this method raises any exception then it also sets + :attr:`Connection.our_state` to :data:`ERROR` -- see + :ref:`error-handling` for discussion. 
+ + """ + data_list = self.send_with_data_passthrough(event) + if data_list is None: + return None + else: + return b"".join(data_list) + + def send_with_data_passthrough(self, event): + """Identical to :meth:`send`, except that in situations where + :meth:`send` returns a single :term:`bytes-like object`, this instead + returns a list of them -- and when sending a :class:`Data` event, this + list is guaranteed to contain the exact object you passed in as + :attr:`Data.data`. See :ref:`sendfile` for discussion. + + """ + if self.our_state is ERROR: + raise LocalProtocolError("Can't send data when our state is ERROR") + try: + if type(event) is Response: + self._clean_up_response_headers_for_sending(event) + # We want to call _process_event before calling the writer, + # because if someone tries to do something invalid then this will + # give a sensible error message, while our writers all just assume + # they will only receive valid events. But, _process_event might + # change self._writer. So we have to do a little dance: + writer = self._writer + self._process_event(self.our_role, event) + if type(event) is ConnectionClosed: + return None + else: + # In any situation where writer is None, process_event should + # have raised ProtocolError + assert writer is not None + data_list = [] + writer(event, data_list.append) + return data_list + except: + self._process_error(self.our_role) + raise + + def send_failed(self): + """Notify the state machine that we failed to send the data it gave + us. + + This causes :attr:`Connection.our_state` to immediately become + :data:`ERROR` -- see :ref:`error-handling` for discussion. + + """ + self._process_error(self.our_role) + + # When sending a Response, we take responsibility for a few things: + # + # - Sometimes you MUST set Connection: close. We take care of those + # times. (You can also set it yourself if you want, and if you do then + # we'll respect that and close the connection at the right time. 
But you + # don't have to worry about that unless you want to.) + # + # - The user has to set Content-Length if they want it. Otherwise, for + # responses that have bodies (e.g. not HEAD), then we will automatically + # select the right mechanism for streaming a body of unknown length, + # which depends on depending on the peer's HTTP version. + # + # This function's *only* responsibility is making sure headers are set up + # right -- everything downstream just looks at the headers. There are no + # side channels. It mutates the response event in-place (but not the + # response.headers list object). + def _clean_up_response_headers_for_sending(self, response): + assert type(response) is Response + + headers = response.headers + need_close = False + + # HEAD requests need some special handling: they always act like they + # have Content-Length: 0, and that's how _body_framing treats + # them. But their headers are supposed to match what we would send if + # the request was a GET. (Technically there is one deviation allowed: + # we're allowed to leave out the framing headers -- see + # https://tools.ietf.org/html/rfc7231#section-4.3.2 . But it's just as + # easy to get them right.) + method_for_choosing_headers = self._request_method + if method_for_choosing_headers == b"HEAD": + method_for_choosing_headers = b"GET" + framing_type, _ = _body_framing(method_for_choosing_headers, response) + if framing_type in ("chunked", "http/1.0"): + # This response has a body of unknown length. + # If our peer is HTTP/1.1, we use Transfer-Encoding: chunked + # If our peer is HTTP/1.0, we use no framing headers, and close the + # connection afterwards. 
+ # + # Make sure to clear Content-Length (in principle user could have + # set both and then we ignored Content-Length b/c + # Transfer-Encoding overwrote it -- this would be naughty of them, + # but the HTTP spec says that if our peer does this then we have + # to fix it instead of erroring out, so we'll accord the user the + # same respect). + headers = set_comma_header(headers, b"content-length", []) + if self.their_http_version is None or self.their_http_version < b"1.1": + # Either we never got a valid request and are sending back an + # error (their_http_version is None), so we assume the worst; + # or else we did get a valid HTTP/1.0 request, so we know that + # they don't understand chunked encoding. + headers = set_comma_header(headers, b"transfer-encoding", []) + # This is actually redundant ATM, since currently we + # unconditionally disable keep-alive when talking to HTTP/1.0 + # peers. But let's be defensive just in case we add + # Connection: keep-alive support later: + if self._request_method != b"HEAD": + need_close = True + else: + headers = set_comma_header(headers, b"transfer-encoding", ["chunked"]) + + if not self._cstate.keep_alive or need_close: + # Make sure Connection: close is set + connection = set(get_comma_header(headers, b"connection")) + connection.discard(b"keep-alive") + connection.add(b"close") + headers = set_comma_header(headers, b"connection", sorted(connection)) + + response.headers = headers diff --git a/.venv/lib/python3.9/site-packages/h11/_events.py b/.venv/lib/python3.9/site-packages/h11/_events.py new file mode 100644 index 0000000..1827930 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/h11/_events.py @@ -0,0 +1,302 @@ +# High level events that make up HTTP/1.1 conversations. Loosely inspired by +# the corresponding events in hyper-h2: +# +# http://python-hyper.org/h2/en/stable/api.html#events +# +# Don't subclass these. Stuff will break. + +import re + +from . 
import _headers +from ._abnf import request_target +from ._util import bytesify, LocalProtocolError, validate + +# Everything in __all__ gets re-exported as part of the h11 public API. +__all__ = [ + "Request", + "InformationalResponse", + "Response", + "Data", + "EndOfMessage", + "ConnectionClosed", +] + +request_target_re = re.compile(request_target.encode("ascii")) + + +class _EventBundle: + _fields = [] + _defaults = {} + + def __init__(self, **kwargs): + _parsed = kwargs.pop("_parsed", False) + allowed = set(self._fields) + for kwarg in kwargs: + if kwarg not in allowed: + raise TypeError( + "unrecognized kwarg {} for {}".format( + kwarg, self.__class__.__name__ + ) + ) + required = allowed.difference(self._defaults) + for field in required: + if field not in kwargs: + raise TypeError( + "missing required kwarg {} for {}".format( + field, self.__class__.__name__ + ) + ) + self.__dict__.update(self._defaults) + self.__dict__.update(kwargs) + + # Special handling for some fields + + if "headers" in self.__dict__: + self.headers = _headers.normalize_and_validate( + self.headers, _parsed=_parsed + ) + + if not _parsed: + for field in ["method", "target", "http_version", "reason"]: + if field in self.__dict__: + self.__dict__[field] = bytesify(self.__dict__[field]) + + if "status_code" in self.__dict__: + if not isinstance(self.status_code, int): + raise LocalProtocolError("status code must be integer") + # Because IntEnum objects are instances of int, but aren't + # duck-compatible (sigh), see gh-72. 
+ self.status_code = int(self.status_code) + + self._validate() + + def _validate(self): + pass + + def __repr__(self): + name = self.__class__.__name__ + kwarg_strs = [ + "{}={}".format(field, self.__dict__[field]) for field in self._fields + ] + kwarg_str = ", ".join(kwarg_strs) + return "{}({})".format(name, kwarg_str) + + # Useful for tests + def __eq__(self, other): + return self.__class__ == other.__class__ and self.__dict__ == other.__dict__ + + # This is an unhashable type. + __hash__ = None + + +class Request(_EventBundle): + """The beginning of an HTTP request. + + Fields: + + .. attribute:: method + + An HTTP method, e.g. ``b"GET"`` or ``b"POST"``. Always a byte + string. :term:`Bytes-like objects ` and native + strings containing only ascii characters will be automatically + converted to byte strings. + + .. attribute:: target + + The target of an HTTP request, e.g. ``b"/index.html"``, or one of the + more exotic formats described in `RFC 7320, section 5.3 + `_. Always a byte + string. :term:`Bytes-like objects ` and native + strings containing only ascii characters will be automatically + converted to byte strings. + + .. attribute:: headers + + Request headers, represented as a list of (name, value) pairs. See + :ref:`the header normalization rules ` for details. + + .. attribute:: http_version + + The HTTP protocol version, represented as a byte string like + ``b"1.1"``. See :ref:`the HTTP version normalization rules + ` for details. + + """ + + _fields = ["method", "target", "headers", "http_version"] + _defaults = {"http_version": b"1.1"} + + def _validate(self): + # "A server MUST respond with a 400 (Bad Request) status code to any + # HTTP/1.1 request message that lacks a Host header field and to any + # request message that contains more than one Host header field or a + # Host header field with an invalid field-value." 
+ # -- https://tools.ietf.org/html/rfc7230#section-5.4 + host_count = 0 + for name, value in self.headers: + if name == b"host": + host_count += 1 + if self.http_version == b"1.1" and host_count == 0: + raise LocalProtocolError("Missing mandatory Host: header") + if host_count > 1: + raise LocalProtocolError("Found multiple Host: headers") + + validate(request_target_re, self.target, "Illegal target characters") + + +class _ResponseBase(_EventBundle): + _fields = ["status_code", "headers", "http_version", "reason"] + _defaults = {"http_version": b"1.1", "reason": b""} + + +class InformationalResponse(_ResponseBase): + """An HTTP informational response. + + Fields: + + .. attribute:: status_code + + The status code of this response, as an integer. For an + :class:`InformationalResponse`, this is always in the range [100, + 200). + + .. attribute:: headers + + Request headers, represented as a list of (name, value) pairs. See + :ref:`the header normalization rules ` for + details. + + .. attribute:: http_version + + The HTTP protocol version, represented as a byte string like + ``b"1.1"``. See :ref:`the HTTP version normalization rules + ` for details. + + .. attribute:: reason + + The reason phrase of this response, as a byte string. For example: + ``b"OK"``, or ``b"Not Found"``. + + """ + + def _validate(self): + if not (100 <= self.status_code < 200): + raise LocalProtocolError( + "InformationalResponse status_code should be in range " + "[100, 200), not {}".format(self.status_code) + ) + + +class Response(_ResponseBase): + """The beginning of an HTTP response. + + Fields: + + .. attribute:: status_code + + The status code of this response, as an integer. For an + :class:`Response`, this is always in the range [200, + 600). + + .. attribute:: headers + + Request headers, represented as a list of (name, value) pairs. See + :ref:`the header normalization rules ` for details. + + .. 
attribute:: http_version + + The HTTP protocol version, represented as a byte string like + ``b"1.1"``. See :ref:`the HTTP version normalization rules + ` for details. + + .. attribute:: reason + + The reason phrase of this response, as a byte string. For example: + ``b"OK"``, or ``b"Not Found"``. + + """ + + def _validate(self): + if not (200 <= self.status_code < 600): + raise LocalProtocolError( + "Response status_code should be in range [200, 600), not {}".format( + self.status_code + ) + ) + + +class Data(_EventBundle): + """Part of an HTTP message body. + + Fields: + + .. attribute:: data + + A :term:`bytes-like object` containing part of a message body. Or, if + using the ``combine=False`` argument to :meth:`Connection.send`, then + any object that your socket writing code knows what to do with, and for + which calling :func:`len` returns the number of bytes that will be + written -- see :ref:`sendfile` for details. + + .. attribute:: chunk_start + + A marker that indicates whether this data object is from the start of a + chunked transfer encoding chunk. This field is ignored when when a Data + event is provided to :meth:`Connection.send`: it is only valid on + events emitted from :meth:`Connection.next_event`. You probably + shouldn't use this attribute at all; see + :ref:`chunk-delimiters-are-bad` for details. + + .. attribute:: chunk_end + + A marker that indicates whether this data object is the last for a + given chunked transfer encoding chunk. This field is ignored when when + a Data event is provided to :meth:`Connection.send`: it is only valid + on events emitted from :meth:`Connection.next_event`. You probably + shouldn't use this attribute at all; see + :ref:`chunk-delimiters-are-bad` for details. 
+ + """ + + _fields = ["data", "chunk_start", "chunk_end"] + _defaults = {"chunk_start": False, "chunk_end": False} + + +# XX FIXME: "A recipient MUST ignore (or consider as an error) any fields that +# are forbidden to be sent in a trailer, since processing them as if they were +# present in the header section might bypass external security filters." +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#chunked.trailer.part +# Unfortunately, the list of forbidden fields is long and vague :-/ +class EndOfMessage(_EventBundle): + """The end of an HTTP message. + + Fields: + + .. attribute:: headers + + Default value: ``[]`` + + Any trailing headers attached to this message, represented as a list of + (name, value) pairs. See :ref:`the header normalization rules + ` for details. + + Must be empty unless ``Transfer-Encoding: chunked`` is in use. + + """ + + _fields = ["headers"] + _defaults = {"headers": []} + + +class ConnectionClosed(_EventBundle): + """This event indicates that the sender has closed their outgoing + connection. + + Note that this does not necessarily mean that they can't *receive* further + data, because TCP connections are composed to two one-way channels which + can be closed independently. See :ref:`closing` for details. + + No fields. + """ + + pass diff --git a/.venv/lib/python3.9/site-packages/h11/_headers.py b/.venv/lib/python3.9/site-packages/h11/_headers.py new file mode 100644 index 0000000..7ed39bc --- /dev/null +++ b/.venv/lib/python3.9/site-packages/h11/_headers.py @@ -0,0 +1,242 @@ +import re + +from ._abnf import field_name, field_value +from ._util import bytesify, LocalProtocolError, validate + +# Facts +# ----- +# +# Headers are: +# keys: case-insensitive ascii +# values: mixture of ascii and raw bytes +# +# "Historically, HTTP has allowed field content with text in the ISO-8859-1 +# charset [ISO-8859-1], supporting other charsets only through use of +# [RFC2047] encoding. 
In practice, most HTTP header field values use only a +# subset of the US-ASCII charset [USASCII]. Newly defined header fields SHOULD +# limit their field values to US-ASCII octets. A recipient SHOULD treat other +# octets in field content (obs-text) as opaque data." +# And it deprecates all non-ascii values +# +# Leading/trailing whitespace in header names is forbidden +# +# Values get leading/trailing whitespace stripped +# +# Content-Disposition actually needs to contain unicode semantically; to +# accomplish this it has a terrifically weird way of encoding the filename +# itself as ascii (and even this still has lots of cross-browser +# incompatibilities) +# +# Order is important: +# "a proxy MUST NOT change the order of these field values when forwarding a +# message" +# (and there are several headers where the order indicates a preference) +# +# Multiple occurences of the same header: +# "A sender MUST NOT generate multiple header fields with the same field name +# in a message unless either the entire field value for that header field is +# defined as a comma-separated list [or the header is Set-Cookie which gets a +# special exception]" - RFC 7230. (cookies are in RFC 6265) +# +# So every header aside from Set-Cookie can be merged by b", ".join if it +# occurs repeatedly. But, of course, they can't necessarily be split by +# .split(b","), because quoting. +# +# Given all this mess (case insensitive, duplicates allowed, order is +# important, ...), there doesn't appear to be any standard way to handle +# headers in Python -- they're almost like dicts, but... actually just +# aren't. For now we punt and just use a super simple representation: headers +# are a list of pairs +# +# [(name1, value1), (name2, value2), ...] +# +# where all entries are bytestrings, names are lowercase and have no +# leading/trailing whitespace, and values are bytestrings with no +# leading/trailing whitespace. Searching and updating are done via naive O(n) +# methods. 
+# +# Maybe a dict-of-lists would be better? + +_content_length_re = re.compile(br"[0-9]+") +_field_name_re = re.compile(field_name.encode("ascii")) +_field_value_re = re.compile(field_value.encode("ascii")) + + +class Headers: + """ + A list-like interface that allows iterating over headers as byte-pairs + of (lowercased-name, value). + + Internally we actually store the representation as three-tuples, + including both the raw original casing, in order to preserve casing + over-the-wire, and the lowercased name, for case-insensitive comparisions. + + r = Request( + method="GET", + target="/", + headers=[("Host", "example.org"), ("Connection", "keep-alive")], + http_version="1.1", + ) + assert r.headers == [ + (b"host", b"example.org"), + (b"connection", b"keep-alive") + ] + assert r.headers.raw_items() == [ + (b"Host", b"example.org"), + (b"Connection", b"keep-alive") + ] + """ + + __slots__ = "_full_items" + + def __init__(self, full_items): + self._full_items = full_items + + def __iter__(self): + for _, name, value in self._full_items: + yield name, value + + def __bool__(self): + return bool(self._full_items) + + def __eq__(self, other): + return list(self) == list(other) + + def __len__(self): + return len(self._full_items) + + def __repr__(self): + return "" % repr(list(self)) + + def __getitem__(self, idx): + _, name, value = self._full_items[idx] + return (name, value) + + def raw_items(self): + return [(raw_name, value) for raw_name, _, value in self._full_items] + + +def normalize_and_validate(headers, _parsed=False): + new_headers = [] + seen_content_length = None + saw_transfer_encoding = False + for name, value in headers: + # For headers coming out of the parser, we can safely skip some steps, + # because it always returns bytes and has already run these regexes + # over the data: + if not _parsed: + name = bytesify(name) + value = bytesify(value) + validate(_field_name_re, name, "Illegal header name {!r}", name) + validate(_field_value_re, value, 
"Illegal header value {!r}", value) + raw_name = name + name = name.lower() + if name == b"content-length": + lengths = {length.strip() for length in value.split(b",")} + if len(lengths) != 1: + raise LocalProtocolError("conflicting Content-Length headers") + value = lengths.pop() + validate(_content_length_re, value, "bad Content-Length") + if seen_content_length is None: + seen_content_length = value + new_headers.append((raw_name, name, value)) + elif seen_content_length != value: + raise LocalProtocolError("conflicting Content-Length headers") + elif name == b"transfer-encoding": + # "A server that receives a request message with a transfer coding + # it does not understand SHOULD respond with 501 (Not + # Implemented)." + # https://tools.ietf.org/html/rfc7230#section-3.3.1 + if saw_transfer_encoding: + raise LocalProtocolError( + "multiple Transfer-Encoding headers", error_status_hint=501 + ) + # "All transfer-coding names are case-insensitive" + # -- https://tools.ietf.org/html/rfc7230#section-4 + value = value.lower() + if value != b"chunked": + raise LocalProtocolError( + "Only Transfer-Encoding: chunked is supported", + error_status_hint=501, + ) + saw_transfer_encoding = True + new_headers.append((raw_name, name, value)) + else: + new_headers.append((raw_name, name, value)) + return Headers(new_headers) + + +def get_comma_header(headers, name): + # Should only be used for headers whose value is a list of + # comma-separated, case-insensitive values. + # + # The header name `name` is expected to be lower-case bytes. + # + # Connection: meets these criteria (including cast insensitivity). + # + # Content-Length: technically is just a single value (1*DIGIT), but the + # standard makes reference to implementations that do multiple values, and + # using this doesn't hurt. Ditto, case insensitivity doesn't things either + # way. + # + # Transfer-Encoding: is more complex (allows for quoted strings), so + # splitting on , is actually wrong. 
For example, this is legal: + # + # Transfer-Encoding: foo; options="1,2", chunked + # + # and should be parsed as + # + # foo; options="1,2" + # chunked + # + # but this naive function will parse it as + # + # foo; options="1 + # 2" + # chunked + # + # However, this is okay because the only thing we are going to do with + # any Transfer-Encoding is reject ones that aren't just "chunked", so + # both of these will be treated the same anyway. + # + # Expect: the only legal value is the literal string + # "100-continue". Splitting on commas is harmless. Case insensitive. + # + out = [] + for _, found_name, found_raw_value in headers._full_items: + if found_name == name: + found_raw_value = found_raw_value.lower() + for found_split_value in found_raw_value.split(b","): + found_split_value = found_split_value.strip() + if found_split_value: + out.append(found_split_value) + return out + + +def set_comma_header(headers, name, new_values): + # The header name `name` is expected to be lower-case bytes. + # + # Note that when we store the header we use title casing for the header + # names, in order to match the conventional HTTP header style. + # + # Simply calling `.title()` is a blunt approach, but it's correct + # here given the cases where we're using `set_comma_header`... + # + # Connection, Content-Length, Transfer-Encoding. + new_headers = [] + for found_raw_name, found_name, found_raw_value in headers._full_items: + if found_name != name: + new_headers.append((found_raw_name, found_raw_value)) + for new_value in new_values: + new_headers.append((name.title(), new_value)) + return normalize_and_validate(new_headers) + + +def has_expect_100_continue(request): + # https://tools.ietf.org/html/rfc7231#section-5.1.1 + # "A server that receives a 100-continue expectation in an HTTP/1.0 request + # MUST ignore that expectation." 
+ if request.http_version < b"1.1": + return False + expect = get_comma_header(request.headers, b"expect") + return b"100-continue" in expect diff --git a/.venv/lib/python3.9/site-packages/h11/_readers.py b/.venv/lib/python3.9/site-packages/h11/_readers.py new file mode 100644 index 0000000..0ead0be --- /dev/null +++ b/.venv/lib/python3.9/site-packages/h11/_readers.py @@ -0,0 +1,222 @@ +# Code to read HTTP data +# +# Strategy: each reader is a callable which takes a ReceiveBuffer object, and +# either: +# 1) consumes some of it and returns an Event +# 2) raises a LocalProtocolError (for consistency -- e.g. we call validate() +# and it might raise a LocalProtocolError, so simpler just to always use +# this) +# 3) returns None, meaning "I need more data" +# +# If they have a .read_eof attribute, then this will be called if an EOF is +# received -- but this is optional. Either way, the actual ConnectionClosed +# event will be generated afterwards. +# +# READERS is a dict describing how to pick a reader. It maps states to either: +# - a reader +# - or, for body readers, a dict of per-framing reader factories + +import re + +from ._abnf import chunk_header, header_field, request_line, status_line +from ._events import * +from ._state import * +from ._util import LocalProtocolError, RemoteProtocolError, validate + +__all__ = ["READERS"] + +header_field_re = re.compile(header_field.encode("ascii")) + +# Remember that this has to run in O(n) time -- so e.g. the bytearray cast is +# critical. 
+obs_fold_re = re.compile(br"[ \t]+") + + +def _obsolete_line_fold(lines): + it = iter(lines) + last = None + for line in it: + match = obs_fold_re.match(line) + if match: + if last is None: + raise LocalProtocolError("continuation line at start of headers") + if not isinstance(last, bytearray): + last = bytearray(last) + last += b" " + last += line[match.end() :] + else: + if last is not None: + yield last + last = line + if last is not None: + yield last + + +def _decode_header_lines(lines): + for line in _obsolete_line_fold(lines): + matches = validate(header_field_re, line, "illegal header line: {!r}", line) + yield (matches["field_name"], matches["field_value"]) + + +request_line_re = re.compile(request_line.encode("ascii")) + + +def maybe_read_from_IDLE_client(buf): + lines = buf.maybe_extract_lines() + if lines is None: + if buf.is_next_line_obviously_invalid_request_line(): + raise LocalProtocolError("illegal request line") + return None + if not lines: + raise LocalProtocolError("no request line received") + matches = validate( + request_line_re, lines[0], "illegal request line: {!r}", lines[0] + ) + return Request( + headers=list(_decode_header_lines(lines[1:])), _parsed=True, **matches + ) + + +status_line_re = re.compile(status_line.encode("ascii")) + + +def maybe_read_from_SEND_RESPONSE_server(buf): + lines = buf.maybe_extract_lines() + if lines is None: + if buf.is_next_line_obviously_invalid_request_line(): + raise LocalProtocolError("illegal request line") + return None + if not lines: + raise LocalProtocolError("no response line received") + matches = validate(status_line_re, lines[0], "illegal status line: {!r}", lines[0]) + # Tolerate missing reason phrases + if matches["reason"] is None: + matches["reason"] = b"" + status_code = matches["status_code"] = int(matches["status_code"]) + class_ = InformationalResponse if status_code < 200 else Response + return class_( + headers=list(_decode_header_lines(lines[1:])), _parsed=True, **matches + ) + + 
+class ContentLengthReader: + def __init__(self, length): + self._length = length + self._remaining = length + + def __call__(self, buf): + if self._remaining == 0: + return EndOfMessage() + data = buf.maybe_extract_at_most(self._remaining) + if data is None: + return None + self._remaining -= len(data) + return Data(data=data) + + def read_eof(self): + raise RemoteProtocolError( + "peer closed connection without sending complete message body " + "(received {} bytes, expected {})".format( + self._length - self._remaining, self._length + ) + ) + + +chunk_header_re = re.compile(chunk_header.encode("ascii")) + + +class ChunkedReader: + def __init__(self): + self._bytes_in_chunk = 0 + # After reading a chunk, we have to throw away the trailing \r\n; if + # this is >0 then we discard that many bytes before resuming regular + # de-chunkification. + self._bytes_to_discard = 0 + self._reading_trailer = False + + def __call__(self, buf): + if self._reading_trailer: + lines = buf.maybe_extract_lines() + if lines is None: + return None + return EndOfMessage(headers=list(_decode_header_lines(lines))) + if self._bytes_to_discard > 0: + data = buf.maybe_extract_at_most(self._bytes_to_discard) + if data is None: + return None + self._bytes_to_discard -= len(data) + if self._bytes_to_discard > 0: + return None + # else, fall through and read some more + assert self._bytes_to_discard == 0 + if self._bytes_in_chunk == 0: + # We need to refill our chunk count + chunk_header = buf.maybe_extract_next_line() + if chunk_header is None: + return None + matches = validate( + chunk_header_re, + chunk_header, + "illegal chunk header: {!r}", + chunk_header, + ) + # XX FIXME: we discard chunk extensions. Does anyone care? 
+ self._bytes_in_chunk = int(matches["chunk_size"], base=16) + if self._bytes_in_chunk == 0: + self._reading_trailer = True + return self(buf) + chunk_start = True + else: + chunk_start = False + assert self._bytes_in_chunk > 0 + data = buf.maybe_extract_at_most(self._bytes_in_chunk) + if data is None: + return None + self._bytes_in_chunk -= len(data) + if self._bytes_in_chunk == 0: + self._bytes_to_discard = 2 + chunk_end = True + else: + chunk_end = False + return Data(data=data, chunk_start=chunk_start, chunk_end=chunk_end) + + def read_eof(self): + raise RemoteProtocolError( + "peer closed connection without sending complete message body " + "(incomplete chunked read)" + ) + + +class Http10Reader: + def __call__(self, buf): + data = buf.maybe_extract_at_most(999999999) + if data is None: + return None + return Data(data=data) + + def read_eof(self): + return EndOfMessage() + + +def expect_nothing(buf): + if buf: + raise LocalProtocolError("Got data when expecting EOF") + return None + + +READERS = { + (CLIENT, IDLE): maybe_read_from_IDLE_client, + (SERVER, IDLE): maybe_read_from_SEND_RESPONSE_server, + (SERVER, SEND_RESPONSE): maybe_read_from_SEND_RESPONSE_server, + (CLIENT, DONE): expect_nothing, + (CLIENT, MUST_CLOSE): expect_nothing, + (CLIENT, CLOSED): expect_nothing, + (SERVER, DONE): expect_nothing, + (SERVER, MUST_CLOSE): expect_nothing, + (SERVER, CLOSED): expect_nothing, + SEND_BODY: { + "chunked": ChunkedReader, + "content-length": ContentLengthReader, + "http/1.0": Http10Reader, + }, +} diff --git a/.venv/lib/python3.9/site-packages/h11/_receivebuffer.py b/.venv/lib/python3.9/site-packages/h11/_receivebuffer.py new file mode 100644 index 0000000..a3737f3 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/h11/_receivebuffer.py @@ -0,0 +1,152 @@ +import re +import sys + +__all__ = ["ReceiveBuffer"] + + +# Operations we want to support: +# - find next \r\n or \r\n\r\n (\n or \n\n are also acceptable), +# or wait until there is one +# - read at-most-N 
bytes +# Goals: +# - on average, do this fast +# - worst case, do this in O(n) where n is the number of bytes processed +# Plan: +# - store bytearray, offset, how far we've searched for a separator token +# - use the how-far-we've-searched data to avoid rescanning +# - while doing a stream of uninterrupted processing, advance offset instead +# of constantly copying +# WARNING: +# - I haven't benchmarked or profiled any of this yet. +# +# Note that starting in Python 3.4, deleting the initial n bytes from a +# bytearray is amortized O(n), thanks to some excellent work by Antoine +# Martin: +# +# https://bugs.python.org/issue19087 +# +# This means that if we only supported 3.4+, we could get rid of the code here +# involving self._start and self.compress, because it's doing exactly the same +# thing that bytearray now does internally. +# +# BUT unfortunately, we still support 2.7, and reading short segments out of a +# long buffer MUST be O(bytes read) to avoid DoS issues, so we can't actually +# delete this code. Yet: +# +# https://pythonclock.org/ +# +# (Two things to double-check first though: make sure PyPy also has the +# optimization, and benchmark to make sure it's a win, since we do have a +# slightly clever thing where we delay calling compress() until we've +# processed a whole event, which could in theory be slightly more efficient +# than the internal bytearray support.) 
blank_line_regex = re.compile(b"\n\r?\n", re.MULTILINE)


class ReceiveBuffer:
    """An incrementally-fed byte buffer with cheap line/segment extraction.

    Incoming bytes are appended with ``+=``; the ``maybe_extract_*``
    methods either remove and return a complete prefix (a fixed-size
    segment, one CRLF-terminated line, or everything up to a blank line)
    or return ``None`` when the buffer does not yet hold enough data.
    The ``*_search`` cursors remember how far previous failed scans got,
    so repeated calls never rescan bytes they have already examined.
    """

    def __init__(self):
        self._data = bytearray()
        # How far maybe_extract_next_line() has already scanned for b"\r\n".
        self._next_line_search = 0
        # How far maybe_extract_lines() has already scanned for a blank line.
        self._multiple_lines_search = 0

    def __iadd__(self, byteslike):
        self._data += byteslike
        return self

    def __bool__(self):
        return len(self) > 0

    def __len__(self):
        return len(self._data)

    # Used to implement the unprocessed_data property elsewhere.
    def __bytes__(self):
        return bytes(self._data)

    def _extract(self, count):
        # Remove and return the first `count` bytes; any cached scan
        # positions refer to the removed prefix, so reset them.
        head = self._data[:count]
        del self._data[:count]
        self._next_line_search = 0
        self._multiple_lines_search = 0
        return head

    def maybe_extract_at_most(self, count):
        """Remove and return up to `count` bytes, or None if empty."""
        chunk = self._data[:count]
        if not chunk:
            return None
        return self._extract(count)

    def maybe_extract_next_line(self):
        """Remove and return the first CRLF-terminated line (terminator
        included), or None if no complete line is buffered yet."""
        # Start one byte early in case a b"\r" sat at the end of the
        # previously scanned region and its b"\n" just arrived.
        scan_from = max(0, self._next_line_search - 1)
        terminator_at = self._data.find(b"\r\n", scan_from)

        if terminator_at == -1:
            self._next_line_search = len(self._data)
            return None

        # +2 accounts for len(b"\r\n") itself.
        return self._extract(terminator_at + 2)

    def maybe_extract_lines(self):
        """Remove everything through the first blank line and return it as
        a list of lines (terminators stripped), or None if incomplete."""
        # An immediately-empty line means "no headers at all".
        for empty in (b"\n", b"\r\n"):
            if self._data[: len(empty)] == empty:
                self._extract(len(empty))
                return []

        # Only scan buffer space we haven't already looked at.
        match = blank_line_regex.search(self._data, self._multiple_lines_search)
        if match is None:
            # Keep the last 2 bytes re-scannable: a partial terminator
            # (b"\n\r") may complete on the next feed.
            self._multiple_lines_search = max(0, len(self._data) - 2)
            return None

        # Chop off everything through the end of the blank line.
        block = self._extract(match.end())
        lines = block.split(b"\n")

        # The pieces are bytearrays, so the trailing b"\r" can be
        # dropped in place.
        for line in lines:
            if line.endswith(b"\r"):
                del line[-1]

        # The blank-line terminator always leaves two empty tail entries.
        assert lines[-2] == lines[-1] == b""
        del lines[-2:]

        return lines

    # In theory we should wait until `\r\n` before starting to validate
    # incoming data. However it's interesting to detect (very) invalid data
    # early given they might not even contain `\r\n` at all (hence only
    # timeout will get rid of them).
    # This is not a 100% effective detection but more of a cheap sanity
    # check allowing for early abort in some useful cases.
    # This is especially interesting when the peer is messing up with HTTPS
    # and sent us a TLS stream where we were expecting plain HTTP, given
    # all versions of TLS so far start the handshake with a 0x16 message
    # type code.
    def is_next_line_obviously_invalid_request_line(self):
        """True when the first buffered byte cannot start a request line
        (an HTTP header line must not begin with a non-printable or a
        space, i.e. anything below 0x21)."""
        if not self._data:
            return False
        return self._data[0] < 0x21


# ----------------------------------------------------------------
# The original patch continues with h11/_state.py -- the core state
# machine.
# Rule 1: everything that affects the state machine and state transitions
# must live in that file; as much as possible goes into the table-based
# representation, but the bits that don't quite fit still live there as
# code.
# Rule 2: that file does not know about what role we're playing; it only
# knows about HTTP request/response cycles in the abstract, ensuring the
# same rules apply to local and remote parties.
# ================================================================
# h11/_state.py -- the core HTTP state machine (continued)
# ================================================================
#
# Theory of operation
# ===================
#
# There are really five coupled state machines here:
#
#   1) the client state and 2) the server state (both table-driven; see
#      the h11 docs for the full automata);
#   3) the keep-alive state, with possible states {True, False};
#   4) the SWITCH_CONNECT state, {False, True};
#   5) the SWITCH_UPGRADE state, {False, True}.
#
# For (3)-(5) the first state listed is the initial one.  (1)-(3) live in
# explicit member variables; (4) and (5) are stored implicitly in the
# pending_switch_proposals set as:
#   (state of 4) == (_SWITCH_CONNECT in pending_switch_proposals)
#   (state of 5) == (_SWITCH_UPGRADE in pending_switch_proposals)
#
# Each machine has two kinds of transitions:
#
#  a) Event-triggered: an event arrives and moves whichever machines "see"
#     it (and seeing an event in a state that does not expect it is an
#     error).  The client machine sees all client-emitted h11.events
#     objects; the server machine sees all server-emitted ones *plus* the
#     client's Request, delivered in the annotated form (Request, CLIENT).
#     Server events may also carry a _SWITCH_* annotation -- e.g.
#     (Response, _SWITCH_CONNECT) is distinct from a plain Response.
#     The keep-alive machine sees process_keep_alive_disabled() (derived
#     from Request/Response events) and only ever moves True -> False.
#     The _SWITCH_* machines move False -> True when the client proposes
#     the relevant switch (process_client_switch_proposal) and True ->
#     False when a Response arrives without that _SWITCH_* annotation.
#
#  b) State-triggered: certain *joint* configurations of states force a
#     further transition the moment they arise, regardless of how they
#     were reached -- e.g. {client: DONE, keep-alive: False} immediately
#     forces client -> MUST_CLOSE.  These are applied repeatedly until a
#     fixed point is reached.  The only conflicting pair (client DONE ->
#     MIGHT_SWITCH_PROTOCOL versus DONE -> MUST_CLOSE) is resolved by
#     explicitly prioritizing the protocol switch.
#
# Implementation
# --------------
#
# The client/server event-triggered transitions, and the state-triggered
# transitions involving only the client and server states, live in the
# tables below; everything else is explicit Python in ConnectionState.
#
# WARNING: the script that generates the state machine diagrams for the
# docs reads EVENT_TRIGGERED_TRANSITIONS and STATE_TRIGGERED_TRANSITIONS,
# but cannot see transitions written directly as Python code.  If you
# touch those, update the script to keep it in sync!

from ._events import *
from ._util import LocalProtocolError, make_sentinel

# Everything in __all__ gets re-exported as part of the h11 public API.
__all__ = [
    "CLIENT",
    "SERVER",
    "IDLE",
    "SEND_RESPONSE",
    "SEND_BODY",
    "DONE",
    "MUST_CLOSE",
    "CLOSED",
    "MIGHT_SWITCH_PROTOCOL",
    "SWITCHED_PROTOCOL",
    "ERROR",
]

# Roles.
CLIENT = make_sentinel("CLIENT")
SERVER = make_sentinel("SERVER")

# States.
IDLE = make_sentinel("IDLE")
SEND_RESPONSE = make_sentinel("SEND_RESPONSE")
SEND_BODY = make_sentinel("SEND_BODY")
DONE = make_sentinel("DONE")
MUST_CLOSE = make_sentinel("MUST_CLOSE")
CLOSED = make_sentinel("CLOSED")
ERROR = make_sentinel("ERROR")

# Protocol-switch states.
MIGHT_SWITCH_PROTOCOL = make_sentinel("MIGHT_SWITCH_PROTOCOL")
SWITCHED_PROTOCOL = make_sentinel("SWITCHED_PROTOCOL")

# Internal annotations for server events that accept a switch proposal.
_SWITCH_UPGRADE = make_sentinel("_SWITCH_UPGRADE")
_SWITCH_CONNECT = make_sentinel("_SWITCH_CONNECT")

EVENT_TRIGGERED_TRANSITIONS = {
    CLIENT: {
        IDLE: {Request: SEND_BODY, ConnectionClosed: CLOSED},
        SEND_BODY: {Data: SEND_BODY, EndOfMessage: DONE},
        DONE: {ConnectionClosed: CLOSED},
        MUST_CLOSE: {ConnectionClosed: CLOSED},
        CLOSED: {ConnectionClosed: CLOSED},
        MIGHT_SWITCH_PROTOCOL: {},
        SWITCHED_PROTOCOL: {},
        ERROR: {},
    },
    SERVER: {
        IDLE: {
            ConnectionClosed: CLOSED,
            Response: SEND_BODY,
            # Special case: the server sees client Request events, in this
            # annotated form.
            (Request, CLIENT): SEND_RESPONSE,
        },
        SEND_RESPONSE: {
            InformationalResponse: SEND_RESPONSE,
            Response: SEND_BODY,
            (InformationalResponse, _SWITCH_UPGRADE): SWITCHED_PROTOCOL,
            (Response, _SWITCH_CONNECT): SWITCHED_PROTOCOL,
        },
        SEND_BODY: {Data: SEND_BODY, EndOfMessage: DONE},
        DONE: {ConnectionClosed: CLOSED},
        MUST_CLOSE: {ConnectionClosed: CLOSED},
        CLOSED: {ConnectionClosed: CLOSED},
        SWITCHED_PROTOCOL: {},
        ERROR: {},
    },
}

# NB: some additional special-case state-triggered transitions are
# hard-coded into ConnectionState._fire_state_triggered_transitions below.
STATE_TRIGGERED_TRANSITIONS = {
    # (client state, server state) -> {role: new state}
    # Protocol negotiation
    (MIGHT_SWITCH_PROTOCOL, SWITCHED_PROTOCOL): {CLIENT: SWITCHED_PROTOCOL},
    # Socket shutdown
    (CLOSED, DONE): {SERVER: MUST_CLOSE},
    (CLOSED, IDLE): {SERVER: MUST_CLOSE},
    (ERROR, DONE): {SERVER: MUST_CLOSE},
    (DONE, CLOSED): {CLIENT: MUST_CLOSE},
    (IDLE, CLOSED): {CLIENT: MUST_CLOSE},
    (DONE, ERROR): {CLIENT: MUST_CLOSE},
}


class ConnectionState:
    """Joint client/server/keep-alive/switch state for one connection."""

    def __init__(self):
        # Extra bits of state that don't quite fit the tabular model.

        # When False, the automatic DONE -> MUST_CLOSE transition is armed.
        # Never assign this directly; call process_keep_alive_disabled().
        self.keep_alive = True

        # Subset of {_SWITCH_UPGRADE, _SWITCH_CONNECT}: protocol switches
        # the client has proposed that the server has not yet resolved.
        self.pending_switch_proposals = set()

        self.states = {CLIENT: IDLE, SERVER: IDLE}

    def process_error(self, role):
        """Force *role* into ERROR and propagate knock-on transitions."""
        self.states[role] = ERROR
        self._fire_state_triggered_transitions()

    def process_keep_alive_disabled(self):
        """Record that keep-alive is off (True -> False; irreversible)."""
        self.keep_alive = False
        self._fire_state_triggered_transitions()

    def process_client_switch_proposal(self, switch_event):
        """Record a client protocol-switch proposal (a _SWITCH_* sentinel)."""
        self.pending_switch_proposals.add(switch_event)
        self._fire_state_triggered_transitions()

    def process_event(self, role, event_type, server_switch_event=None):
        """Feed one event through the machines.

        *server_switch_event*, when given, annotates a server event as
        accepting a pending switch proposal; it must match a proposal the
        client actually made, or LocalProtocolError is raised.
        """
        _event_type = event_type
        if server_switch_event is not None:
            assert role is SERVER
            if server_switch_event not in self.pending_switch_proposals:
                raise LocalProtocolError(
                    "Received server {} event without a pending proposal".format(
                        server_switch_event
                    )
                )
            _event_type = (event_type, server_switch_event)
        if server_switch_event is None and _event_type is Response:
            # A plain Response settles all outstanding proposals (refusal).
            self.pending_switch_proposals = set()
        self._fire_event_triggered_transitions(role, _event_type)
        if _event_type is Request:
            # Special case: the server machine gets to see client Request
            # events too, in annotated form.
            assert role is CLIENT
            self._fire_event_triggered_transitions(SERVER, (Request, CLIENT))
        self._fire_state_triggered_transitions()

    def _fire_event_triggered_transitions(self, role, event_type):
        state = self.states[role]
        try:
            new_state = EVENT_TRIGGERED_TRANSITIONS[role][state][event_type]
        except KeyError:
            raise LocalProtocolError(
                "can't handle event type {} when role={} and state={}".format(
                    event_type.__name__, role, self.states[role]
                )
            )
        self.states[role] = new_state

    def _fire_state_triggered_transitions(self):
        # Apply the joint-state rules repeatedly until nothing changes
        # (fixed point).
        while True:
            snapshot = dict(self.states)

            # Hard-coded special case: both of
            #     DONE -> MIGHT_SWITCH_PROTOCOL
            #     DONE -> MUST_CLOSE
            # can be enabled at once (e.g. an HTTP/1.0 client requesting
            # CONNECT).  The protocol switch takes priority: the client
            # either proceeds to SWITCHED_PROTOCOL (after which the close
            # is not our business), or the server denies the request and
            # the client falls back to DONE and then MUST_CLOSE.
            if self.pending_switch_proposals:
                if self.states[CLIENT] is DONE:
                    self.states[CLIENT] = MIGHT_SWITCH_PROTOCOL

            if not self.pending_switch_proposals:
                if self.states[CLIENT] is MIGHT_SWITCH_PROTOCOL:
                    self.states[CLIENT] = DONE

            if not self.keep_alive:
                for role in (CLIENT, SERVER):
                    if self.states[role] is DONE:
                        self.states[role] = MUST_CLOSE

            # Tabular joint-state transitions.
            joint = (self.states[CLIENT], self.states[SERVER])
            self.states.update(STATE_TRIGGERED_TRANSITIONS.get(joint, {}))

            if self.states == snapshot:
                # Fixed point reached.
                return

    def start_next_cycle(self):
        """Reset both roles to IDLE for reuse; requires DONE/DONE."""
        if self.states != {CLIENT: DONE, SERVER: DONE}:
            raise LocalProtocolError(
                "not in a reusable state. self.states={}".format(self.states)
            )
        # DONE/DONE should be unreachable with either of these active, but
        # let's be sure.
        assert self.keep_alive
        assert not self.pending_switch_proposals
        self.states = {CLIENT: IDLE, SERVER: IDLE}


# ================================================================
# h11/_util.py -- exceptions, validation, and sentinel helpers
# ================================================================

__all__ = [
    "ProtocolError",
    "LocalProtocolError",
    "RemoteProtocolError",
    "validate",
    "make_sentinel",
    "bytesify",
]


class ProtocolError(Exception):
    """Abstract base for violations of the HTTP/1.1 protocol.

    Concrete subclasses: :exc:`LocalProtocolError` (you tried to do
    something HTTP/1.1 says is illegal) and :exc:`RemoteProtocolError`
    (the remote peer did).  See :ref:`error-handling` for details.

    .. attribute:: error_status_hint

       Suggested response status code if this error occurred while
       handling a request: useful as a reply to a misbehaving peer when
       implementing a server, or (for a LocalProtocolError) as a hint at
       how your peer might have responded had h11 let you continue.
       Defaults to 400 Bad Request, the generic catch-all for protocol
       violations.
    """

    def __init__(self, msg, error_status_hint=400):
        if type(self) is ProtocolError:
            raise TypeError("tried to directly instantiate ProtocolError")
        Exception.__init__(self, msg)
        self.error_status_hint = error_status_hint


# Strategy: LocalProtocolError can surface from many public APIs (send(),
# the event constructors, ...), while RemoteProtocolError surfaces only
# from receive_data().  So internally we always raise LocalProtocolError,
# and receive_data() translates peer-caused ones via the method below.
class LocalProtocolError(ProtocolError):
    def _reraise_as_remote_protocol_error(self):
        # Must be called from inside an except: block.
        #
        # Retyping 'self' in place is the easy way to get an equivalent
        # RemoteProtocolError:
        self.__class__ = RemoteProtocolError
        # Re-raising is the subtle part: a bare 'raise' would re-raise
        # under the *old* type, because Python tracks the exception type
        # (exc_info[0]) separately from the exception object (exc_info[1])
        # and we only changed the latter.  On py3 the traceback lives on
        # the exception object, so explicitly re-raising the retyped
        # object preserves it:
        raise self


class RemoteProtocolError(ProtocolError):
    pass


def validate(regex, data, msg="malformed data", *format_args):
    """fullmatch *data* against *regex*, or raise LocalProtocolError(msg).

    Returns the match's groupdict().  *format_args*, when given, are
    applied to *msg* via str.format before raising.
    """
    match = regex.fullmatch(data)
    if not match:
        if format_args:
            msg = msg.format(*format_args)
        raise LocalProtocolError(msg)
    return match.groupdict()


# Sentinel values:
#  - identity-based comparison and hashing (inherited from object)
#  - a nice repr
#  - the *bonus property* type(sentinel) is sentinel, which makes
#    type(event)-based dispatch work uniformly on events and sentinels.
class _SentinelBase(type):
    def __repr__(self):
        return self.__name__


def make_sentinel(name):
    """Create a self-typed sentinel object named *name*."""
    cls = _SentinelBase(name, (_SentinelBase,), {})
    cls.__class__ = cls
    return cls


def bytesify(s):
    """Coerce an ascii str or a bytes-like object to bytes.

    Used for methods, request targets, HTTP versions, header names, and
    header values.  Rejects int explicitly, since bytes(int) would
    silently produce a zero-filled buffer.
    """
    # Fast-path:
    if type(s) is bytes:
        return s
    if isinstance(s, str):
        s = s.encode("ascii")
    if isinstance(s, int):
        raise TypeError("expected bytes-like object, not int")
    return bytes(s)


# ----------------------------------------------------------------
# h11/_version.py follows in the original patch.  That file must be kept
# very simple, because it is consumed from several places -- imported by
# h11/__init__.py, execfile'd by setup.py, etc.
# We use a simple scheme:
#     1.0.0 -> 1.0.0+dev -> 1.1.0 -> 1.1.0+dev
# where the +dev versions are never released into the wild; they're just
# what we stick into the VCS between releases.
#
# This is compatible with PEP 440:
#     http://legacy.python.org/dev/peps/pep-0440/
# via the use of the "local suffix" "+dev", which is disallowed on index
# servers and causes 1.0.0+dev to sort after plain 1.0.0 -- which is what
# we want.  (Contrast with the special suffix 1.0.0.dev, which sorts
# *before* 1.0.0.)

__version__ = "0.12.0"

# ================================================================
# h11/_writers.py -- code to write HTTP data
# ================================================================
# (Fixed: the upstream header said "read"; this module holds the writers.)
#
# Strategy: each writer takes an event plus a write-some-bytes function,
# which it calls.
#
# WRITERS is a dict describing how to pick a writer.  It maps states to
# either:
#   - a writer function, or
#   - for body writers, a dict of framing-dependent writer factories.

from ._events import Data, EndOfMessage
from ._state import CLIENT, IDLE, SEND_BODY, SEND_RESPONSE, SERVER
from ._util import LocalProtocolError

__all__ = ["WRITERS"]


def write_headers(headers, write):
    """Serialize *headers* (Host first) followed by the terminating CRLF.

    "Since the Host field-value is critical information for handling a
    request, a user agent SHOULD generate Host as the first header field
    following the request-line." - RFC 7230
    """
    raw_items = headers._full_items
    # First pass: emit Host (if present) ahead of everything else.
    for raw_name, name, value in raw_items:
        if name == b"host":
            write(b"%s: %s\r\n" % (raw_name, value))
    # Second pass: everything that isn't Host, in original order.
    for raw_name, name, value in raw_items:
        if name != b"host":
            write(b"%s: %s\r\n" % (raw_name, value))
    write(b"\r\n")


def write_request(request, write):
    """Serialize a Request line plus its headers (HTTP/1.1 only).

    Raises LocalProtocolError for any other HTTP version.
    """
    if request.http_version != b"1.1":
        raise LocalProtocolError("I only send HTTP/1.1")
    write(b"%s %s HTTP/1.1\r\n" % (request.method, request.target))
    write_headers(request.headers, write)


# Shared between InformationalResponse and Response.
def write_any_response(response, write):
    """Serialize a status line plus headers (HTTP/1.1 only).

    Raises LocalProtocolError for any other HTTP version.
    """
    if response.http_version != b"1.1":
        raise LocalProtocolError("I only send HTTP/1.1")
    status_bytes = str(response.status_code).encode("ascii")
    # We don't bother sending ascii status messages like "OK"; they're
    # optional and ignored by the protocol.  (But the space after the
    # numeric status code is mandatory.)
    #
    # XX FIXME: could at least make an effort to pull the status message
    # out of stdlib's http.HTTPStatus table.  Or maybe just steal their
    # enums (either by import or copy/paste).  We already accept them as
    # status codes since they're of type IntEnum < int.
    write(b"HTTP/1.1 %s %s\r\n" % (status_bytes, response.reason))
    write_headers(response.headers, write)


class BodyWriter:
    """Base class dispatching Data/EndOfMessage to send_data/send_eom."""

    def __call__(self, event, write):
        if type(event) is Data:
            self.send_data(event.data, write)
        elif type(event) is EndOfMessage:
            self.send_eom(event.headers, write)
        else:  # pragma: no cover
            assert False


# These writers are all careful not to do anything to 'data' except call
# len(data) and write(data).  This allows us to transparently pass through
# funny objects, like placeholders referring to files on disk that will be
# sent via sendfile(2).
class ContentLengthWriter(BodyWriter):
    """Body writer for a fixed, declared Content-Length."""

    def __init__(self, length):
        # Remaining bytes the caller is still allowed/required to send.
        self._length = length

    def send_data(self, data, write):
        self._length -= len(data)
        if self._length < 0:
            raise LocalProtocolError("Too much data for declared Content-Length")
        write(data)

    def send_eom(self, headers, write):
        if self._length != 0:
            raise LocalProtocolError("Too little data for declared Content-Length")
        if headers:
            raise LocalProtocolError("Content-Length and trailers don't mix")


class ChunkedWriter(BodyWriter):
    """Body writer for Transfer-Encoding: chunked."""

    def send_data(self, data, write):
        # If we encoded 0-length data in the naive way, it would look like
        # an end-of-message marker, so skip empty chunks entirely.
        if not data:
            return
        write(b"%x\r\n" % len(data))
        write(data)
        write(b"\r\n")

    def send_eom(self, headers, write):
        # Terminal zero-length chunk, then any trailers, then blank line.
        write(b"0\r\n")
        write_headers(headers, write)


class Http10Writer(BodyWriter):
    """Body writer for HTTP/1.0-style close-delimited bodies."""

    def send_data(self, data, write):
        write(data)

    def send_eom(self, headers, write):
        if headers:
            raise LocalProtocolError("can't send trailers to HTTP/1.0 client")
        # No need to close the socket ourselves; that will be taken care
        # of by the Connection: close machinery.


WRITERS = {
    (CLIENT, IDLE): write_request,
    (SERVER, IDLE): write_any_response,
    (SERVER, SEND_RESPONSE): write_any_response,
    SEND_BODY: {
        "chunked": ChunkedWriter,
        "content-length": ContentLengthWriter,
        "http/1.0": Http10Writer,
    },
}

# ----------------------------------------------------------------
# The original patch continues with:
#   h11/tests/__init__.py        -- empty file
#   h11/tests/data/test-file     -- single line of content:
#     92b12bc045050b55b848d37167a1a63947c364579889ce1d39788e45e9fac9e5
#   h11/tests/helpers.py         -- test helpers
b/.venv/lib/python3.9/site-packages/h11/tests/helpers.py new file mode 100644 index 0000000..9d2cf38 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/h11/tests/helpers.py @@ -0,0 +1,77 @@ +from .._connection import * +from .._events import * +from .._state import * + + +def get_all_events(conn): + got_events = [] + while True: + event = conn.next_event() + if event in (NEED_DATA, PAUSED): + break + got_events.append(event) + if type(event) is ConnectionClosed: + break + return got_events + + +def receive_and_get(conn, data): + conn.receive_data(data) + return get_all_events(conn) + + +# Merges adjacent Data events, converts payloads to bytestrings, and removes +# chunk boundaries. +def normalize_data_events(in_events): + out_events = [] + for event in in_events: + if type(event) is Data: + event.data = bytes(event.data) + event.chunk_start = False + event.chunk_end = False + if out_events and type(out_events[-1]) is type(event) is Data: + out_events[-1].data += event.data + else: + out_events.append(event) + return out_events + + +# Given that we want to write tests that push some events through a Connection +# and check that its state updates appropriately... we might as make a habit +# of pushing them through two Connections with a fake network link in +# between. +class ConnectionPair: + def __init__(self): + self.conn = {CLIENT: Connection(CLIENT), SERVER: Connection(SERVER)} + self.other = {CLIENT: SERVER, SERVER: CLIENT} + + @property + def conns(self): + return self.conn.values() + + # expect="match" if expect=send_events; expect=[...] 
to say what expected + def send(self, role, send_events, expect="match"): + if not isinstance(send_events, list): + send_events = [send_events] + data = b"" + closed = False + for send_event in send_events: + new_data = self.conn[role].send(send_event) + if new_data is None: + closed = True + else: + data += new_data + # send uses b"" to mean b"", and None to mean closed + # receive uses b"" to mean closed, and None to mean "try again" + # so we have to translate between the two conventions + if data: + self.conn[self.other[role]].receive_data(data) + if closed: + self.conn[self.other[role]].receive_data(b"") + got_events = get_all_events(self.conn[self.other[role]]) + if expect == "match": + expect = send_events + if not isinstance(expect, list): + expect = [expect] + assert got_events == expect + return data diff --git a/.venv/lib/python3.9/site-packages/h11/tests/test_against_stdlib_http.py b/.venv/lib/python3.9/site-packages/h11/tests/test_against_stdlib_http.py new file mode 100644 index 0000000..e6c5db4 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/h11/tests/test_against_stdlib_http.py @@ -0,0 +1,111 @@ +import json +import os.path +import socket +import socketserver +import threading +from contextlib import closing, contextmanager +from http.server import SimpleHTTPRequestHandler +from urllib.request import urlopen + +import h11 + + +@contextmanager +def socket_server(handler): + httpd = socketserver.TCPServer(("127.0.0.1", 0), handler) + thread = threading.Thread( + target=httpd.serve_forever, kwargs={"poll_interval": 0.01} + ) + thread.daemon = True + try: + thread.start() + yield httpd + finally: + httpd.shutdown() + + +test_file_path = os.path.join(os.path.dirname(__file__), "data/test-file") +with open(test_file_path, "rb") as f: + test_file_data = f.read() + + +class SingleMindedRequestHandler(SimpleHTTPRequestHandler): + def translate_path(self, path): + return test_file_path + + +def test_h11_as_client(): + with 
socket_server(SingleMindedRequestHandler) as httpd: + with closing(socket.create_connection(httpd.server_address)) as s: + c = h11.Connection(h11.CLIENT) + + s.sendall( + c.send( + h11.Request( + method="GET", target="/foo", headers=[("Host", "localhost")] + ) + ) + ) + s.sendall(c.send(h11.EndOfMessage())) + + data = bytearray() + while True: + event = c.next_event() + print(event) + if event is h11.NEED_DATA: + # Use a small read buffer to make things more challenging + # and exercise more paths :-) + c.receive_data(s.recv(10)) + continue + if type(event) is h11.Response: + assert event.status_code == 200 + if type(event) is h11.Data: + data += event.data + if type(event) is h11.EndOfMessage: + break + assert bytes(data) == test_file_data + + +class H11RequestHandler(socketserver.BaseRequestHandler): + def handle(self): + with closing(self.request) as s: + c = h11.Connection(h11.SERVER) + request = None + while True: + event = c.next_event() + if event is h11.NEED_DATA: + # Use a small read buffer to make things more challenging + # and exercise more paths :-) + c.receive_data(s.recv(10)) + continue + if type(event) is h11.Request: + request = event + if type(event) is h11.EndOfMessage: + break + info = json.dumps( + { + "method": request.method.decode("ascii"), + "target": request.target.decode("ascii"), + "headers": { + name.decode("ascii"): value.decode("ascii") + for (name, value) in request.headers + }, + } + ) + s.sendall(c.send(h11.Response(status_code=200, headers=[]))) + s.sendall(c.send(h11.Data(data=info.encode("ascii")))) + s.sendall(c.send(h11.EndOfMessage())) + + +def test_h11_as_server(): + with socket_server(H11RequestHandler) as httpd: + host, port = httpd.server_address + url = "http://{}:{}/some-path".format(host, port) + with closing(urlopen(url)) as f: + assert f.getcode() == 200 + data = f.read() + info = json.loads(data.decode("ascii")) + print(info) + assert info["method"] == "GET" + assert info["target"] == "/some-path" + assert "urllib" 
in info["headers"]["user-agent"] diff --git a/.venv/lib/python3.9/site-packages/h11/tests/test_connection.py b/.venv/lib/python3.9/site-packages/h11/tests/test_connection.py new file mode 100644 index 0000000..baadec8 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/h11/tests/test_connection.py @@ -0,0 +1,1078 @@ +import pytest + +from .._connection import _body_framing, _keep_alive, Connection, NEED_DATA, PAUSED +from .._events import * +from .._state import * +from .._util import LocalProtocolError, RemoteProtocolError +from .helpers import ConnectionPair, get_all_events, receive_and_get + + +def test__keep_alive(): + assert _keep_alive( + Request(method="GET", target="/", headers=[("Host", "Example.com")]) + ) + assert not _keep_alive( + Request( + method="GET", + target="/", + headers=[("Host", "Example.com"), ("Connection", "close")], + ) + ) + assert not _keep_alive( + Request( + method="GET", + target="/", + headers=[("Host", "Example.com"), ("Connection", "a, b, cLOse, foo")], + ) + ) + assert not _keep_alive( + Request(method="GET", target="/", headers=[], http_version="1.0") + ) + + assert _keep_alive(Response(status_code=200, headers=[])) + assert not _keep_alive(Response(status_code=200, headers=[("Connection", "close")])) + assert not _keep_alive( + Response(status_code=200, headers=[("Connection", "a, b, cLOse, foo")]) + ) + assert not _keep_alive(Response(status_code=200, headers=[], http_version="1.0")) + + +def test__body_framing(): + def headers(cl, te): + headers = [] + if cl is not None: + headers.append(("Content-Length", str(cl))) + if te: + headers.append(("Transfer-Encoding", "chunked")) + return headers + + def resp(status_code=200, cl=None, te=False): + return Response(status_code=status_code, headers=headers(cl, te)) + + def req(cl=None, te=False): + h = headers(cl, te) + h += [("Host", "example.com")] + return Request(method="GET", target="/", headers=h) + + # Special cases where the headers are ignored: + for kwargs in [{}, {"cl": 
100}, {"te": True}, {"cl": 100, "te": True}]: + for meth, r in [ + (b"HEAD", resp(**kwargs)), + (b"GET", resp(status_code=204, **kwargs)), + (b"GET", resp(status_code=304, **kwargs)), + ]: + assert _body_framing(meth, r) == ("content-length", (0,)) + + # Transfer-encoding + for kwargs in [{"te": True}, {"cl": 100, "te": True}]: + for meth, r in [(None, req(**kwargs)), (b"GET", resp(**kwargs))]: + assert _body_framing(meth, r) == ("chunked", ()) + + # Content-Length + for meth, r in [(None, req(cl=100)), (b"GET", resp(cl=100))]: + assert _body_framing(meth, r) == ("content-length", (100,)) + + # No headers + assert _body_framing(None, req()) == ("content-length", (0,)) + assert _body_framing(b"GET", resp()) == ("http/1.0", ()) + + +def test_Connection_basics_and_content_length(): + with pytest.raises(ValueError): + Connection("CLIENT") + + p = ConnectionPair() + assert p.conn[CLIENT].our_role is CLIENT + assert p.conn[CLIENT].their_role is SERVER + assert p.conn[SERVER].our_role is SERVER + assert p.conn[SERVER].their_role is CLIENT + + data = p.send( + CLIENT, + Request( + method="GET", + target="/", + headers=[("Host", "example.com"), ("Content-Length", "10")], + ), + ) + assert data == ( + b"GET / HTTP/1.1\r\n" b"Host: example.com\r\n" b"Content-Length: 10\r\n\r\n" + ) + + for conn in p.conns: + assert conn.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + assert p.conn[CLIENT].our_state is SEND_BODY + assert p.conn[CLIENT].their_state is SEND_RESPONSE + assert p.conn[SERVER].our_state is SEND_RESPONSE + assert p.conn[SERVER].their_state is SEND_BODY + + assert p.conn[CLIENT].their_http_version is None + assert p.conn[SERVER].their_http_version == b"1.1" + + data = p.send(SERVER, InformationalResponse(status_code=100, headers=[])) + assert data == b"HTTP/1.1 100 \r\n\r\n" + + data = p.send(SERVER, Response(status_code=200, headers=[("Content-Length", "11")])) + assert data == b"HTTP/1.1 200 \r\nContent-Length: 11\r\n\r\n" + + for conn in p.conns: + assert 
conn.states == {CLIENT: SEND_BODY, SERVER: SEND_BODY} + + assert p.conn[CLIENT].their_http_version == b"1.1" + assert p.conn[SERVER].their_http_version == b"1.1" + + data = p.send(CLIENT, Data(data=b"12345")) + assert data == b"12345" + data = p.send( + CLIENT, Data(data=b"67890"), expect=[Data(data=b"67890"), EndOfMessage()] + ) + assert data == b"67890" + data = p.send(CLIENT, EndOfMessage(), expect=[]) + assert data == b"" + + for conn in p.conns: + assert conn.states == {CLIENT: DONE, SERVER: SEND_BODY} + + data = p.send(SERVER, Data(data=b"1234567890")) + assert data == b"1234567890" + data = p.send(SERVER, Data(data=b"1"), expect=[Data(data=b"1"), EndOfMessage()]) + assert data == b"1" + data = p.send(SERVER, EndOfMessage(), expect=[]) + assert data == b"" + + for conn in p.conns: + assert conn.states == {CLIENT: DONE, SERVER: DONE} + + +def test_chunked(): + p = ConnectionPair() + + p.send( + CLIENT, + Request( + method="GET", + target="/", + headers=[("Host", "example.com"), ("Transfer-Encoding", "chunked")], + ), + ) + data = p.send(CLIENT, Data(data=b"1234567890", chunk_start=True, chunk_end=True)) + assert data == b"a\r\n1234567890\r\n" + data = p.send(CLIENT, Data(data=b"abcde", chunk_start=True, chunk_end=True)) + assert data == b"5\r\nabcde\r\n" + data = p.send(CLIENT, Data(data=b""), expect=[]) + assert data == b"" + data = p.send(CLIENT, EndOfMessage(headers=[("hello", "there")])) + assert data == b"0\r\nhello: there\r\n\r\n" + + p.send( + SERVER, Response(status_code=200, headers=[("Transfer-Encoding", "chunked")]) + ) + p.send(SERVER, Data(data=b"54321", chunk_start=True, chunk_end=True)) + p.send(SERVER, Data(data=b"12345", chunk_start=True, chunk_end=True)) + p.send(SERVER, EndOfMessage()) + + for conn in p.conns: + assert conn.states == {CLIENT: DONE, SERVER: DONE} + + +def test_chunk_boundaries(): + conn = Connection(our_role=SERVER) + + request = ( + b"POST / HTTP/1.1\r\n" + b"Host: example.com\r\n" + b"Transfer-Encoding: chunked\r\n" + 
b"\r\n" + ) + conn.receive_data(request) + assert conn.next_event() == Request( + method="POST", + target="/", + headers=[("Host", "example.com"), ("Transfer-Encoding", "chunked")], + ) + assert conn.next_event() is NEED_DATA + + conn.receive_data(b"5\r\nhello\r\n") + assert conn.next_event() == Data(data=b"hello", chunk_start=True, chunk_end=True) + + conn.receive_data(b"5\r\nhel") + assert conn.next_event() == Data(data=b"hel", chunk_start=True, chunk_end=False) + + conn.receive_data(b"l") + assert conn.next_event() == Data(data=b"l", chunk_start=False, chunk_end=False) + + conn.receive_data(b"o\r\n") + assert conn.next_event() == Data(data=b"o", chunk_start=False, chunk_end=True) + + conn.receive_data(b"5\r\nhello") + assert conn.next_event() == Data(data=b"hello", chunk_start=True, chunk_end=True) + + conn.receive_data(b"\r\n") + assert conn.next_event() == NEED_DATA + + conn.receive_data(b"0\r\n\r\n") + assert conn.next_event() == EndOfMessage() + + +def test_client_talking_to_http10_server(): + c = Connection(CLIENT) + c.send(Request(method="GET", target="/", headers=[("Host", "example.com")])) + c.send(EndOfMessage()) + assert c.our_state is DONE + # No content-length, so Http10 framing for body + assert receive_and_get(c, b"HTTP/1.0 200 OK\r\n\r\n") == [ + Response(status_code=200, headers=[], http_version="1.0", reason=b"OK") + ] + assert c.our_state is MUST_CLOSE + assert receive_and_get(c, b"12345") == [Data(data=b"12345")] + assert receive_and_get(c, b"67890") == [Data(data=b"67890")] + assert receive_and_get(c, b"") == [EndOfMessage(), ConnectionClosed()] + assert c.their_state is CLOSED + + +def test_server_talking_to_http10_client(): + c = Connection(SERVER) + # No content-length, so no body + # NB: no host header + assert receive_and_get(c, b"GET / HTTP/1.0\r\n\r\n") == [ + Request(method="GET", target="/", headers=[], http_version="1.0"), + EndOfMessage(), + ] + assert c.their_state is MUST_CLOSE + + # We automatically Connection: close back at 
them + assert ( + c.send(Response(status_code=200, headers=[])) + == b"HTTP/1.1 200 \r\nConnection: close\r\n\r\n" + ) + + assert c.send(Data(data=b"12345")) == b"12345" + assert c.send(EndOfMessage()) == b"" + assert c.our_state is MUST_CLOSE + + # Check that it works if they do send Content-Length + c = Connection(SERVER) + # NB: no host header + assert receive_and_get(c, b"POST / HTTP/1.0\r\nContent-Length: 10\r\n\r\n1") == [ + Request( + method="POST", + target="/", + headers=[("Content-Length", "10")], + http_version="1.0", + ), + Data(data=b"1"), + ] + assert receive_and_get(c, b"234567890") == [Data(data=b"234567890"), EndOfMessage()] + assert c.their_state is MUST_CLOSE + assert receive_and_get(c, b"") == [ConnectionClosed()] + + +def test_automatic_transfer_encoding_in_response(): + # Check that in responses, the user can specify either Transfer-Encoding: + # chunked or no framing at all, and in both cases we automatically select + # the right option depending on whether the peer speaks HTTP/1.0 or + # HTTP/1.1 + for user_headers in [ + [("Transfer-Encoding", "chunked")], + [], + # In fact, this even works if Content-Length is set, + # because if both are set then Transfer-Encoding wins + [("Transfer-Encoding", "chunked"), ("Content-Length", "100")], + ]: + p = ConnectionPair() + p.send( + CLIENT, + [ + Request(method="GET", target="/", headers=[("Host", "example.com")]), + EndOfMessage(), + ], + ) + # When speaking to HTTP/1.1 client, all of the above cases get + # normalized to Transfer-Encoding: chunked + p.send( + SERVER, + Response(status_code=200, headers=user_headers), + expect=Response( + status_code=200, headers=[("Transfer-Encoding", "chunked")] + ), + ) + + # When speaking to HTTP/1.0 client, all of the above cases get + # normalized to no-framing-headers + c = Connection(SERVER) + receive_and_get(c, b"GET / HTTP/1.0\r\n\r\n") + assert ( + c.send(Response(status_code=200, headers=user_headers)) + == b"HTTP/1.1 200 \r\nConnection: close\r\n\r\n" 
+ ) + assert c.send(Data(data=b"12345")) == b"12345" + + +def test_automagic_connection_close_handling(): + p = ConnectionPair() + # If the user explicitly sets Connection: close, then we notice and + # respect it + p.send( + CLIENT, + [ + Request( + method="GET", + target="/", + headers=[("Host", "example.com"), ("Connection", "close")], + ), + EndOfMessage(), + ], + ) + for conn in p.conns: + assert conn.states[CLIENT] is MUST_CLOSE + # And if the client sets it, the server automatically echoes it back + p.send( + SERVER, + # no header here... + [Response(status_code=204, headers=[]), EndOfMessage()], + # ...but oh look, it arrived anyway + expect=[ + Response(status_code=204, headers=[("connection", "close")]), + EndOfMessage(), + ], + ) + for conn in p.conns: + assert conn.states == {CLIENT: MUST_CLOSE, SERVER: MUST_CLOSE} + + +def test_100_continue(): + def setup(): + p = ConnectionPair() + p.send( + CLIENT, + Request( + method="GET", + target="/", + headers=[ + ("Host", "example.com"), + ("Content-Length", "100"), + ("Expect", "100-continue"), + ], + ), + ) + for conn in p.conns: + assert conn.client_is_waiting_for_100_continue + assert not p.conn[CLIENT].they_are_waiting_for_100_continue + assert p.conn[SERVER].they_are_waiting_for_100_continue + return p + + # Disabled by 100 Continue + p = setup() + p.send(SERVER, InformationalResponse(status_code=100, headers=[])) + for conn in p.conns: + assert not conn.client_is_waiting_for_100_continue + assert not conn.they_are_waiting_for_100_continue + + # Disabled by a real response + p = setup() + p.send( + SERVER, Response(status_code=200, headers=[("Transfer-Encoding", "chunked")]) + ) + for conn in p.conns: + assert not conn.client_is_waiting_for_100_continue + assert not conn.they_are_waiting_for_100_continue + + # Disabled by the client going ahead and sending stuff anyway + p = setup() + p.send(CLIENT, Data(data=b"12345")) + for conn in p.conns: + assert not conn.client_is_waiting_for_100_continue + assert 
not conn.they_are_waiting_for_100_continue + + +def test_max_incomplete_event_size_countermeasure(): + # Infinitely long headers are definitely not okay + c = Connection(SERVER) + c.receive_data(b"GET / HTTP/1.0\r\nEndless: ") + assert c.next_event() is NEED_DATA + with pytest.raises(RemoteProtocolError): + while True: + c.receive_data(b"a" * 1024) + c.next_event() + + # Checking that the same header is accepted / rejected depending on the + # max_incomplete_event_size setting: + c = Connection(SERVER, max_incomplete_event_size=5000) + c.receive_data(b"GET / HTTP/1.0\r\nBig: ") + c.receive_data(b"a" * 4000) + c.receive_data(b"\r\n\r\n") + assert get_all_events(c) == [ + Request( + method="GET", target="/", http_version="1.0", headers=[("big", "a" * 4000)] + ), + EndOfMessage(), + ] + + c = Connection(SERVER, max_incomplete_event_size=4000) + c.receive_data(b"GET / HTTP/1.0\r\nBig: ") + c.receive_data(b"a" * 4000) + with pytest.raises(RemoteProtocolError): + c.next_event() + + # Temporarily exceeding the size limit is fine, as long as its done with + # complete events: + c = Connection(SERVER, max_incomplete_event_size=5000) + c.receive_data(b"GET / HTTP/1.0\r\nContent-Length: 10000") + c.receive_data(b"\r\n\r\n" + b"a" * 10000) + assert get_all_events(c) == [ + Request( + method="GET", + target="/", + http_version="1.0", + headers=[("Content-Length", "10000")], + ), + Data(data=b"a" * 10000), + EndOfMessage(), + ] + + c = Connection(SERVER, max_incomplete_event_size=100) + # Two pipelined requests to create a way-too-big receive buffer... 
but + # it's fine because we're not checking + c.receive_data( + b"GET /1 HTTP/1.1\r\nHost: a\r\n\r\n" + b"GET /2 HTTP/1.1\r\nHost: b\r\n\r\n" + b"X" * 1000 + ) + assert get_all_events(c) == [ + Request(method="GET", target="/1", headers=[("host", "a")]), + EndOfMessage(), + ] + # Even more data comes in, still no problem + c.receive_data(b"X" * 1000) + # We can respond and reuse to get the second pipelined request + c.send(Response(status_code=200, headers=[])) + c.send(EndOfMessage()) + c.start_next_cycle() + assert get_all_events(c) == [ + Request(method="GET", target="/2", headers=[("host", "b")]), + EndOfMessage(), + ] + # But once we unpause and try to read the next message, and find that it's + # incomplete and the buffer is *still* way too large, then *that's* a + # problem: + c.send(Response(status_code=200, headers=[])) + c.send(EndOfMessage()) + c.start_next_cycle() + with pytest.raises(RemoteProtocolError): + c.next_event() + + +def test_reuse_simple(): + p = ConnectionPair() + p.send( + CLIENT, + [Request(method="GET", target="/", headers=[("Host", "a")]), EndOfMessage()], + ) + p.send(SERVER, [Response(status_code=200, headers=[]), EndOfMessage()]) + for conn in p.conns: + assert conn.states == {CLIENT: DONE, SERVER: DONE} + conn.start_next_cycle() + + p.send( + CLIENT, + [ + Request(method="DELETE", target="/foo", headers=[("Host", "a")]), + EndOfMessage(), + ], + ) + p.send(SERVER, [Response(status_code=404, headers=[]), EndOfMessage()]) + + +def test_pipelining(): + # Client doesn't support pipelining, so we have to do this by hand + c = Connection(SERVER) + assert c.next_event() is NEED_DATA + # 3 requests all bunched up + c.receive_data( + b"GET /1 HTTP/1.1\r\nHost: a.com\r\nContent-Length: 5\r\n\r\n" + b"12345" + b"GET /2 HTTP/1.1\r\nHost: a.com\r\nContent-Length: 5\r\n\r\n" + b"67890" + b"GET /3 HTTP/1.1\r\nHost: a.com\r\n\r\n" + ) + assert get_all_events(c) == [ + Request( + method="GET", + target="/1", + headers=[("Host", "a.com"), 
("Content-Length", "5")], + ), + Data(data=b"12345"), + EndOfMessage(), + ] + assert c.their_state is DONE + assert c.our_state is SEND_RESPONSE + + assert c.next_event() is PAUSED + + c.send(Response(status_code=200, headers=[])) + c.send(EndOfMessage()) + assert c.their_state is DONE + assert c.our_state is DONE + + c.start_next_cycle() + + assert get_all_events(c) == [ + Request( + method="GET", + target="/2", + headers=[("Host", "a.com"), ("Content-Length", "5")], + ), + Data(data=b"67890"), + EndOfMessage(), + ] + assert c.next_event() is PAUSED + c.send(Response(status_code=200, headers=[])) + c.send(EndOfMessage()) + c.start_next_cycle() + + assert get_all_events(c) == [ + Request(method="GET", target="/3", headers=[("Host", "a.com")]), + EndOfMessage(), + ] + # Doesn't pause this time, no trailing data + assert c.next_event() is NEED_DATA + c.send(Response(status_code=200, headers=[])) + c.send(EndOfMessage()) + + # Arrival of more data triggers pause + assert c.next_event() is NEED_DATA + c.receive_data(b"SADF") + assert c.next_event() is PAUSED + assert c.trailing_data == (b"SADF", False) + # If EOF arrives while paused, we don't see that either: + c.receive_data(b"") + assert c.trailing_data == (b"SADF", True) + assert c.next_event() is PAUSED + c.receive_data(b"") + assert c.next_event() is PAUSED + # Can't call receive_data with non-empty buf after closing it + with pytest.raises(RuntimeError): + c.receive_data(b"FDSA") + + +def test_protocol_switch(): + for (req, deny, accept) in [ + ( + Request( + method="CONNECT", + target="example.com:443", + headers=[("Host", "foo"), ("Content-Length", "1")], + ), + Response(status_code=404, headers=[]), + Response(status_code=200, headers=[]), + ), + ( + Request( + method="GET", + target="/", + headers=[("Host", "foo"), ("Content-Length", "1"), ("Upgrade", "a, b")], + ), + Response(status_code=200, headers=[]), + InformationalResponse(status_code=101, headers=[("Upgrade", "a")]), + ), + ( + Request( + 
method="CONNECT", + target="example.com:443", + headers=[("Host", "foo"), ("Content-Length", "1"), ("Upgrade", "a, b")], + ), + Response(status_code=404, headers=[]), + # Accept CONNECT, not upgrade + Response(status_code=200, headers=[]), + ), + ( + Request( + method="CONNECT", + target="example.com:443", + headers=[("Host", "foo"), ("Content-Length", "1"), ("Upgrade", "a, b")], + ), + Response(status_code=404, headers=[]), + # Accept Upgrade, not CONNECT + InformationalResponse(status_code=101, headers=[("Upgrade", "b")]), + ), + ]: + + def setup(): + p = ConnectionPair() + p.send(CLIENT, req) + # No switch-related state change stuff yet; the client has to + # finish the request before that kicks in + for conn in p.conns: + assert conn.states[CLIENT] is SEND_BODY + p.send(CLIENT, [Data(data=b"1"), EndOfMessage()]) + for conn in p.conns: + assert conn.states[CLIENT] is MIGHT_SWITCH_PROTOCOL + assert p.conn[SERVER].next_event() is PAUSED + return p + + # Test deny case + p = setup() + p.send(SERVER, deny) + for conn in p.conns: + assert conn.states == {CLIENT: DONE, SERVER: SEND_BODY} + p.send(SERVER, EndOfMessage()) + # Check that re-use is still allowed after a denial + for conn in p.conns: + conn.start_next_cycle() + + # Test accept case + p = setup() + p.send(SERVER, accept) + for conn in p.conns: + assert conn.states == {CLIENT: SWITCHED_PROTOCOL, SERVER: SWITCHED_PROTOCOL} + conn.receive_data(b"123") + assert conn.next_event() is PAUSED + conn.receive_data(b"456") + assert conn.next_event() is PAUSED + assert conn.trailing_data == (b"123456", False) + + # Pausing in might-switch, then recovery + # (weird artificial case where the trailing data actually is valid + # HTTP for some reason, because this makes it easier to test the state + # logic) + p = setup() + sc = p.conn[SERVER] + sc.receive_data(b"GET / HTTP/1.0\r\n\r\n") + assert sc.next_event() is PAUSED + assert sc.trailing_data == (b"GET / HTTP/1.0\r\n\r\n", False) + sc.send(deny) + assert 
sc.next_event() is PAUSED + sc.send(EndOfMessage()) + sc.start_next_cycle() + assert get_all_events(sc) == [ + Request(method="GET", target="/", headers=[], http_version="1.0"), + EndOfMessage(), + ] + + # When we're DONE, have no trailing data, and the connection gets + # closed, we report ConnectionClosed(). When we're in might-switch or + # switched, we don't. + p = setup() + sc = p.conn[SERVER] + sc.receive_data(b"") + assert sc.next_event() is PAUSED + assert sc.trailing_data == (b"", True) + p.send(SERVER, accept) + assert sc.next_event() is PAUSED + + p = setup() + sc = p.conn[SERVER] + sc.receive_data(b"") == [] + assert sc.next_event() is PAUSED + sc.send(deny) + assert sc.next_event() == ConnectionClosed() + + # You can't send after switching protocols, or while waiting for a + # protocol switch + p = setup() + with pytest.raises(LocalProtocolError): + p.conn[CLIENT].send( + Request(method="GET", target="/", headers=[("Host", "a")]) + ) + p = setup() + p.send(SERVER, accept) + with pytest.raises(LocalProtocolError): + p.conn[SERVER].send(Data(data=b"123")) + + +def test_close_simple(): + # Just immediately closing a new connection without anything having + # happened yet. + for (who_shot_first, who_shot_second) in [(CLIENT, SERVER), (SERVER, CLIENT)]: + + def setup(): + p = ConnectionPair() + p.send(who_shot_first, ConnectionClosed()) + for conn in p.conns: + assert conn.states == { + who_shot_first: CLOSED, + who_shot_second: MUST_CLOSE, + } + return p + + # You can keep putting b"" into a closed connection, and you keep + # getting ConnectionClosed() out: + p = setup() + assert p.conn[who_shot_second].next_event() == ConnectionClosed() + assert p.conn[who_shot_second].next_event() == ConnectionClosed() + p.conn[who_shot_second].receive_data(b"") + assert p.conn[who_shot_second].next_event() == ConnectionClosed() + # Second party can close... 
+ p = setup() + p.send(who_shot_second, ConnectionClosed()) + for conn in p.conns: + assert conn.our_state is CLOSED + assert conn.their_state is CLOSED + # But trying to receive new data on a closed connection is a + # RuntimeError (not ProtocolError, because the problem here isn't + # violation of HTTP, it's violation of physics) + p = setup() + with pytest.raises(RuntimeError): + p.conn[who_shot_second].receive_data(b"123") + # And receiving new data on a MUST_CLOSE connection is a ProtocolError + p = setup() + p.conn[who_shot_first].receive_data(b"GET") + with pytest.raises(RemoteProtocolError): + p.conn[who_shot_first].next_event() + + +def test_close_different_states(): + req = [ + Request(method="GET", target="/foo", headers=[("Host", "a")]), + EndOfMessage(), + ] + resp = [Response(status_code=200, headers=[]), EndOfMessage()] + + # Client before request + p = ConnectionPair() + p.send(CLIENT, ConnectionClosed()) + for conn in p.conns: + assert conn.states == {CLIENT: CLOSED, SERVER: MUST_CLOSE} + + # Client after request + p = ConnectionPair() + p.send(CLIENT, req) + p.send(CLIENT, ConnectionClosed()) + for conn in p.conns: + assert conn.states == {CLIENT: CLOSED, SERVER: SEND_RESPONSE} + + # Server after request -> not allowed + p = ConnectionPair() + p.send(CLIENT, req) + with pytest.raises(LocalProtocolError): + p.conn[SERVER].send(ConnectionClosed()) + p.conn[CLIENT].receive_data(b"") + with pytest.raises(RemoteProtocolError): + p.conn[CLIENT].next_event() + + # Server after response + p = ConnectionPair() + p.send(CLIENT, req) + p.send(SERVER, resp) + p.send(SERVER, ConnectionClosed()) + for conn in p.conns: + assert conn.states == {CLIENT: MUST_CLOSE, SERVER: CLOSED} + + # Both after closing (ConnectionClosed() is idempotent) + p = ConnectionPair() + p.send(CLIENT, req) + p.send(SERVER, resp) + p.send(CLIENT, ConnectionClosed()) + p.send(SERVER, ConnectionClosed()) + p.send(CLIENT, ConnectionClosed()) + p.send(SERVER, ConnectionClosed()) + + # In the 
middle of sending -> not allowed + p = ConnectionPair() + p.send( + CLIENT, + Request( + method="GET", target="/", headers=[("Host", "a"), ("Content-Length", "10")] + ), + ) + with pytest.raises(LocalProtocolError): + p.conn[CLIENT].send(ConnectionClosed()) + p.conn[SERVER].receive_data(b"") + with pytest.raises(RemoteProtocolError): + p.conn[SERVER].next_event() + + +# Receive several requests and then client shuts down their side of the +# connection; we can respond to each +def test_pipelined_close(): + c = Connection(SERVER) + # 2 requests then a close + c.receive_data( + b"GET /1 HTTP/1.1\r\nHost: a.com\r\nContent-Length: 5\r\n\r\n" + b"12345" + b"GET /2 HTTP/1.1\r\nHost: a.com\r\nContent-Length: 5\r\n\r\n" + b"67890" + ) + c.receive_data(b"") + assert get_all_events(c) == [ + Request( + method="GET", + target="/1", + headers=[("host", "a.com"), ("content-length", "5")], + ), + Data(data=b"12345"), + EndOfMessage(), + ] + assert c.states[CLIENT] is DONE + c.send(Response(status_code=200, headers=[])) + c.send(EndOfMessage()) + assert c.states[SERVER] is DONE + c.start_next_cycle() + assert get_all_events(c) == [ + Request( + method="GET", + target="/2", + headers=[("host", "a.com"), ("content-length", "5")], + ), + Data(data=b"67890"), + EndOfMessage(), + ConnectionClosed(), + ] + assert c.states == {CLIENT: CLOSED, SERVER: SEND_RESPONSE} + c.send(Response(status_code=200, headers=[])) + c.send(EndOfMessage()) + assert c.states == {CLIENT: CLOSED, SERVER: MUST_CLOSE} + c.send(ConnectionClosed()) + assert c.states == {CLIENT: CLOSED, SERVER: CLOSED} + + +def test_sendfile(): + class SendfilePlaceholder: + def __len__(self): + return 10 + + placeholder = SendfilePlaceholder() + + def setup(header, http_version): + c = Connection(SERVER) + receive_and_get( + c, "GET / HTTP/{}\r\nHost: a\r\n\r\n".format(http_version).encode("ascii") + ) + headers = [] + if header: + headers.append(header) + c.send(Response(status_code=200, headers=headers)) + return c, 
c.send_with_data_passthrough(Data(data=placeholder)) + + c, data = setup(("Content-Length", "10"), "1.1") + assert data == [placeholder] + # Raises an error if the connection object doesn't think we've sent + # exactly 10 bytes + c.send(EndOfMessage()) + + _, data = setup(("Transfer-Encoding", "chunked"), "1.1") + assert placeholder in data + data[data.index(placeholder)] = b"x" * 10 + assert b"".join(data) == b"a\r\nxxxxxxxxxx\r\n" + + c, data = setup(None, "1.0") + assert data == [placeholder] + assert c.our_state is SEND_BODY + + +def test_errors(): + # After a receive error, you can't receive + for role in [CLIENT, SERVER]: + c = Connection(our_role=role) + c.receive_data(b"gibberish\r\n\r\n") + with pytest.raises(RemoteProtocolError): + c.next_event() + # Now any attempt to receive continues to raise + assert c.their_state is ERROR + assert c.our_state is not ERROR + print(c._cstate.states) + with pytest.raises(RemoteProtocolError): + c.next_event() + # But we can still yell at the client for sending us gibberish + if role is SERVER: + assert ( + c.send(Response(status_code=400, headers=[])) + == b"HTTP/1.1 400 \r\nConnection: close\r\n\r\n" + ) + + # After an error sending, you can no longer send + # (This is especially important for things like content-length errors, + # where there's complex internal state being modified) + def conn(role): + c = Connection(our_role=role) + if role is SERVER: + # Put it into the state where it *could* send a response... 
+ receive_and_get(c, b"GET / HTTP/1.0\r\n\r\n") + assert c.our_state is SEND_RESPONSE + return c + + for role in [CLIENT, SERVER]: + if role is CLIENT: + # This HTTP/1.0 request won't be detected as bad until after we go + # through the state machine and hit the writing code + good = Request(method="GET", target="/", headers=[("Host", "example.com")]) + bad = Request( + method="GET", + target="/", + headers=[("Host", "example.com")], + http_version="1.0", + ) + elif role is SERVER: + good = Response(status_code=200, headers=[]) + bad = Response(status_code=200, headers=[], http_version="1.0") + # Make sure 'good' actually is good + c = conn(role) + c.send(good) + assert c.our_state is not ERROR + # Do that again, but this time sending 'bad' first + c = conn(role) + with pytest.raises(LocalProtocolError): + c.send(bad) + assert c.our_state is ERROR + assert c.their_state is not ERROR + # Now 'good' is not so good + with pytest.raises(LocalProtocolError): + c.send(good) + + # And check send_failed() too + c = conn(role) + c.send_failed() + assert c.our_state is ERROR + assert c.their_state is not ERROR + # This is idempotent + c.send_failed() + assert c.our_state is ERROR + assert c.their_state is not ERROR + + +def test_idle_receive_nothing(): + # At one point this incorrectly raised an error + for role in [CLIENT, SERVER]: + c = Connection(role) + assert c.next_event() is NEED_DATA + + +def test_connection_drop(): + c = Connection(SERVER) + c.receive_data(b"GET /") + assert c.next_event() is NEED_DATA + c.receive_data(b"") + with pytest.raises(RemoteProtocolError): + c.next_event() + + +def test_408_request_timeout(): + # Should be able to send this spontaneously as a server without seeing + # anything from client + p = ConnectionPair() + p.send(SERVER, Response(status_code=408, headers=[])) + + +# This used to raise IndexError +def test_empty_request(): + c = Connection(SERVER) + c.receive_data(b"\r\n") + with pytest.raises(RemoteProtocolError): + c.next_event() + 
+ +# This used to raise IndexError +def test_empty_response(): + c = Connection(CLIENT) + c.send(Request(method="GET", target="/", headers=[("Host", "a")])) + c.receive_data(b"\r\n") + with pytest.raises(RemoteProtocolError): + c.next_event() + + +@pytest.mark.parametrize( + "data", + [ + b"\x00", + b"\x20", + b"\x16\x03\x01\x00\xa5", # Typical start of a TLS Client Hello + ], +) +def test_early_detection_of_invalid_request(data): + c = Connection(SERVER) + # Early detection should occur before even receiving a `\r\n` + c.receive_data(data) + with pytest.raises(RemoteProtocolError): + c.next_event() + + +@pytest.mark.parametrize( + "data", + [ + b"\x00", + b"\x20", + b"\x16\x03\x03\x00\x31", # Typical start of a TLS Server Hello + ], +) +def test_early_detection_of_invalid_response(data): + c = Connection(CLIENT) + # Early detection should occur before even receiving a `\r\n` + c.receive_data(data) + with pytest.raises(RemoteProtocolError): + c.next_event() + + +# This used to give different headers for HEAD and GET. +# The correct way to handle HEAD is to put whatever headers we *would* have +# put if it were a GET -- even though we know that for HEAD, those headers +# will be ignored. 
+def test_HEAD_framing_headers(): + def setup(method, http_version): + c = Connection(SERVER) + c.receive_data( + method + b" / HTTP/" + http_version + b"\r\n" + b"Host: example.com\r\n\r\n" + ) + assert type(c.next_event()) is Request + assert type(c.next_event()) is EndOfMessage + return c + + for method in [b"GET", b"HEAD"]: + # No Content-Length, HTTP/1.1 peer, should use chunked + c = setup(method, b"1.1") + assert ( + c.send(Response(status_code=200, headers=[])) == b"HTTP/1.1 200 \r\n" + b"Transfer-Encoding: chunked\r\n\r\n" + ) + + # No Content-Length, HTTP/1.0 peer, frame with connection: close + c = setup(method, b"1.0") + assert ( + c.send(Response(status_code=200, headers=[])) == b"HTTP/1.1 200 \r\n" + b"Connection: close\r\n\r\n" + ) + + # Content-Length + Transfer-Encoding, TE wins + c = setup(method, b"1.1") + assert ( + c.send( + Response( + status_code=200, + headers=[ + ("Content-Length", "100"), + ("Transfer-Encoding", "chunked"), + ], + ) + ) + == b"HTTP/1.1 200 \r\n" + b"Transfer-Encoding: chunked\r\n\r\n" + ) + + +def test_special_exceptions_for_lost_connection_in_message_body(): + c = Connection(SERVER) + c.receive_data( + b"POST / HTTP/1.1\r\n" b"Host: example.com\r\n" b"Content-Length: 100\r\n\r\n" + ) + assert type(c.next_event()) is Request + assert c.next_event() is NEED_DATA + c.receive_data(b"12345") + assert c.next_event() == Data(data=b"12345") + c.receive_data(b"") + with pytest.raises(RemoteProtocolError) as excinfo: + c.next_event() + assert "received 5 bytes" in str(excinfo.value) + assert "expected 100" in str(excinfo.value) + + c = Connection(SERVER) + c.receive_data( + b"POST / HTTP/1.1\r\n" + b"Host: example.com\r\n" + b"Transfer-Encoding: chunked\r\n\r\n" + ) + assert type(c.next_event()) is Request + assert c.next_event() is NEED_DATA + c.receive_data(b"8\r\n012345") + assert c.next_event().data == b"012345" + c.receive_data(b"") + with pytest.raises(RemoteProtocolError) as excinfo: + c.next_event() + assert "incomplete 
chunked read" in str(excinfo.value) diff --git a/.venv/lib/python3.9/site-packages/h11/tests/test_events.py b/.venv/lib/python3.9/site-packages/h11/tests/test_events.py new file mode 100644 index 0000000..e20f741 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/h11/tests/test_events.py @@ -0,0 +1,179 @@ +from http import HTTPStatus + +import pytest + +from .. import _events +from .._events import * +from .._util import LocalProtocolError + + +def test_event_bundle(): + class T(_events._EventBundle): + _fields = ["a", "b"] + _defaults = {"b": 1} + + def _validate(self): + if self.a == 0: + raise ValueError + + # basic construction and methods + t = T(a=1, b=0) + assert repr(t) == "T(a=1, b=0)" + assert t == T(a=1, b=0) + assert not (t == T(a=2, b=0)) + assert not (t != T(a=1, b=0)) + assert t != T(a=2, b=0) + with pytest.raises(TypeError): + hash(t) + + # check defaults + t = T(a=10) + assert t.a == 10 + assert t.b == 1 + + # no positional args + with pytest.raises(TypeError): + T(1) + + with pytest.raises(TypeError): + T(1, a=1, b=0) + + # unknown field + with pytest.raises(TypeError): + T(a=1, b=0, c=10) + + # missing required field + with pytest.raises(TypeError) as exc: + T(b=0) + # make sure we error on the right missing kwarg + assert "kwarg a" in str(exc.value) + + # _validate is called + with pytest.raises(ValueError): + T(a=0, b=0) + + +def test_events(): + with pytest.raises(LocalProtocolError): + # Missing Host: + req = Request( + method="GET", target="/", headers=[("a", "b")], http_version="1.1" + ) + # But this is okay (HTTP/1.0) + req = Request(method="GET", target="/", headers=[("a", "b")], http_version="1.0") + # fields are normalized + assert req.method == b"GET" + assert req.target == b"/" + assert req.headers == [(b"a", b"b")] + assert req.http_version == b"1.0" + + # This is also okay -- has a Host (with weird capitalization, which is ok) + req = Request( + method="GET", + target="/", + headers=[("a", "b"), ("hOSt", "example.com")], + 
http_version="1.1", + ) + # we normalize header capitalization + assert req.headers == [(b"a", b"b"), (b"host", b"example.com")] + + # Multiple host is bad too + with pytest.raises(LocalProtocolError): + req = Request( + method="GET", + target="/", + headers=[("Host", "a"), ("Host", "a")], + http_version="1.1", + ) + # Even for HTTP/1.0 + with pytest.raises(LocalProtocolError): + req = Request( + method="GET", + target="/", + headers=[("Host", "a"), ("Host", "a")], + http_version="1.0", + ) + + # Header values are validated + for bad_char in "\x00\r\n\f\v": + with pytest.raises(LocalProtocolError): + req = Request( + method="GET", + target="/", + headers=[("Host", "a"), ("Foo", "asd" + bad_char)], + http_version="1.0", + ) + + # But for compatibility we allow non-whitespace control characters, even + # though they're forbidden by the spec. + Request( + method="GET", + target="/", + headers=[("Host", "a"), ("Foo", "asd\x01\x02\x7f")], + http_version="1.0", + ) + + # Request target is validated + for bad_char in b"\x00\x20\x7f\xee": + target = bytearray(b"/") + target.append(bad_char) + with pytest.raises(LocalProtocolError): + Request( + method="GET", target=target, headers=[("Host", "a")], http_version="1.1" + ) + + ir = InformationalResponse(status_code=100, headers=[("Host", "a")]) + assert ir.status_code == 100 + assert ir.headers == [(b"host", b"a")] + assert ir.http_version == b"1.1" + + with pytest.raises(LocalProtocolError): + InformationalResponse(status_code=200, headers=[("Host", "a")]) + + resp = Response(status_code=204, headers=[], http_version="1.0") + assert resp.status_code == 204 + assert resp.headers == [] + assert resp.http_version == b"1.0" + + with pytest.raises(LocalProtocolError): + resp = Response(status_code=100, headers=[], http_version="1.0") + + with pytest.raises(LocalProtocolError): + Response(status_code="100", headers=[], http_version="1.0") + + with pytest.raises(LocalProtocolError): + InformationalResponse(status_code=b"100", 
headers=[], http_version="1.0") + + d = Data(data=b"asdf") + assert d.data == b"asdf" + + eom = EndOfMessage() + assert eom.headers == [] + + cc = ConnectionClosed() + assert repr(cc) == "ConnectionClosed()" + + +def test_intenum_status_code(): + # https://github.com/python-hyper/h11/issues/72 + + r = Response(status_code=HTTPStatus.OK, headers=[], http_version="1.0") + assert r.status_code == HTTPStatus.OK + assert type(r.status_code) is not type(HTTPStatus.OK) + assert type(r.status_code) is int + + +def test_header_casing(): + r = Request( + method="GET", + target="/", + headers=[("Host", "example.org"), ("Connection", "keep-alive")], + http_version="1.1", + ) + assert len(r.headers) == 2 + assert r.headers[0] == (b"host", b"example.org") + assert r.headers == [(b"host", b"example.org"), (b"connection", b"keep-alive")] + assert r.headers.raw_items() == [ + (b"Host", b"example.org"), + (b"Connection", b"keep-alive"), + ] diff --git a/.venv/lib/python3.9/site-packages/h11/tests/test_headers.py b/.venv/lib/python3.9/site-packages/h11/tests/test_headers.py new file mode 100644 index 0000000..ff3dc8d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/h11/tests/test_headers.py @@ -0,0 +1,151 @@ +import pytest + +from .._headers import * + + +def test_normalize_and_validate(): + assert normalize_and_validate([("foo", "bar")]) == [(b"foo", b"bar")] + assert normalize_and_validate([(b"foo", b"bar")]) == [(b"foo", b"bar")] + + # no leading/trailing whitespace in names + with pytest.raises(LocalProtocolError): + normalize_and_validate([(b"foo ", "bar")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([(b" foo", "bar")]) + + # no weird characters in names + with pytest.raises(LocalProtocolError) as excinfo: + normalize_and_validate([(b"foo bar", b"baz")]) + assert "foo bar" in str(excinfo.value) + with pytest.raises(LocalProtocolError): + normalize_and_validate([(b"foo\x00bar", b"baz")]) + # Not even 8-bit characters: + with 
pytest.raises(LocalProtocolError): + normalize_and_validate([(b"foo\xffbar", b"baz")]) + # And not even the control characters we allow in values: + with pytest.raises(LocalProtocolError): + normalize_and_validate([(b"foo\x01bar", b"baz")]) + + # no return or NUL characters in values + with pytest.raises(LocalProtocolError) as excinfo: + normalize_and_validate([("foo", "bar\rbaz")]) + assert "bar\\rbaz" in str(excinfo.value) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", "bar\nbaz")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", "bar\x00baz")]) + # no leading/trailing whitespace + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", "barbaz ")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", " barbaz")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", "barbaz\t")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", "\tbarbaz")]) + + # content-length + assert normalize_and_validate([("Content-Length", "1")]) == [ + (b"content-length", b"1") + ] + with pytest.raises(LocalProtocolError): + normalize_and_validate([("Content-Length", "asdf")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("Content-Length", "1x")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("Content-Length", "1"), ("Content-Length", "2")]) + assert normalize_and_validate( + [("Content-Length", "0"), ("Content-Length", "0")] + ) == [(b"content-length", b"0")] + assert normalize_and_validate([("Content-Length", "0 , 0")]) == [ + (b"content-length", b"0") + ] + with pytest.raises(LocalProtocolError): + normalize_and_validate( + [("Content-Length", "1"), ("Content-Length", "1"), ("Content-Length", "2")] + ) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("Content-Length", "1 , 1,2")]) + + # transfer-encoding + assert normalize_and_validate([("Transfer-Encoding", "chunked")]) == [ 
+ (b"transfer-encoding", b"chunked") + ] + assert normalize_and_validate([("Transfer-Encoding", "cHuNkEd")]) == [ + (b"transfer-encoding", b"chunked") + ] + with pytest.raises(LocalProtocolError) as excinfo: + normalize_and_validate([("Transfer-Encoding", "gzip")]) + assert excinfo.value.error_status_hint == 501 # Not Implemented + with pytest.raises(LocalProtocolError) as excinfo: + normalize_and_validate( + [("Transfer-Encoding", "chunked"), ("Transfer-Encoding", "gzip")] + ) + assert excinfo.value.error_status_hint == 501 # Not Implemented + + +def test_get_set_comma_header(): + headers = normalize_and_validate( + [ + ("Connection", "close"), + ("whatever", "something"), + ("connectiON", "fOo,, , BAR"), + ] + ) + + assert get_comma_header(headers, b"connection") == [b"close", b"foo", b"bar"] + + headers = set_comma_header(headers, b"newthing", ["a", "b"]) + + with pytest.raises(LocalProtocolError): + set_comma_header(headers, b"newthing", [" a", "b"]) + + assert headers == [ + (b"connection", b"close"), + (b"whatever", b"something"), + (b"connection", b"fOo,, , BAR"), + (b"newthing", b"a"), + (b"newthing", b"b"), + ] + + headers = set_comma_header(headers, b"whatever", ["different thing"]) + + assert headers == [ + (b"connection", b"close"), + (b"connection", b"fOo,, , BAR"), + (b"newthing", b"a"), + (b"newthing", b"b"), + (b"whatever", b"different thing"), + ] + + +def test_has_100_continue(): + from .._events import Request + + assert has_expect_100_continue( + Request( + method="GET", + target="/", + headers=[("Host", "example.com"), ("Expect", "100-continue")], + ) + ) + assert not has_expect_100_continue( + Request(method="GET", target="/", headers=[("Host", "example.com")]) + ) + # Case insensitive + assert has_expect_100_continue( + Request( + method="GET", + target="/", + headers=[("Host", "example.com"), ("Expect", "100-Continue")], + ) + ) + # Doesn't work in HTTP/1.0 + assert not has_expect_100_continue( + Request( + method="GET", + target="/", + 
headers=[("Host", "example.com"), ("Expect", "100-continue")], + http_version="1.0", + ) + ) diff --git a/.venv/lib/python3.9/site-packages/h11/tests/test_helpers.py b/.venv/lib/python3.9/site-packages/h11/tests/test_helpers.py new file mode 100644 index 0000000..1477947 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/h11/tests/test_helpers.py @@ -0,0 +1,23 @@ +from .helpers import * + + +def test_normalize_data_events(): + assert normalize_data_events( + [ + Data(data=bytearray(b"1")), + Data(data=b"2"), + Response(status_code=200, headers=[]), + Data(data=b"3"), + Data(data=b"4"), + EndOfMessage(), + Data(data=b"5"), + Data(data=b"6"), + Data(data=b"7"), + ] + ) == [ + Data(data=b"12"), + Response(status_code=200, headers=[]), + Data(data=b"34"), + EndOfMessage(), + Data(data=b"567"), + ] diff --git a/.venv/lib/python3.9/site-packages/h11/tests/test_io.py b/.venv/lib/python3.9/site-packages/h11/tests/test_io.py new file mode 100644 index 0000000..459a627 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/h11/tests/test_io.py @@ -0,0 +1,544 @@ +import pytest + +from .._events import * +from .._headers import Headers, normalize_and_validate +from .._readers import ( + _obsolete_line_fold, + ChunkedReader, + ContentLengthReader, + Http10Reader, + READERS, +) +from .._receivebuffer import ReceiveBuffer +from .._state import * +from .._util import LocalProtocolError +from .._writers import ( + ChunkedWriter, + ContentLengthWriter, + Http10Writer, + write_any_response, + write_headers, + write_request, + WRITERS, +) +from .helpers import normalize_data_events + +SIMPLE_CASES = [ + ( + (CLIENT, IDLE), + Request( + method="GET", + target="/a", + headers=[("Host", "foo"), ("Connection", "close")], + ), + b"GET /a HTTP/1.1\r\nHost: foo\r\nConnection: close\r\n\r\n", + ), + ( + (SERVER, SEND_RESPONSE), + Response(status_code=200, headers=[("Connection", "close")], reason=b"OK"), + b"HTTP/1.1 200 OK\r\nConnection: close\r\n\r\n", + ), + ( + (SERVER, SEND_RESPONSE), 
+ Response(status_code=200, headers=[], reason=b"OK"), + b"HTTP/1.1 200 OK\r\n\r\n", + ), + ( + (SERVER, SEND_RESPONSE), + InformationalResponse( + status_code=101, headers=[("Upgrade", "websocket")], reason=b"Upgrade" + ), + b"HTTP/1.1 101 Upgrade\r\nUpgrade: websocket\r\n\r\n", + ), + ( + (SERVER, SEND_RESPONSE), + InformationalResponse(status_code=101, headers=[], reason=b"Upgrade"), + b"HTTP/1.1 101 Upgrade\r\n\r\n", + ), +] + + +def dowrite(writer, obj): + got_list = [] + writer(obj, got_list.append) + return b"".join(got_list) + + +def tw(writer, obj, expected): + got = dowrite(writer, obj) + assert got == expected + + +def makebuf(data): + buf = ReceiveBuffer() + buf += data + return buf + + +def tr(reader, data, expected): + def check(got): + assert got == expected + # Headers should always be returned as bytes, not e.g. bytearray + # https://github.com/python-hyper/wsproto/pull/54#issuecomment-377709478 + for name, value in getattr(got, "headers", []): + print(name, value) + assert type(name) is bytes + assert type(value) is bytes + + # Simple: consume whole thing + buf = makebuf(data) + check(reader(buf)) + assert not buf + + # Incrementally growing buffer + buf = ReceiveBuffer() + for i in range(len(data)): + assert reader(buf) is None + buf += data[i : i + 1] + check(reader(buf)) + + # Trailing data + buf = makebuf(data) + buf += b"trailing" + check(reader(buf)) + assert bytes(buf) == b"trailing" + + +def test_writers_simple(): + for ((role, state), event, binary) in SIMPLE_CASES: + tw(WRITERS[role, state], event, binary) + + +def test_readers_simple(): + for ((role, state), event, binary) in SIMPLE_CASES: + tr(READERS[role, state], binary, event) + + +def test_writers_unusual(): + # Simple test of the write_headers utility routine + tw( + write_headers, + normalize_and_validate([("foo", "bar"), ("baz", "quux")]), + b"foo: bar\r\nbaz: quux\r\n\r\n", + ) + tw(write_headers, Headers([]), b"\r\n") + + # We understand HTTP/1.0, but we don't speak it + with 
pytest.raises(LocalProtocolError): + tw( + write_request, + Request( + method="GET", + target="/", + headers=[("Host", "foo"), ("Connection", "close")], + http_version="1.0", + ), + None, + ) + with pytest.raises(LocalProtocolError): + tw( + write_any_response, + Response( + status_code=200, headers=[("Connection", "close")], http_version="1.0" + ), + None, + ) + + +def test_readers_unusual(): + # Reading HTTP/1.0 + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.0\r\nSome: header\r\n\r\n", + Request( + method="HEAD", + target="/foo", + headers=[("Some", "header")], + http_version="1.0", + ), + ) + + # check no-headers, since it's only legal with HTTP/1.0 + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.0\r\n\r\n", + Request(method="HEAD", target="/foo", headers=[], http_version="1.0"), + ) + + tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.0 200 OK\r\nSome: header\r\n\r\n", + Response( + status_code=200, + headers=[("Some", "header")], + http_version="1.0", + reason=b"OK", + ), + ) + + # single-character header values (actually disallowed by the ABNF in RFC + # 7230 -- this is a bug in the standard that we originally copied...) 
+ tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.0 200 OK\r\n" b"Foo: a a a a a \r\n\r\n", + Response( + status_code=200, + headers=[("Foo", "a a a a a")], + http_version="1.0", + reason=b"OK", + ), + ) + + # Empty headers -- also legal + tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.0 200 OK\r\n" b"Foo:\r\n\r\n", + Response( + status_code=200, headers=[("Foo", "")], http_version="1.0", reason=b"OK" + ), + ) + + tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.0 200 OK\r\n" b"Foo: \t \t \r\n\r\n", + Response( + status_code=200, headers=[("Foo", "")], http_version="1.0", reason=b"OK" + ), + ) + + # Tolerate broken servers that leave off the response code + tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.0 200\r\n" b"Foo: bar\r\n\r\n", + Response( + status_code=200, headers=[("Foo", "bar")], http_version="1.0", reason=b"" + ), + ) + + # Tolerate headers line endings (\r\n and \n) + # \n\r\b between headers and body + tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.1 200 OK\r\nSomeHeader: val\n\r\n", + Response( + status_code=200, + headers=[("SomeHeader", "val")], + http_version="1.1", + reason="OK", + ), + ) + + # delimited only with \n + tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.1 200 OK\nSomeHeader1: val1\nSomeHeader2: val2\n\n", + Response( + status_code=200, + headers=[("SomeHeader1", "val1"), ("SomeHeader2", "val2")], + http_version="1.1", + reason="OK", + ), + ) + + # mixed \r\n and \n + tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.1 200 OK\r\nSomeHeader1: val1\nSomeHeader2: val2\n\r\n", + Response( + status_code=200, + headers=[("SomeHeader1", "val1"), ("SomeHeader2", "val2")], + http_version="1.1", + reason="OK", + ), + ) + + # obsolete line folding + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.1\r\n" + b"Host: example.com\r\n" + b"Some: multi-line\r\n" + b" header\r\n" + b"\tnonsense\r\n" + b" \t \t\tI guess\r\n" + b"Connection: close\r\n" + b"More-nonsense: in the\r\n" + b" last header \r\n\r\n", + Request( + method="HEAD", + 
target="/foo", + headers=[ + ("Host", "example.com"), + ("Some", "multi-line header nonsense I guess"), + ("Connection", "close"), + ("More-nonsense", "in the last header"), + ], + ), + ) + + with pytest.raises(LocalProtocolError): + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.1\r\n" b" folded: line\r\n\r\n", + None, + ) + + with pytest.raises(LocalProtocolError): + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.1\r\n" b"foo : line\r\n\r\n", + None, + ) + with pytest.raises(LocalProtocolError): + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.1\r\n" b"foo\t: line\r\n\r\n", + None, + ) + with pytest.raises(LocalProtocolError): + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.1\r\n" b"foo\t: line\r\n\r\n", + None, + ) + with pytest.raises(LocalProtocolError): + tr(READERS[CLIENT, IDLE], b"HEAD /foo HTTP/1.1\r\n" b": line\r\n\r\n", None) + + +def test__obsolete_line_fold_bytes(): + # _obsolete_line_fold has a defensive cast to bytearray, which is + # necessary to protect against O(n^2) behavior in case anyone ever passes + # in regular bytestrings... but right now we never pass in regular + # bytestrings. so this test just exists to get some coverage on that + # defensive cast. 
+ assert list(_obsolete_line_fold([b"aaa", b"bbb", b" ccc", b"ddd"])) == [ + b"aaa", + bytearray(b"bbb ccc"), + b"ddd", + ] + + +def _run_reader_iter(reader, buf, do_eof): + while True: + event = reader(buf) + if event is None: + break + yield event + # body readers have undefined behavior after returning EndOfMessage, + # because this changes the state so they don't get called again + if type(event) is EndOfMessage: + break + if do_eof: + assert not buf + yield reader.read_eof() + + +def _run_reader(*args): + events = list(_run_reader_iter(*args)) + return normalize_data_events(events) + + +def t_body_reader(thunk, data, expected, do_eof=False): + # Simple: consume whole thing + print("Test 1") + buf = makebuf(data) + assert _run_reader(thunk(), buf, do_eof) == expected + + # Incrementally growing buffer + print("Test 2") + reader = thunk() + buf = ReceiveBuffer() + events = [] + for i in range(len(data)): + events += _run_reader(reader, buf, False) + buf += data[i : i + 1] + events += _run_reader(reader, buf, do_eof) + assert normalize_data_events(events) == expected + + is_complete = any(type(event) is EndOfMessage for event in expected) + if is_complete and not do_eof: + buf = makebuf(data + b"trailing") + assert _run_reader(thunk(), buf, False) == expected + + +def test_ContentLengthReader(): + t_body_reader(lambda: ContentLengthReader(0), b"", [EndOfMessage()]) + + t_body_reader( + lambda: ContentLengthReader(10), + b"0123456789", + [Data(data=b"0123456789"), EndOfMessage()], + ) + + +def test_Http10Reader(): + t_body_reader(Http10Reader, b"", [EndOfMessage()], do_eof=True) + t_body_reader(Http10Reader, b"asdf", [Data(data=b"asdf")], do_eof=False) + t_body_reader( + Http10Reader, b"asdf", [Data(data=b"asdf"), EndOfMessage()], do_eof=True + ) + + +def test_ChunkedReader(): + t_body_reader(ChunkedReader, b"0\r\n\r\n", [EndOfMessage()]) + + t_body_reader( + ChunkedReader, + b"0\r\nSome: header\r\n\r\n", + [EndOfMessage(headers=[("Some", "header")])], + ) + + 
t_body_reader( + ChunkedReader, + b"5\r\n01234\r\n" + + b"10\r\n0123456789abcdef\r\n" + + b"0\r\n" + + b"Some: header\r\n\r\n", + [ + Data(data=b"012340123456789abcdef"), + EndOfMessage(headers=[("Some", "header")]), + ], + ) + + t_body_reader( + ChunkedReader, + b"5\r\n01234\r\n" + b"10\r\n0123456789abcdef\r\n" + b"0\r\n\r\n", + [Data(data=b"012340123456789abcdef"), EndOfMessage()], + ) + + # handles upper and lowercase hex + t_body_reader( + ChunkedReader, + b"aA\r\n" + b"x" * 0xAA + b"\r\n" + b"0\r\n\r\n", + [Data(data=b"x" * 0xAA), EndOfMessage()], + ) + + # refuses arbitrarily long chunk integers + with pytest.raises(LocalProtocolError): + # Technically this is legal HTTP/1.1, but we refuse to process chunk + # sizes that don't fit into 20 characters of hex + t_body_reader(ChunkedReader, b"9" * 100 + b"\r\nxxx", [Data(data=b"xxx")]) + + # refuses garbage in the chunk count + with pytest.raises(LocalProtocolError): + t_body_reader(ChunkedReader, b"10\x00\r\nxxx", None) + + # handles (and discards) "chunk extensions" omg wtf + t_body_reader( + ChunkedReader, + b"5; hello=there\r\n" + + b"xxxxx" + + b"\r\n" + + b'0; random="junk"; some=more; canbe=lonnnnngg\r\n\r\n', + [Data(data=b"xxxxx"), EndOfMessage()], + ) + + +def test_ContentLengthWriter(): + w = ContentLengthWriter(5) + assert dowrite(w, Data(data=b"123")) == b"123" + assert dowrite(w, Data(data=b"45")) == b"45" + assert dowrite(w, EndOfMessage()) == b"" + + w = ContentLengthWriter(5) + with pytest.raises(LocalProtocolError): + dowrite(w, Data(data=b"123456")) + + w = ContentLengthWriter(5) + dowrite(w, Data(data=b"123")) + with pytest.raises(LocalProtocolError): + dowrite(w, Data(data=b"456")) + + w = ContentLengthWriter(5) + dowrite(w, Data(data=b"123")) + with pytest.raises(LocalProtocolError): + dowrite(w, EndOfMessage()) + + w = ContentLengthWriter(5) + dowrite(w, Data(data=b"123")) == b"123" + dowrite(w, Data(data=b"45")) == b"45" + with pytest.raises(LocalProtocolError): + dowrite(w, 
EndOfMessage(headers=[("Etag", "asdf")])) + + +def test_ChunkedWriter(): + w = ChunkedWriter() + assert dowrite(w, Data(data=b"aaa")) == b"3\r\naaa\r\n" + assert dowrite(w, Data(data=b"a" * 20)) == b"14\r\n" + b"a" * 20 + b"\r\n" + + assert dowrite(w, Data(data=b"")) == b"" + + assert dowrite(w, EndOfMessage()) == b"0\r\n\r\n" + + assert ( + dowrite(w, EndOfMessage(headers=[("Etag", "asdf"), ("a", "b")])) + == b"0\r\nEtag: asdf\r\na: b\r\n\r\n" + ) + + +def test_Http10Writer(): + w = Http10Writer() + assert dowrite(w, Data(data=b"1234")) == b"1234" + assert dowrite(w, EndOfMessage()) == b"" + + with pytest.raises(LocalProtocolError): + dowrite(w, EndOfMessage(headers=[("Etag", "asdf")])) + + +def test_reject_garbage_after_request_line(): + with pytest.raises(LocalProtocolError): + tr(READERS[SERVER, SEND_RESPONSE], b"HTTP/1.0 200 OK\x00xxxx\r\n\r\n", None) + + +def test_reject_garbage_after_response_line(): + with pytest.raises(LocalProtocolError): + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.1 xxxxxx\r\n" b"Host: a\r\n\r\n", + None, + ) + + +def test_reject_garbage_in_header_line(): + with pytest.raises(LocalProtocolError): + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.1\r\n" b"Host: foo\x00bar\r\n\r\n", + None, + ) + + +def test_reject_non_vchar_in_path(): + for bad_char in b"\x00\x20\x7f\xee": + message = bytearray(b"HEAD /") + message.append(bad_char) + message.extend(b" HTTP/1.1\r\nHost: foobar\r\n\r\n") + with pytest.raises(LocalProtocolError): + tr(READERS[CLIENT, IDLE], message, None) + + +# https://github.com/python-hyper/h11/issues/57 +def test_allow_some_garbage_in_cookies(): + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.1\r\n" + b"Host: foo\r\n" + b"Set-Cookie: ___utmvafIumyLc=kUd\x01UpAt; path=/; Max-Age=900\r\n" + b"\r\n", + Request( + method="HEAD", + target="/foo", + headers=[ + ("Host", "foo"), + ("Set-Cookie", "___utmvafIumyLc=kUd\x01UpAt; path=/; Max-Age=900"), + ], + ), + ) + + +def test_host_comes_first(): + tw( + 
write_headers, + normalize_and_validate([("foo", "bar"), ("Host", "example.com")]), + b"Host: example.com\r\nfoo: bar\r\n\r\n", + ) diff --git a/.venv/lib/python3.9/site-packages/h11/tests/test_receivebuffer.py b/.venv/lib/python3.9/site-packages/h11/tests/test_receivebuffer.py new file mode 100644 index 0000000..3a61f9d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/h11/tests/test_receivebuffer.py @@ -0,0 +1,134 @@ +import re + +import pytest + +from .._receivebuffer import ReceiveBuffer + + +def test_receivebuffer(): + b = ReceiveBuffer() + assert not b + assert len(b) == 0 + assert bytes(b) == b"" + + b += b"123" + assert b + assert len(b) == 3 + assert bytes(b) == b"123" + + assert bytes(b) == b"123" + + assert b.maybe_extract_at_most(2) == b"12" + assert b + assert len(b) == 1 + assert bytes(b) == b"3" + + assert bytes(b) == b"3" + + assert b.maybe_extract_at_most(10) == b"3" + assert bytes(b) == b"" + + assert b.maybe_extract_at_most(10) is None + assert not b + + ################################################################ + # maybe_extract_until_next + ################################################################ + + b += b"123\n456\r\n789\r\n" + + assert b.maybe_extract_next_line() == b"123\n456\r\n" + assert bytes(b) == b"789\r\n" + + assert b.maybe_extract_next_line() == b"789\r\n" + assert bytes(b) == b"" + + b += b"12\r" + assert b.maybe_extract_next_line() is None + assert bytes(b) == b"12\r" + + b += b"345\n\r" + assert b.maybe_extract_next_line() is None + assert bytes(b) == b"12\r345\n\r" + + # here we stopped at the middle of b"\r\n" delimiter + + b += b"\n6789aaa123\r\n" + assert b.maybe_extract_next_line() == b"12\r345\n\r\n" + assert b.maybe_extract_next_line() == b"6789aaa123\r\n" + assert b.maybe_extract_next_line() is None + assert bytes(b) == b"" + + ################################################################ + # maybe_extract_lines + ################################################################ + + b += b"123\r\na: 
b\r\nfoo:bar\r\n\r\ntrailing" + lines = b.maybe_extract_lines() + assert lines == [b"123", b"a: b", b"foo:bar"] + assert bytes(b) == b"trailing" + + assert b.maybe_extract_lines() is None + + b += b"\r\n\r" + assert b.maybe_extract_lines() is None + + assert b.maybe_extract_at_most(100) == b"trailing\r\n\r" + assert not b + + # Empty body case (as happens at the end of chunked encoding if there are + # no trailing headers, e.g.) + b += b"\r\ntrailing" + assert b.maybe_extract_lines() == [] + assert bytes(b) == b"trailing" + + +@pytest.mark.parametrize( + "data", + [ + pytest.param( + ( + b"HTTP/1.1 200 OK\r\n", + b"Content-type: text/plain\r\n", + b"Connection: close\r\n", + b"\r\n", + b"Some body", + ), + id="with_crlf_delimiter", + ), + pytest.param( + ( + b"HTTP/1.1 200 OK\n", + b"Content-type: text/plain\n", + b"Connection: close\n", + b"\n", + b"Some body", + ), + id="with_lf_only_delimiter", + ), + pytest.param( + ( + b"HTTP/1.1 200 OK\n", + b"Content-type: text/plain\r\n", + b"Connection: close\n", + b"\n", + b"Some body", + ), + id="with_mixed_crlf_and_lf", + ), + ], +) +def test_receivebuffer_for_invalid_delimiter(data): + b = ReceiveBuffer() + + for line in data: + b += line + + lines = b.maybe_extract_lines() + + assert lines == [ + b"HTTP/1.1 200 OK", + b"Content-type: text/plain", + b"Connection: close", + ] + assert bytes(b) == b"Some body" diff --git a/.venv/lib/python3.9/site-packages/h11/tests/test_state.py b/.venv/lib/python3.9/site-packages/h11/tests/test_state.py new file mode 100644 index 0000000..efe83f0 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/h11/tests/test_state.py @@ -0,0 +1,250 @@ +import pytest + +from .._events import * +from .._state import * +from .._state import _SWITCH_CONNECT, _SWITCH_UPGRADE, ConnectionState +from .._util import LocalProtocolError + + +def test_ConnectionState(): + cs = ConnectionState() + + # Basic event-triggered transitions + + assert cs.states == {CLIENT: IDLE, SERVER: IDLE} + + 
cs.process_event(CLIENT, Request) + # The SERVER-Request special case: + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + + # Illegal transitions raise an error and nothing happens + with pytest.raises(LocalProtocolError): + cs.process_event(CLIENT, Request) + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + + cs.process_event(SERVER, InformationalResponse) + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + + cs.process_event(SERVER, Response) + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_BODY} + + cs.process_event(CLIENT, EndOfMessage) + cs.process_event(SERVER, EndOfMessage) + assert cs.states == {CLIENT: DONE, SERVER: DONE} + + # State-triggered transition + + cs.process_event(SERVER, ConnectionClosed) + assert cs.states == {CLIENT: MUST_CLOSE, SERVER: CLOSED} + + +def test_ConnectionState_keep_alive(): + # keep_alive = False + cs = ConnectionState() + cs.process_event(CLIENT, Request) + cs.process_keep_alive_disabled() + cs.process_event(CLIENT, EndOfMessage) + assert cs.states == {CLIENT: MUST_CLOSE, SERVER: SEND_RESPONSE} + + cs.process_event(SERVER, Response) + cs.process_event(SERVER, EndOfMessage) + assert cs.states == {CLIENT: MUST_CLOSE, SERVER: MUST_CLOSE} + + +def test_ConnectionState_keep_alive_in_DONE(): + # Check that if keep_alive is disabled when the CLIENT is already in DONE, + # then this is sufficient to immediately trigger the DONE -> MUST_CLOSE + # transition + cs = ConnectionState() + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, EndOfMessage) + assert cs.states[CLIENT] is DONE + cs.process_keep_alive_disabled() + assert cs.states[CLIENT] is MUST_CLOSE + + +def test_ConnectionState_switch_denied(): + for switch_type in (_SWITCH_CONNECT, _SWITCH_UPGRADE): + for deny_early in (True, False): + cs = ConnectionState() + cs.process_client_switch_proposal(switch_type) + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, Data) + assert cs.states == {CLIENT: SEND_BODY, 
SERVER: SEND_RESPONSE} + + assert switch_type in cs.pending_switch_proposals + + if deny_early: + # before client reaches DONE + cs.process_event(SERVER, Response) + assert not cs.pending_switch_proposals + + cs.process_event(CLIENT, EndOfMessage) + + if deny_early: + assert cs.states == {CLIENT: DONE, SERVER: SEND_BODY} + else: + assert cs.states == { + CLIENT: MIGHT_SWITCH_PROTOCOL, + SERVER: SEND_RESPONSE, + } + + cs.process_event(SERVER, InformationalResponse) + assert cs.states == { + CLIENT: MIGHT_SWITCH_PROTOCOL, + SERVER: SEND_RESPONSE, + } + + cs.process_event(SERVER, Response) + assert cs.states == {CLIENT: DONE, SERVER: SEND_BODY} + assert not cs.pending_switch_proposals + + +_response_type_for_switch = { + _SWITCH_UPGRADE: InformationalResponse, + _SWITCH_CONNECT: Response, + None: Response, +} + + +def test_ConnectionState_protocol_switch_accepted(): + for switch_event in [_SWITCH_UPGRADE, _SWITCH_CONNECT]: + cs = ConnectionState() + cs.process_client_switch_proposal(switch_event) + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, Data) + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + + cs.process_event(CLIENT, EndOfMessage) + assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL, SERVER: SEND_RESPONSE} + + cs.process_event(SERVER, InformationalResponse) + assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL, SERVER: SEND_RESPONSE} + + cs.process_event(SERVER, _response_type_for_switch[switch_event], switch_event) + assert cs.states == {CLIENT: SWITCHED_PROTOCOL, SERVER: SWITCHED_PROTOCOL} + + +def test_ConnectionState_double_protocol_switch(): + # CONNECT + Upgrade is legal! Very silly, but legal. So we support + # it. Because sometimes doing the silly thing is easier than not. 
+ for server_switch in [None, _SWITCH_UPGRADE, _SWITCH_CONNECT]: + cs = ConnectionState() + cs.process_client_switch_proposal(_SWITCH_UPGRADE) + cs.process_client_switch_proposal(_SWITCH_CONNECT) + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, EndOfMessage) + assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL, SERVER: SEND_RESPONSE} + cs.process_event( + SERVER, _response_type_for_switch[server_switch], server_switch + ) + if server_switch is None: + assert cs.states == {CLIENT: DONE, SERVER: SEND_BODY} + else: + assert cs.states == {CLIENT: SWITCHED_PROTOCOL, SERVER: SWITCHED_PROTOCOL} + + +def test_ConnectionState_inconsistent_protocol_switch(): + for client_switches, server_switch in [ + ([], _SWITCH_CONNECT), + ([], _SWITCH_UPGRADE), + ([_SWITCH_UPGRADE], _SWITCH_CONNECT), + ([_SWITCH_CONNECT], _SWITCH_UPGRADE), + ]: + cs = ConnectionState() + for client_switch in client_switches: + cs.process_client_switch_proposal(client_switch) + cs.process_event(CLIENT, Request) + with pytest.raises(LocalProtocolError): + cs.process_event(SERVER, Response, server_switch) + + +def test_ConnectionState_keepalive_protocol_switch_interaction(): + # keep_alive=False + pending_switch_proposals + cs = ConnectionState() + cs.process_client_switch_proposal(_SWITCH_UPGRADE) + cs.process_event(CLIENT, Request) + cs.process_keep_alive_disabled() + cs.process_event(CLIENT, Data) + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + + # the protocol switch "wins" + cs.process_event(CLIENT, EndOfMessage) + assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL, SERVER: SEND_RESPONSE} + + # but when the server denies the request, keep_alive comes back into play + cs.process_event(SERVER, Response) + assert cs.states == {CLIENT: MUST_CLOSE, SERVER: SEND_BODY} + + +def test_ConnectionState_reuse(): + cs = ConnectionState() + + with pytest.raises(LocalProtocolError): + cs.start_next_cycle() + + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, EndOfMessage) 
+ + with pytest.raises(LocalProtocolError): + cs.start_next_cycle() + + cs.process_event(SERVER, Response) + cs.process_event(SERVER, EndOfMessage) + + cs.start_next_cycle() + assert cs.states == {CLIENT: IDLE, SERVER: IDLE} + + # No keepalive + + cs.process_event(CLIENT, Request) + cs.process_keep_alive_disabled() + cs.process_event(CLIENT, EndOfMessage) + cs.process_event(SERVER, Response) + cs.process_event(SERVER, EndOfMessage) + + with pytest.raises(LocalProtocolError): + cs.start_next_cycle() + + # One side closed + + cs = ConnectionState() + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, EndOfMessage) + cs.process_event(CLIENT, ConnectionClosed) + cs.process_event(SERVER, Response) + cs.process_event(SERVER, EndOfMessage) + + with pytest.raises(LocalProtocolError): + cs.start_next_cycle() + + # Succesful protocol switch + + cs = ConnectionState() + cs.process_client_switch_proposal(_SWITCH_UPGRADE) + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, EndOfMessage) + cs.process_event(SERVER, InformationalResponse, _SWITCH_UPGRADE) + + with pytest.raises(LocalProtocolError): + cs.start_next_cycle() + + # Failed protocol switch + + cs = ConnectionState() + cs.process_client_switch_proposal(_SWITCH_UPGRADE) + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, EndOfMessage) + cs.process_event(SERVER, Response) + cs.process_event(SERVER, EndOfMessage) + + cs.start_next_cycle() + assert cs.states == {CLIENT: IDLE, SERVER: IDLE} + + +def test_server_request_is_illegal(): + # There used to be a bug in how we handled the Request special case that + # made this allowed... 
+ cs = ConnectionState() + with pytest.raises(LocalProtocolError): + cs.process_event(SERVER, Request) diff --git a/.venv/lib/python3.9/site-packages/h11/tests/test_util.py b/.venv/lib/python3.9/site-packages/h11/tests/test_util.py new file mode 100644 index 0000000..d851bdc --- /dev/null +++ b/.venv/lib/python3.9/site-packages/h11/tests/test_util.py @@ -0,0 +1,99 @@ +import re +import sys +import traceback + +import pytest + +from .._util import * + + +def test_ProtocolError(): + with pytest.raises(TypeError): + ProtocolError("abstract base class") + + +def test_LocalProtocolError(): + try: + raise LocalProtocolError("foo") + except LocalProtocolError as e: + assert str(e) == "foo" + assert e.error_status_hint == 400 + + try: + raise LocalProtocolError("foo", error_status_hint=418) + except LocalProtocolError as e: + assert str(e) == "foo" + assert e.error_status_hint == 418 + + def thunk(): + raise LocalProtocolError("a", error_status_hint=420) + + try: + try: + thunk() + except LocalProtocolError as exc1: + orig_traceback = "".join(traceback.format_tb(sys.exc_info()[2])) + exc1._reraise_as_remote_protocol_error() + except RemoteProtocolError as exc2: + assert type(exc2) is RemoteProtocolError + assert exc2.args == ("a",) + assert exc2.error_status_hint == 420 + new_traceback = "".join(traceback.format_tb(sys.exc_info()[2])) + assert new_traceback.endswith(orig_traceback) + + +def test_validate(): + my_re = re.compile(br"(?P[0-9]+)\.(?P[0-9]+)") + with pytest.raises(LocalProtocolError): + validate(my_re, b"0.") + + groups = validate(my_re, b"0.1") + assert groups == {"group1": b"0", "group2": b"1"} + + # successful partial matches are an error - must match whole string + with pytest.raises(LocalProtocolError): + validate(my_re, b"0.1xx") + with pytest.raises(LocalProtocolError): + validate(my_re, b"0.1\n") + + +def test_validate_formatting(): + my_re = re.compile(br"foo") + + with pytest.raises(LocalProtocolError) as excinfo: + validate(my_re, b"", "oops") + 
assert "oops" in str(excinfo.value) + + with pytest.raises(LocalProtocolError) as excinfo: + validate(my_re, b"", "oops {}") + assert "oops {}" in str(excinfo.value) + + with pytest.raises(LocalProtocolError) as excinfo: + validate(my_re, b"", "oops {} xx", 10) + assert "oops 10 xx" in str(excinfo.value) + + +def test_make_sentinel(): + S = make_sentinel("S") + assert repr(S) == "S" + assert S == S + assert type(S).__name__ == "S" + assert S in {S} + assert type(S) is S + S2 = make_sentinel("S2") + assert repr(S2) == "S2" + assert S != S2 + assert S not in {S2} + assert type(S) is not type(S2) + + +def test_bytesify(): + assert bytesify(b"123") == b"123" + assert bytesify(bytearray(b"123")) == b"123" + assert bytesify("123") == b"123" + + with pytest.raises(UnicodeEncodeError): + bytesify("\u1234") + + with pytest.raises(TypeError): + bytesify(10) diff --git a/.venv/lib/python3.9/site-packages/httpcore-0.13.7.dist-info/INSTALLER b/.venv/lib/python3.9/site-packages/httpcore-0.13.7.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpcore-0.13.7.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.9/site-packages/httpcore-0.13.7.dist-info/LICENSE.md b/.venv/lib/python3.9/site-packages/httpcore-0.13.7.dist-info/LICENSE.md new file mode 100644 index 0000000..311b2b5 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpcore-0.13.7.dist-info/LICENSE.md @@ -0,0 +1,27 @@ +Copyright © 2020, [Encode OSS Ltd](https://www.encode.io/). +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. 
+ +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/.venv/lib/python3.9/site-packages/httpcore-0.13.7.dist-info/METADATA b/.venv/lib/python3.9/site-packages/httpcore-0.13.7.dist-info/METADATA new file mode 100644 index 0000000..cf84d2a --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpcore-0.13.7.dist-info/METADATA @@ -0,0 +1,422 @@ +Metadata-Version: 2.1 +Name: httpcore +Version: 0.13.7 +Summary: A minimal low-level HTTP client. 
+Home-page: https://github.com/encode/httpcore +Author: Tom Christie +Author-email: tom@tomchristie.com +License: BSD +Project-URL: Documentation, https://www.encode.io/httpcore +Project-URL: Source, https://github.com/encode/httpcore +Platform: UNKNOWN +Classifier: Development Status :: 3 - Alpha +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Topic :: Internet :: WWW/HTTP +Classifier: Framework :: AsyncIO +Classifier: Framework :: Trio +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3 :: Only +Requires-Python: >=3.6 +Description-Content-Type: text/markdown +License-File: LICENSE.md +Requires-Dist: h11 (<0.13,>=0.11) +Requires-Dist: sniffio (==1.*) +Requires-Dist: anyio (==3.*) +Provides-Extra: http2 +Requires-Dist: h2 (<5,>=3) ; extra == 'http2' + +# HTTP Core + +[![Test Suite](https://github.com/encode/httpcore/workflows/Test%20Suite/badge.svg)](https://github.com/encode/httpcore/actions) +[![Package version](https://badge.fury.io/py/httpcore.svg)](https://pypi.org/project/httpcore/) + +> *Do one thing, and do it well.* + +The HTTP Core package provides a minimal low-level HTTP client, which does +one thing only. Sending HTTP requests. + +It does not provide any high level model abstractions over the API, +does not handle redirects, multipart uploads, building authentication headers, +transparent HTTP caching, URL parsing, session cookie handling, +content or charset decoding, handling JSON, environment based configuration +defaults, or any of that Jazz. 
+ +Some things HTTP Core does do: + +* Sending HTTP requests. +* Provides both sync and async interfaces. +* Supports HTTP/1.1 and HTTP/2. +* Async backend support for `asyncio`, `trio` and `curio`. +* Automatic connection pooling. +* HTTP(S) proxy support. + +## Installation + +For HTTP/1.1 only support, install with... + +```shell +$ pip install httpcore +``` + +For HTTP/1.1 and HTTP/2 support, install with... + +```shell +$ pip install httpcore[http2] +``` + +## Quickstart + +Here's an example of making an HTTP GET request using `httpcore`... + +```python +with httpcore.SyncConnectionPool() as http: + status_code, headers, stream, extensions = http.handle_request( + method=b'GET', + url=(b'https', b'example.org', 443, b'/'), + headers=[(b'host', b'example.org'), (b'user-agent', b'httpcore')], + stream=httpcore.ByteStream(b''), + extensions={} + ) + body = stream.read() + print(status_code, body) +``` + +Or, using async... + +```python +async with httpcore.AsyncConnectionPool() as http: + status_code, headers, stream, extensions = await http.handle_async_request( + method=b'GET', + url=(b'https', b'example.org', 443, b'/'), + headers=[(b'host', b'example.org'), (b'user-agent', b'httpcore')], + stream=httpcore.ByteStream(b''), + extensions={} + ) + body = await stream.aread() + print(status_code, body) +``` + +## Motivation + +You probably don't want to be using HTTP Core directly. It might make sense if +you're writing something like a proxy service in Python, and you just want +something at the lowest possible level, but more typically you'll want to use +a higher level client library, such as `httpx`. + +The motivation for `httpcore` is: + +* To provide a reusable low-level client library, that other packages can then build on top of. +* To provide a *really clear interface split* between the networking code and client logic, + so that each is easier to understand and reason about in isolation. 
+ + +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). + +## 0.13.7 (September 13th, 2021) + +- Fix broken error messaging when URL scheme is missing, or a non HTTP(S) scheme is used. (Pull #403) + +## 0.13.6 (June 15th, 2021) + +### Fixed + +- Close sockets when read or write timeouts occur. (Pull #365) + +## 0.13.5 (June 14th, 2021) + +### Fixed + +- Resolved niggles with AnyIO EOF behaviours. (Pull #358, #362) + +## 0.13.4 (June 9th, 2021) + +### Added + +- Improved error messaging when URL scheme is missing, or a non HTTP(S) scheme is used. (Pull #354) + +### Fixed + +- Switched to `anyio` as the default backend implementation when running with `asyncio`. Resolves some awkward [TLS timeout issues](https://github.com/encode/httpx/discussions/1511). + +## 0.13.3 (May 6th, 2021) + +### Added + +- Support HTTP/2 prior knowledge, using `httpcore.SyncConnectionPool(http1=False)`. (Pull #333) + +### Fixed + +- Handle cases where environment does not provide `select.poll` support. (Pull #331) + +## 0.13.2 (April 29th, 2021) + +### Added + +- Improve error message for specific case of `RemoteProtocolError` where server disconnects without sending a response. (Pull #313) + +## 0.13.1 (April 28th, 2021) + +### Fixed + +- More resiliant testing for closed connections. (Pull #311) +- Don't raise exceptions on ungraceful connection closes. (Pull #310) + +## 0.13.0 (April 21st, 2021) + +The 0.13 release updates the core API in order to match the HTTPX Transport API, +introduced in HTTPX 0.18 onwards. 
+ +An example of making requests with the new interface is: + +```python +with httpcore.SyncConnectionPool() as http: + status_code, headers, stream, extensions = http.handle_request( + method=b'GET', + url=(b'https', b'example.org', 443, b'/'), + headers=[(b'host', b'example.org'), (b'user-agent', b'httpcore')] + stream=httpcore.ByteStream(b''), + extensions={} + ) + body = stream.read() + print(status_code, body) +``` + +### Changed + +- The `.request()` method is now `handle_request()`. (Pull #296) +- The `.arequest()` method is now `.handle_async_request()`. (Pull #296) +- The `headers` argument is no longer optional. (Pull #296) +- The `stream` argument is no longer optional. (Pull #296) +- The `ext` argument is now named `extensions`, and is no longer optional. (Pull #296) +- The `"reason"` extension keyword is now named `"reason_phrase"`. (Pull #296) +- The `"reason_phrase"` and `"http_version"` extensions now use byte strings for their values. (Pull #296) +- The `httpcore.PlainByteStream()` class becomes `httpcore.ByteStream()`. (Pull #296) + +### Added + +- Streams now support a `.read()` interface. (Pull #296) + +### Fixed + +- Task cancelation no longer leaks connections from the connection pool. (Pull #305) + +## 0.12.3 (December 7th, 2020) + +### Fixed + +- Abort SSL connections on close rather than waiting for remote EOF when using `asyncio`. (Pull #167) +- Fix exception raised in case of connect timeouts when using the `anyio` backend. (Pull #236) +- Fix `Host` header precedence for `:authority` in HTTP/2. (Pull #241, #243) +- Handle extra edge case when detecting for socket readability when using `asyncio`. (Pull #242, #244) +- Fix `asyncio` SSL warning when using proxy tunneling. (Pull #249) + +## 0.12.2 (November 20th, 2020) + +### Fixed + +- Properly wrap connect errors on the asyncio backend. (Pull #235) +- Fix `ImportError` occurring on Python 3.9 when using the HTTP/1.1 sync client in a multithreaded context. 
(Pull #237) + +## 0.12.1 (November 7th, 2020) + +### Added + +- Add connect retries. (Pull #221) + +### Fixed + +- Tweak detection of dropped connections, resolving an issue with open files limits on Linux. (Pull #185) +- Avoid leaking connections when establishing an HTTP tunnel to a proxy has failed. (Pull #223) +- Properly wrap OS errors when using `trio`. (Pull #225) + +## 0.12.0 (October 6th, 2020) + +### Changed + +- HTTP header casing is now preserved, rather than always sent in lowercase. (#216 and python-hyper/h11#104) + +### Added + +- Add Python 3.9 to officially supported versions. + +### Fixed + +- Gracefully handle a stdlib asyncio bug when a connection is closed while it is in a paused-for-reading state. (#201) + +## 0.11.1 (September 28nd, 2020) + +### Fixed + +- Add await to async semaphore release() coroutine (#197) +- Drop incorrect curio classifier (#192) + +## 0.11.0 (September 22nd, 2020) + +The Transport API with 0.11.0 has a couple of significant changes. + +Firstly we've moved changed the request interface in order to allow extensions, which will later enable us to support features +such as trailing headers, HTTP/2 server push, and CONNECT/Upgrade connections. + +The interface changes from: + +```python +def request(method, url, headers, stream, timeout): + return (http_version, status_code, reason, headers, stream) +``` + +To instead including an optional dictionary of extensions on the request and response: + +```python +def request(method, url, headers, stream, ext): + return (status_code, headers, stream, ext) +``` + +Having an open-ended extensions point will allow us to add later support for various optional features, that wouldn't otherwise be supported without these API changes. + +In particular: + +* Trailing headers support. +* HTTP/2 Server Push +* sendfile. +* Exposing raw connection on CONNECT, Upgrade, HTTP/2 bi-di streaming. +* Exposing debug information out of the API, including template name, template context. 
+ +Currently extensions are limited to: + +* request: `timeout` - Optional. Timeout dictionary. +* response: `http_version` - Optional. Include the HTTP version used on the response. +* response: `reason` - Optional. Include the reason phrase used on the response. Only valid with HTTP/1.*. + +See https://github.com/encode/httpx/issues/1274#issuecomment-694884553 for the history behind this. + +Secondly, the async version of `request` is now namespaced as `arequest`. + +This allows concrete transports to support both sync and async implementations on the same class. + +### Added + +- Add curio support. (Pull #168) +- Add anyio support, with `backend="anyio"`. (Pull #169) + +### Changed + +- Update the Transport API to use 'ext' for optional extensions. (Pull #190) +- Update the Transport API to use `.request` and `.arequest` so implementations can support both sync and async. (Pull #189) + +## 0.10.2 (August 20th, 2020) + +### Added + +- Added Unix Domain Socket support. (Pull #139) + +### Fixed + +- Always include the port on proxy CONNECT requests. (Pull #154) +- Fix `max_keepalive_connections` configuration. (Pull #153) +- Fixes behaviour in HTTP/1.1 where server disconnects can be used to signal the end of the response body. (Pull #164) + +## 0.10.1 (August 7th, 2020) + +- Include `max_keepalive_connections` on `AsyncHTTPProxy`/`SyncHTTPProxy` classes. + +## 0.10.0 (August 7th, 2020) + +The most notable change in the 0.10.0 release is that HTTP/2 support is now fully optional. + +Use either `pip install httpcore` for HTTP/1.1 support only, or `pip install httpcore[http2]` for HTTP/1.1 and HTTP/2 support. + +### Added + +- HTTP/2 support becomes optional. (Pull #121, #130) +- Add `local_address=...` support. (Pull #100, #134) +- Add `PlainByteStream`, `IteratorByteStream`, `AsyncIteratorByteStream`. The `AsyncByteSteam` and `SyncByteStream` classes are now pure interface classes. (#133) +- Add `LocalProtocolError`, `RemoteProtocolError` exceptions. 
(Pull #129) +- Add `UnsupportedProtocol` exception. (Pull #128) +- Add `.get_connection_info()` method. (Pull #102, #137) +- Add better TRACE logs. (Pull #101) + +### Changed + +- `max_keepalive` is deprecated in favour of `max_keepalive_connections`. (Pull #140) + +### Fixed + +- Improve handling of server disconnects. (Pull #112) + +## 0.9.1 (May 27th, 2020) + +### Fixed + +- Proper host resolution for sync case, including IPv6 support. (Pull #97) +- Close outstanding connections when connection pool is closed. (Pull #98) + +## 0.9.0 (May 21th, 2020) + +### Changed + +- URL port becomes an `Optional[int]` instead of `int`. (Pull #92) + +### Fixed + +- Honor HTTP/2 max concurrent streams settings. (Pull #89, #90) +- Remove incorrect debug log. (Pull #83) + +## 0.8.4 (May 11th, 2020) + +### Added + +- Logging via HTTPCORE_LOG_LEVEL and HTTPX_LOG_LEVEL environment variables +and TRACE level logging. (Pull #79) + +### Fixed + +- Reuse of connections on HTTP/2 in close concurrency situations. (Pull #81) + +## 0.8.3 (May 6rd, 2020) + +### Fixed + +- Include `Host` and `Accept` headers on proxy "CONNECT" requests. +- De-duplicate any headers also contained in proxy_headers. +- HTTP/2 flag not being passed down to proxy connections. + +## 0.8.2 (May 3rd, 2020) + +### Fixed + +- Fix connections using proxy forwarding requests not being added to the +connection pool properly. (Pull #70) + +## 0.8.1 (April 30th, 2020) + +### Changed + +- Allow inherintance of both `httpcore.AsyncByteStream`, `httpcore.SyncByteStream` without type conflicts. + +## 0.8.0 (April 30th, 2020) + +### Fixed + +- Fixed tunnel proxy support. + +### Added + +- New `TimeoutException` base class. + +## 0.7.0 (March 5th, 2020) + +- First integration with HTTPX. 
+ + diff --git a/.venv/lib/python3.9/site-packages/httpcore-0.13.7.dist-info/RECORD b/.venv/lib/python3.9/site-packages/httpcore-0.13.7.dist-info/RECORD new file mode 100644 index 0000000..cc74147 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpcore-0.13.7.dist-info/RECORD @@ -0,0 +1,67 @@ +httpcore-0.13.7.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +httpcore-0.13.7.dist-info/LICENSE.md,sha256=_ctZFUx0y6uhahEkL3dAvqnyPW_rVUeRfYxflKgDkqU,1518 +httpcore-0.13.7.dist-info/METADATA,sha256=AD2A2icHFW5_CQo9WqHR3vmKaeTFXZkW2Zi_6gbFSJ8,13025 +httpcore-0.13.7.dist-info/RECORD,, +httpcore-0.13.7.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 +httpcore-0.13.7.dist-info/top_level.txt,sha256=kYeSB6l1hBNp7JwgSwLajcsxRlrSCVKOhYKSkdgx798,59 +httpcore/__init__.py,sha256=udEv1w02RmsdoGNMPCxH1hOcZTFiEBXsnnNUoizC4Po,1656 +httpcore/__pycache__/__init__.cpython-39.pyc,, +httpcore/__pycache__/_bytestreams.cpython-39.pyc,, +httpcore/__pycache__/_exceptions.cpython-39.pyc,, +httpcore/__pycache__/_threadlock.cpython-39.pyc,, +httpcore/__pycache__/_types.cpython-39.pyc,, +httpcore/__pycache__/_utils.cpython-39.pyc,, +httpcore/_async/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +httpcore/_async/__pycache__/__init__.cpython-39.pyc,, +httpcore/_async/__pycache__/base.cpython-39.pyc,, +httpcore/_async/__pycache__/connection.cpython-39.pyc,, +httpcore/_async/__pycache__/connection_pool.cpython-39.pyc,, +httpcore/_async/__pycache__/http.cpython-39.pyc,, +httpcore/_async/__pycache__/http11.cpython-39.pyc,, +httpcore/_async/__pycache__/http2.cpython-39.pyc,, +httpcore/_async/__pycache__/http_proxy.cpython-39.pyc,, +httpcore/_async/base.py,sha256=uhEgVbp_560r6-80PRxK6jjV4OSuzYdbWY26K_OARC8,3264 +httpcore/_async/connection.py,sha256=ORhAgJVzI5PrQNU9w0ecsSiDsF0IuIUwKLQSkmBUajY,8350 +httpcore/_async/connection_pool.py,sha256=s5Ff430j36OL3lnJNzEHShNgMhJoQ9cSO03s11Gvl6U,13146 
+httpcore/_async/http.py,sha256=6CG3ZiBXXxR-kGCpdyOWHuMTcgfp-ajPxkdAdMFf8Og,1285 +httpcore/_async/http11.py,sha256=oGrRxz4DxT6PnjP8bfLmaWvQ5NzI6OcBfUiuZZ7U078,9396 +httpcore/_async/http2.py,sha256=av5Ee5yM3hnDjiMb2paN3ObENCebCmDKfYUmPjXAtno,17082 +httpcore/_async/http_proxy.py,sha256=yDD8hXHtVHU8gLT_9VBPhgHfF0ebB6DOPlbjiuH6Viw,10004 +httpcore/_backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +httpcore/_backends/__pycache__/__init__.cpython-39.pyc,, +httpcore/_backends/__pycache__/anyio.cpython-39.pyc,, +httpcore/_backends/__pycache__/asyncio.cpython-39.pyc,, +httpcore/_backends/__pycache__/auto.cpython-39.pyc,, +httpcore/_backends/__pycache__/base.cpython-39.pyc,, +httpcore/_backends/__pycache__/curio.cpython-39.pyc,, +httpcore/_backends/__pycache__/sync.cpython-39.pyc,, +httpcore/_backends/__pycache__/trio.cpython-39.pyc,, +httpcore/_backends/anyio.py,sha256=OL7llxbbOv2pkzA5hjQR4mW0SLgDUEuJK0x_mD97Nu0,6317 +httpcore/_backends/asyncio.py,sha256=rg9-BCdRqD65_4EC6U0D-jMXkK4oV_PbYfPBeYptYj0,10700 +httpcore/_backends/auto.py,sha256=DhL7k6Iww7qkugkpeBzPQq4mySCCb9G_PK-w_zOqVUc,2211 +httpcore/_backends/base.py,sha256=hmAUxgADI-fmWciRs4iBxa0A2E-avawuaOWocX_A9nM,3796 +httpcore/_backends/curio.py,sha256=Zr3mfo7q8wpfkzXv3atEyAkbB-4NtndYWw56gEh7kDQ,6230 +httpcore/_backends/sync.py,sha256=W9WQq2lLOqZ1IhirZATFDDvKVWAdSJjeNja_vwZIg8E,5494 +httpcore/_backends/trio.py,sha256=nwEuP6_xIIFy6vqBs0XXxfqROk99GnDyLhiOIsJHcsQ,6818 +httpcore/_bytestreams.py,sha256=aZQvmevkf27rgnwMwumkOpzK5GBSwbe1WTTnkNvS910,2430 +httpcore/_exceptions.py,sha256=xieninAoG-IeEIma6OIjNDlUfUAYyH_Hx652U2RVKws,1115 +httpcore/_sync/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +httpcore/_sync/__pycache__/__init__.cpython-39.pyc,, +httpcore/_sync/__pycache__/base.cpython-39.pyc,, +httpcore/_sync/__pycache__/connection.cpython-39.pyc,, +httpcore/_sync/__pycache__/connection_pool.cpython-39.pyc,, +httpcore/_sync/__pycache__/http.cpython-39.pyc,, 
+httpcore/_sync/__pycache__/http11.cpython-39.pyc,, +httpcore/_sync/__pycache__/http2.cpython-39.pyc,, +httpcore/_sync/__pycache__/http_proxy.cpython-39.pyc,, +httpcore/_sync/base.py,sha256=HeUz5H5t_WN4GDpwhz6hCsgL75JJnXwo8Jn9Ms3m1NM,3167 +httpcore/_sync/connection.py,sha256=DeE7z9ky3CyQUl9lD72O2bcawzv-zKbZ7RTq6UrRe4A,8231 +httpcore/_sync/connection_pool.py,sha256=6cUbHjaK5cfs4rWVN7F4hOxk2IxIp1C5bfVHieSINlM,12866 +httpcore/_sync/http.py,sha256=Dhcrb6AqgHyh18QFq1NysUS-6W5z6-guFMwwC6lVwAg,1274 +httpcore/_sync/http11.py,sha256=hhlEv95rfDr-vJW5OSwTvqthkGNYH9a6jc6p1RrGoJ8,9209 +httpcore/_sync/http2.py,sha256=JdLSySBTzkOnZ4KQzfaQOZYrsinHeTScJnuKBEyfGP4,16727 +httpcore/_sync/http_proxy.py,sha256=p8zuucWqny1nhP3qVPmGdUwUF8jNq2Yf-IM6S5Bf-QE,9869 +httpcore/_threadlock.py,sha256=Xc-WeI8tDh2Ivt7Chblv3HmhbBgZXKMo5SMneXjZDCE,813 +httpcore/_types.py,sha256=97NJ04exPaPoYZB_y4eV4qYfqeyr9XE-zYqkGEAaGuI,331 +httpcore/_utils.py,sha256=goElgq6cnQR0HSJI32taOi-gAJKO3Lr_kCJ0VHPv-XM,3691 +httpcore/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/.venv/lib/python3.9/site-packages/httpcore-0.13.7.dist-info/WHEEL b/.venv/lib/python3.9/site-packages/httpcore-0.13.7.dist-info/WHEEL new file mode 100644 index 0000000..5bad85f --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpcore-0.13.7.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/.venv/lib/python3.9/site-packages/httpcore-0.13.7.dist-info/top_level.txt b/.venv/lib/python3.9/site-packages/httpcore-0.13.7.dist-info/top_level.txt new file mode 100644 index 0000000..613e435 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpcore-0.13.7.dist-info/top_level.txt @@ -0,0 +1,4 @@ +httpcore +httpcore/_async +httpcore/_backends +httpcore/_sync diff --git a/.venv/lib/python3.9/site-packages/httpcore/__init__.py b/.venv/lib/python3.9/site-packages/httpcore/__init__.py new file mode 100644 index 0000000..3ddc6d6 --- 
/dev/null +++ b/.venv/lib/python3.9/site-packages/httpcore/__init__.py @@ -0,0 +1,63 @@ +from ._async.base import AsyncByteStream, AsyncHTTPTransport +from ._async.connection_pool import AsyncConnectionPool +from ._async.http_proxy import AsyncHTTPProxy +from ._bytestreams import AsyncIteratorByteStream, ByteStream, IteratorByteStream +from ._exceptions import ( + CloseError, + ConnectError, + ConnectTimeout, + LocalProtocolError, + NetworkError, + PoolTimeout, + ProtocolError, + ProxyError, + ReadError, + ReadTimeout, + RemoteProtocolError, + TimeoutException, + UnsupportedProtocol, + WriteError, + WriteTimeout, +) +from ._sync.base import SyncByteStream, SyncHTTPTransport +from ._sync.connection_pool import SyncConnectionPool +from ._sync.http_proxy import SyncHTTPProxy + +__all__ = [ + "AsyncByteStream", + "AsyncConnectionPool", + "AsyncHTTPProxy", + "AsyncHTTPTransport", + "AsyncIteratorByteStream", + "ByteStream", + "CloseError", + "ConnectError", + "ConnectTimeout", + "IteratorByteStream", + "LocalProtocolError", + "NetworkError", + "PoolTimeout", + "ProtocolError", + "ProxyError", + "ReadError", + "ReadTimeout", + "RemoteProtocolError", + "SyncByteStream", + "SyncConnectionPool", + "SyncHTTPProxy", + "SyncHTTPTransport", + "TimeoutException", + "UnsupportedProtocol", + "WriteError", + "WriteTimeout", +] +__version__ = "0.13.7" + +__locals = locals() + +for _name in __all__: + if not _name.startswith("__"): + # Save original source module, used by Sphinx. + __locals[_name].__source_module__ = __locals[_name].__module__ + # Override module for prettier repr(). 
+ setattr(__locals[_name], "__module__", "httpcore") # noqa diff --git a/.venv/lib/python3.9/site-packages/httpcore/_async/__init__.py b/.venv/lib/python3.9/site-packages/httpcore/_async/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.9/site-packages/httpcore/_async/base.py b/.venv/lib/python3.9/site-packages/httpcore/_async/base.py new file mode 100644 index 0000000..2b3961c --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpcore/_async/base.py @@ -0,0 +1,122 @@ +import enum +from types import TracebackType +from typing import AsyncIterator, Tuple, Type + +from .._types import URL, Headers, T + + +class NewConnectionRequired(Exception): + pass + + +class ConnectionState(enum.IntEnum): + """ + PENDING READY + | | ^ + v V | + ACTIVE | + | | | + | V | + V IDLE-+ + FULL | + | | + V V + CLOSED + """ + + PENDING = 0 # Connection not yet acquired. + READY = 1 # Re-acquired from pool, about to send a request. + ACTIVE = 2 # Active requests. + FULL = 3 # Active requests, no more stream IDs available. + IDLE = 4 # No active requests. + CLOSED = 5 # Connection closed. + + +class AsyncByteStream: + """ + The base interface for request and response bodies. + + Concrete implementations should subclass this class, and implement + the :meth:`__aiter__` method, and optionally the :meth:`aclose` method. + """ + + async def __aiter__(self) -> AsyncIterator[bytes]: + """ + Yield bytes representing the request or response body. + """ + yield b"" # pragma: nocover + + async def aclose(self) -> None: + """ + Must be called by the client to indicate that the stream has been closed. + """ + pass # pragma: nocover + + async def aread(self) -> bytes: + try: + return b"".join([part async for part in self]) + finally: + await self.aclose() + + +class AsyncHTTPTransport: + """ + The base interface for sending HTTP requests. 
+ + Concrete implementations should subclass this class, and implement + the :meth:`handle_async_request` method, and optionally the :meth:`aclose` method. + """ + + async def handle_async_request( + self, + method: bytes, + url: URL, + headers: Headers, + stream: AsyncByteStream, + extensions: dict, + ) -> Tuple[int, Headers, AsyncByteStream, dict]: + """ + The interface for sending a single HTTP request, and returning a response. + + Parameters + ---------- + method: + The HTTP method, such as ``b'GET'``. + url: + The URL as a 4-tuple of (scheme, host, port, path). + headers: + Any HTTP headers to send with the request. + stream: + The body of the HTTP request. + extensions: + A dictionary of optional extensions. + + Returns + ------- + status_code: + The HTTP status code, such as ``200``. + headers: + Any HTTP headers included on the response. + stream: + The body of the HTTP response. + extensions: + A dictionary of optional extensions. + """ + raise NotImplementedError() # pragma: nocover + + async def aclose(self) -> None: + """ + Close the implementation, which should close any outstanding response streams, + and any keep alive connections. 
+ """ + + async def __aenter__(self: T) -> T: + return self + + async def __aexit__( + self, + exc_type: Type[BaseException] = None, + exc_value: BaseException = None, + traceback: TracebackType = None, + ) -> None: + await self.aclose() diff --git a/.venv/lib/python3.9/site-packages/httpcore/_async/connection.py b/.venv/lib/python3.9/site-packages/httpcore/_async/connection.py new file mode 100644 index 0000000..2add4d8 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpcore/_async/connection.py @@ -0,0 +1,220 @@ +from ssl import SSLContext +from typing import List, Optional, Tuple, cast + +from .._backends.auto import AsyncBackend, AsyncLock, AsyncSocketStream, AutoBackend +from .._exceptions import ConnectError, ConnectTimeout +from .._types import URL, Headers, Origin, TimeoutDict +from .._utils import exponential_backoff, get_logger, url_to_origin +from .base import AsyncByteStream, AsyncHTTPTransport, NewConnectionRequired +from .http import AsyncBaseHTTPConnection +from .http11 import AsyncHTTP11Connection + +logger = get_logger(__name__) + +RETRIES_BACKOFF_FACTOR = 0.5 # 0s, 0.5s, 1s, 2s, 4s, etc. 
+ + +class AsyncHTTPConnection(AsyncHTTPTransport): + def __init__( + self, + origin: Origin, + http1: bool = True, + http2: bool = False, + keepalive_expiry: float = None, + uds: str = None, + ssl_context: SSLContext = None, + socket: AsyncSocketStream = None, + local_address: str = None, + retries: int = 0, + backend: AsyncBackend = None, + ): + self.origin = origin + self._http1_enabled = http1 + self._http2_enabled = http2 + self._keepalive_expiry = keepalive_expiry + self._uds = uds + self._ssl_context = SSLContext() if ssl_context is None else ssl_context + self.socket = socket + self._local_address = local_address + self._retries = retries + + alpn_protocols: List[str] = [] + if http1: + alpn_protocols.append("http/1.1") + if http2: + alpn_protocols.append("h2") + + self._ssl_context.set_alpn_protocols(alpn_protocols) + + self.connection: Optional[AsyncBaseHTTPConnection] = None + self._is_http11 = False + self._is_http2 = False + self._connect_failed = False + self._expires_at: Optional[float] = None + self._backend = AutoBackend() if backend is None else backend + + def __repr__(self) -> str: + return f"" + + def info(self) -> str: + if self.connection is None: + return "Connection failed" if self._connect_failed else "Connecting" + return self.connection.info() + + def should_close(self) -> bool: + """ + Return `True` if the connection is in a state where it should be closed. + This occurs when any of the following occur: + + * There are no active requests on an HTTP/1.1 connection, and the underlying + socket is readable. The only valid state the socket can be readable in + if this occurs is when the b"" EOF marker is about to be returned, + indicating a server disconnect. + * There are no active requests being made and the keepalive timeout has passed. + """ + if self.connection is None: + return False + return self.connection.should_close() + + def is_idle(self) -> bool: + """ + Return `True` if the connection is currently idle. 
+ """ + if self.connection is None: + return False + return self.connection.is_idle() + + def is_closed(self) -> bool: + if self.connection is None: + return self._connect_failed + return self.connection.is_closed() + + def is_available(self) -> bool: + """ + Return `True` if the connection is currently able to accept an outgoing request. + This occurs when any of the following occur: + + * The connection has not yet been opened, and HTTP/2 support is enabled. + We don't *know* at this point if we'll end up on an HTTP/2 connection or + not, but we *might* do, so we indicate availability. + * The connection has been opened, and is currently idle. + * The connection is open, and is an HTTP/2 connection. The connection must + also not currently be exceeding the maximum number of allowable concurrent + streams and must not have exhausted the maximum total number of stream IDs. + """ + if self.connection is None: + return self._http2_enabled and not self.is_closed + return self.connection.is_available() + + @property + def request_lock(self) -> AsyncLock: + # We do this lazily, to make sure backend autodetection always + # runs within an async context. 
+ if not hasattr(self, "_request_lock"): + self._request_lock = self._backend.create_lock() + return self._request_lock + + async def handle_async_request( + self, + method: bytes, + url: URL, + headers: Headers, + stream: AsyncByteStream, + extensions: dict, + ) -> Tuple[int, Headers, AsyncByteStream, dict]: + assert url_to_origin(url) == self.origin + timeout = cast(TimeoutDict, extensions.get("timeout", {})) + + async with self.request_lock: + if self.connection is None: + if self._connect_failed: + raise NewConnectionRequired() + if not self.socket: + logger.trace( + "open_socket origin=%r timeout=%r", self.origin, timeout + ) + self.socket = await self._open_socket(timeout) + self._create_connection(self.socket) + elif not self.connection.is_available(): + raise NewConnectionRequired() + + assert self.connection is not None + logger.trace( + "connection.handle_async_request method=%r url=%r headers=%r", + method, + url, + headers, + ) + return await self.connection.handle_async_request( + method, url, headers, stream, extensions + ) + + async def _open_socket(self, timeout: TimeoutDict = None) -> AsyncSocketStream: + scheme, hostname, port = self.origin + timeout = {} if timeout is None else timeout + ssl_context = self._ssl_context if scheme == b"https" else None + + retries_left = self._retries + delays = exponential_backoff(factor=RETRIES_BACKOFF_FACTOR) + + while True: + try: + if self._uds is None: + return await self._backend.open_tcp_stream( + hostname, + port, + ssl_context, + timeout, + local_address=self._local_address, + ) + else: + return await self._backend.open_uds_stream( + self._uds, hostname, ssl_context, timeout + ) + except (ConnectError, ConnectTimeout): + if retries_left <= 0: + self._connect_failed = True + raise + retries_left -= 1 + delay = next(delays) + await self._backend.sleep(delay) + except Exception: # noqa: PIE786 + self._connect_failed = True + raise + + def _create_connection(self, socket: AsyncSocketStream) -> None: + 
http_version = socket.get_http_version() + logger.trace( + "create_connection socket=%r http_version=%r", socket, http_version + ) + if http_version == "HTTP/2" or ( + self._http2_enabled and not self._http1_enabled + ): + from .http2 import AsyncHTTP2Connection + + self._is_http2 = True + self.connection = AsyncHTTP2Connection( + socket=socket, + keepalive_expiry=self._keepalive_expiry, + backend=self._backend, + ) + else: + self._is_http11 = True + self.connection = AsyncHTTP11Connection( + socket=socket, keepalive_expiry=self._keepalive_expiry + ) + + async def start_tls( + self, hostname: bytes, ssl_context: SSLContext, timeout: TimeoutDict = None + ) -> None: + if self.connection is not None: + logger.trace("start_tls hostname=%r timeout=%r", hostname, timeout) + self.socket = await self.connection.start_tls( + hostname, ssl_context, timeout + ) + logger.trace("start_tls complete hostname=%r timeout=%r", hostname, timeout) + + async def aclose(self) -> None: + async with self.request_lock: + if self.connection is not None: + await self.connection.aclose() diff --git a/.venv/lib/python3.9/site-packages/httpcore/_async/connection_pool.py b/.venv/lib/python3.9/site-packages/httpcore/_async/connection_pool.py new file mode 100644 index 0000000..0902ac2 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpcore/_async/connection_pool.py @@ -0,0 +1,362 @@ +import warnings +from ssl import SSLContext +from typing import ( + AsyncIterator, + Callable, + Dict, + List, + Optional, + Set, + Tuple, + Union, + cast, +) + +from .._backends.auto import AsyncBackend, AsyncLock, AsyncSemaphore +from .._backends.base import lookup_async_backend +from .._exceptions import LocalProtocolError, PoolTimeout, UnsupportedProtocol +from .._threadlock import ThreadLock +from .._types import URL, Headers, Origin, TimeoutDict +from .._utils import get_logger, origin_to_url_string, url_to_origin +from .base import AsyncByteStream, AsyncHTTPTransport, NewConnectionRequired +from 
.connection import AsyncHTTPConnection + +logger = get_logger(__name__) + + +class NullSemaphore(AsyncSemaphore): + def __init__(self) -> None: + pass + + async def acquire(self, timeout: float = None) -> None: + return + + async def release(self) -> None: + return + + +class ResponseByteStream(AsyncByteStream): + def __init__( + self, + stream: AsyncByteStream, + connection: AsyncHTTPConnection, + callback: Callable, + ) -> None: + """ + A wrapper around the response stream that we return from + `.handle_async_request()`. + + Ensures that when `stream.aclose()` is called, the connection pool + is notified via a callback. + """ + self.stream = stream + self.connection = connection + self.callback = callback + + async def __aiter__(self) -> AsyncIterator[bytes]: + async for chunk in self.stream: + yield chunk + + async def aclose(self) -> None: + try: + # Call the underlying stream close callback. + # This will be a call to `AsyncHTTP11Connection._response_closed()` + # or `AsyncHTTP2Stream._response_closed()`. + await self.stream.aclose() + finally: + # Call the connection pool close callback. + # This will be a call to `AsyncConnectionPool._response_closed()`. + await self.callback(self.connection) + + +class AsyncConnectionPool(AsyncHTTPTransport): + """ + A connection pool for making HTTP requests. + + Parameters + ---------- + ssl_context: + An SSL context to use for verifying connections. + max_connections: + The maximum number of concurrent connections to allow. + max_keepalive_connections: + The maximum number of connections to allow before closing keep-alive + connections. + keepalive_expiry: + The maximum time to allow before closing a keep-alive connection. + http1: + Enable/Disable HTTP/1.1 support. Defaults to True. + http2: + Enable/Disable HTTP/2 support. Defaults to False. + uds: + Path to a Unix Domain Socket to use instead of TCP sockets. + local_address: + Local address to connect from. 
Can also be used to connect using a particular + address family. Using ``local_address="0.0.0.0"`` will connect using an + ``AF_INET`` address (IPv4), while using ``local_address="::"`` will connect + using an ``AF_INET6`` address (IPv6). + retries: + The maximum number of retries when trying to establish a connection. + backend: + A name indicating which concurrency backend to use. + """ + + def __init__( + self, + ssl_context: SSLContext = None, + max_connections: int = None, + max_keepalive_connections: int = None, + keepalive_expiry: float = None, + http1: bool = True, + http2: bool = False, + uds: str = None, + local_address: str = None, + retries: int = 0, + max_keepalive: int = None, + backend: Union[AsyncBackend, str] = "auto", + ): + if max_keepalive is not None: + warnings.warn( + "'max_keepalive' is deprecated. Use 'max_keepalive_connections'.", + DeprecationWarning, + ) + max_keepalive_connections = max_keepalive + + if isinstance(backend, str): + backend = lookup_async_backend(backend) + + self._ssl_context = SSLContext() if ssl_context is None else ssl_context + self._max_connections = max_connections + self._max_keepalive_connections = max_keepalive_connections + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + self._uds = uds + self._local_address = local_address + self._retries = retries + self._connections: Dict[Origin, Set[AsyncHTTPConnection]] = {} + self._thread_lock = ThreadLock() + self._backend = backend + self._next_keepalive_check = 0.0 + + if not (http1 or http2): + raise ValueError("Either http1 or http2 must be True.") + + if http2: + try: + import h2 # noqa: F401 + except ImportError: + raise ImportError( + "Attempted to use http2=True, but the 'h2' " + "package is not installed. Use 'pip install httpcore[http2]'." + ) + + @property + def _connection_semaphore(self) -> AsyncSemaphore: + # We do this lazily, to make sure backend autodetection always + # runs within an async context. 
+ if not hasattr(self, "_internal_semaphore"): + if self._max_connections is not None: + self._internal_semaphore = self._backend.create_semaphore( + self._max_connections, exc_class=PoolTimeout + ) + else: + self._internal_semaphore = NullSemaphore() + + return self._internal_semaphore + + @property + def _connection_acquiry_lock(self) -> AsyncLock: + if not hasattr(self, "_internal_connection_acquiry_lock"): + self._internal_connection_acquiry_lock = self._backend.create_lock() + return self._internal_connection_acquiry_lock + + def _create_connection( + self, + origin: Tuple[bytes, bytes, int], + ) -> AsyncHTTPConnection: + return AsyncHTTPConnection( + origin=origin, + http1=self._http1, + http2=self._http2, + keepalive_expiry=self._keepalive_expiry, + uds=self._uds, + ssl_context=self._ssl_context, + local_address=self._local_address, + retries=self._retries, + backend=self._backend, + ) + + async def handle_async_request( + self, + method: bytes, + url: URL, + headers: Headers, + stream: AsyncByteStream, + extensions: dict, + ) -> Tuple[int, Headers, AsyncByteStream, dict]: + if not url[0]: + raise UnsupportedProtocol( + "Request URL missing either an 'http://' or 'https://' protocol." + ) + + if url[0] not in (b"http", b"https"): + protocol = url[0].decode("ascii") + raise UnsupportedProtocol( + f"Request URL has an unsupported protocol '{protocol}://'." + ) + + if not url[1]: + raise LocalProtocolError("Missing hostname in URL.") + + origin = url_to_origin(url) + timeout = cast(TimeoutDict, extensions.get("timeout", {})) + + await self._keepalive_sweep() + + connection: Optional[AsyncHTTPConnection] = None + while connection is None: + async with self._connection_acquiry_lock: + # We get-or-create a connection as an atomic operation, to ensure + # that HTTP/2 requests issued in close concurrency will end up + # on the same connection. 
+ logger.trace("get_connection_from_pool=%r", origin) + connection = await self._get_connection_from_pool(origin) + + if connection is None: + connection = self._create_connection(origin=origin) + logger.trace("created connection=%r", connection) + await self._add_to_pool(connection, timeout=timeout) + else: + logger.trace("reuse connection=%r", connection) + + try: + response = await connection.handle_async_request( + method, url, headers=headers, stream=stream, extensions=extensions + ) + except NewConnectionRequired: + connection = None + except BaseException: # noqa: PIE786 + # See https://github.com/encode/httpcore/pull/305 for motivation + # behind catching 'BaseException' rather than 'Exception' here. + logger.trace("remove from pool connection=%r", connection) + await self._remove_from_pool(connection) + raise + + status_code, headers, stream, extensions = response + wrapped_stream = ResponseByteStream( + stream, connection=connection, callback=self._response_closed + ) + return status_code, headers, wrapped_stream, extensions + + async def _get_connection_from_pool( + self, origin: Origin + ) -> Optional[AsyncHTTPConnection]: + # Determine expired keep alive connections on this origin. + reuse_connection = None + connections_to_close = set() + + for connection in self._connections_for_origin(origin): + if connection.should_close(): + connections_to_close.add(connection) + await self._remove_from_pool(connection) + elif connection.is_available(): + reuse_connection = connection + + # Close any dropped connections. 
+ for connection in connections_to_close: + await connection.aclose() + + return reuse_connection + + async def _response_closed(self, connection: AsyncHTTPConnection) -> None: + remove_from_pool = False + close_connection = False + + if connection.is_closed(): + remove_from_pool = True + elif connection.is_idle(): + num_connections = len(self._get_all_connections()) + if ( + self._max_keepalive_connections is not None + and num_connections > self._max_keepalive_connections + ): + remove_from_pool = True + close_connection = True + + if remove_from_pool: + await self._remove_from_pool(connection) + + if close_connection: + await connection.aclose() + + async def _keepalive_sweep(self) -> None: + """ + Remove any IDLE connections that have expired past their keep-alive time. + """ + if self._keepalive_expiry is None: + return + + now = await self._backend.time() + if now < self._next_keepalive_check: + return + + self._next_keepalive_check = now + min(1.0, self._keepalive_expiry) + connections_to_close = set() + + for connection in self._get_all_connections(): + if connection.should_close(): + connections_to_close.add(connection) + await self._remove_from_pool(connection) + + for connection in connections_to_close: + await connection.aclose() + + async def _add_to_pool( + self, connection: AsyncHTTPConnection, timeout: TimeoutDict + ) -> None: + logger.trace("adding connection to pool=%r", connection) + await self._connection_semaphore.acquire(timeout=timeout.get("pool", None)) + async with self._thread_lock: + self._connections.setdefault(connection.origin, set()) + self._connections[connection.origin].add(connection) + + async def _remove_from_pool(self, connection: AsyncHTTPConnection) -> None: + logger.trace("removing connection from pool=%r", connection) + async with self._thread_lock: + if connection in self._connections.get(connection.origin, set()): + await self._connection_semaphore.release() + self._connections[connection.origin].remove(connection) + if 
not self._connections[connection.origin]: + del self._connections[connection.origin] + + def _connections_for_origin(self, origin: Origin) -> Set[AsyncHTTPConnection]: + return set(self._connections.get(origin, set())) + + def _get_all_connections(self) -> Set[AsyncHTTPConnection]: + connections: Set[AsyncHTTPConnection] = set() + for connection_set in self._connections.values(): + connections |= connection_set + return connections + + async def aclose(self) -> None: + connections = self._get_all_connections() + for connection in connections: + await self._remove_from_pool(connection) + + # Close all connections + for connection in connections: + await connection.aclose() + + async def get_connection_info(self) -> Dict[str, List[str]]: + """ + Returns a dict of origin URLs to a list of summary strings for each connection. + """ + await self._keepalive_sweep() + + stats = {} + for origin, connections in self._connections.items(): + stats[origin_to_url_string(origin)] = sorted( + [connection.info() for connection in connections] + ) + return stats diff --git a/.venv/lib/python3.9/site-packages/httpcore/_async/http.py b/.venv/lib/python3.9/site-packages/httpcore/_async/http.py new file mode 100644 index 0000000..06270f0 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpcore/_async/http.py @@ -0,0 +1,42 @@ +from ssl import SSLContext + +from .._backends.auto import AsyncSocketStream +from .._types import TimeoutDict +from .base import AsyncHTTPTransport + + +class AsyncBaseHTTPConnection(AsyncHTTPTransport): + def info(self) -> str: + raise NotImplementedError() # pragma: nocover + + def should_close(self) -> bool: + """ + Return `True` if the connection is in a state where it should be closed. + """ + raise NotImplementedError() # pragma: nocover + + def is_idle(self) -> bool: + """ + Return `True` if the connection is currently idle. 
+ """ + raise NotImplementedError() # pragma: nocover + + def is_closed(self) -> bool: + """ + Return `True` if the connection has been closed. + """ + raise NotImplementedError() # pragma: nocover + + def is_available(self) -> bool: + """ + Return `True` if the connection is currently able to accept an outgoing request. + """ + raise NotImplementedError() # pragma: nocover + + async def start_tls( + self, hostname: bytes, ssl_context: SSLContext, timeout: TimeoutDict = None + ) -> AsyncSocketStream: + """ + Upgrade the underlying socket to TLS. + """ + raise NotImplementedError() # pragma: nocover diff --git a/.venv/lib/python3.9/site-packages/httpcore/_async/http11.py b/.venv/lib/python3.9/site-packages/httpcore/_async/http11.py new file mode 100644 index 0000000..a265657 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpcore/_async/http11.py @@ -0,0 +1,269 @@ +import enum +import time +from ssl import SSLContext +from typing import AsyncIterator, List, Optional, Tuple, Union, cast + +import h11 + +from .._backends.auto import AsyncSocketStream +from .._bytestreams import AsyncIteratorByteStream +from .._exceptions import LocalProtocolError, RemoteProtocolError, map_exceptions +from .._types import URL, Headers, TimeoutDict +from .._utils import get_logger +from .base import AsyncByteStream, NewConnectionRequired +from .http import AsyncBaseHTTPConnection + +H11Event = Union[ + h11.Request, + h11.Response, + h11.InformationalResponse, + h11.Data, + h11.EndOfMessage, + h11.ConnectionClosed, +] + + +class ConnectionState(enum.IntEnum): + NEW = 0 + ACTIVE = 1 + IDLE = 2 + CLOSED = 3 + + +logger = get_logger(__name__) + + +class AsyncHTTP11Connection(AsyncBaseHTTPConnection): + READ_NUM_BYTES = 64 * 1024 + + def __init__(self, socket: AsyncSocketStream, keepalive_expiry: float = None): + self.socket = socket + + self._keepalive_expiry: Optional[float] = keepalive_expiry + self._should_expire_at: Optional[float] = None + self._h11_state = 
h11.Connection(our_role=h11.CLIENT) + self._state = ConnectionState.NEW + + def __repr__(self) -> str: + return f"" + + def _now(self) -> float: + return time.monotonic() + + def _server_disconnected(self) -> bool: + """ + Return True if the connection is idle, and the underlying socket is readable. + The only valid state the socket can be readable here is when the b"" + EOF marker is about to be returned, indicating a server disconnect. + """ + return self._state == ConnectionState.IDLE and self.socket.is_readable() + + def _keepalive_expired(self) -> bool: + """ + Return True if the connection is idle, and has passed it's keepalive + expiry time. + """ + return ( + self._state == ConnectionState.IDLE + and self._should_expire_at is not None + and self._now() >= self._should_expire_at + ) + + def info(self) -> str: + return f"HTTP/1.1, {self._state.name}" + + def should_close(self) -> bool: + """ + Return `True` if the connection is in a state where it should be closed. + """ + return self._server_disconnected() or self._keepalive_expired() + + def is_idle(self) -> bool: + """ + Return `True` if the connection is currently idle. + """ + return self._state == ConnectionState.IDLE + + def is_closed(self) -> bool: + """ + Return `True` if the connection has been closed. + """ + return self._state == ConnectionState.CLOSED + + def is_available(self) -> bool: + """ + Return `True` if the connection is currently able to accept an outgoing request. + """ + return self._state == ConnectionState.IDLE + + async def handle_async_request( + self, + method: bytes, + url: URL, + headers: Headers, + stream: AsyncByteStream, + extensions: dict, + ) -> Tuple[int, Headers, AsyncByteStream, dict]: + """ + Send a single HTTP/1.1 request. + + Note that there is no kind of task/thread locking at this layer of interface. + Dealing with locking for concurrency is handled by the `AsyncHTTPConnection`. 
+ """ + timeout = cast(TimeoutDict, extensions.get("timeout", {})) + + if self._state in (ConnectionState.NEW, ConnectionState.IDLE): + self._state = ConnectionState.ACTIVE + self._should_expire_at = None + else: + raise NewConnectionRequired() + + await self._send_request(method, url, headers, timeout) + await self._send_request_body(stream, timeout) + ( + http_version, + status_code, + reason_phrase, + headers, + ) = await self._receive_response(timeout) + response_stream = AsyncIteratorByteStream( + aiterator=self._receive_response_data(timeout), + aclose_func=self._response_closed, + ) + extensions = { + "http_version": http_version, + "reason_phrase": reason_phrase, + } + return (status_code, headers, response_stream, extensions) + + async def start_tls( + self, hostname: bytes, ssl_context: SSLContext, timeout: TimeoutDict = None + ) -> AsyncSocketStream: + timeout = {} if timeout is None else timeout + self.socket = await self.socket.start_tls(hostname, ssl_context, timeout) + return self.socket + + async def _send_request( + self, method: bytes, url: URL, headers: Headers, timeout: TimeoutDict + ) -> None: + """ + Send the request line and headers. + """ + logger.trace("send_request method=%r url=%r headers=%s", method, url, headers) + _scheme, _host, _port, target = url + with map_exceptions({h11.LocalProtocolError: LocalProtocolError}): + event = h11.Request(method=method, target=target, headers=headers) + await self._send_event(event, timeout) + + async def _send_request_body( + self, stream: AsyncByteStream, timeout: TimeoutDict + ) -> None: + """ + Send the request body. + """ + # Send the request body. + async for chunk in stream: + logger.trace("send_data=Data(<%d bytes>)", len(chunk)) + event = h11.Data(data=chunk) + await self._send_event(event, timeout) + + # Finalize sending the request. 
+ event = h11.EndOfMessage() + await self._send_event(event, timeout) + + async def _send_event(self, event: H11Event, timeout: TimeoutDict) -> None: + """ + Send a single `h11` event to the network, waiting for the data to + drain before returning. + """ + bytes_to_send = self._h11_state.send(event) + await self.socket.write(bytes_to_send, timeout) + + async def _receive_response( + self, timeout: TimeoutDict + ) -> Tuple[bytes, int, bytes, List[Tuple[bytes, bytes]]]: + """ + Read the response status and headers from the network. + """ + while True: + event = await self._receive_event(timeout) + if isinstance(event, h11.Response): + break + + http_version = b"HTTP/" + event.http_version + + # h11 version 0.11+ supports a `raw_items` interface to get the + # raw header casing, rather than the enforced lowercase headers. + headers = event.headers.raw_items() + + return http_version, event.status_code, event.reason, headers + + async def _receive_response_data( + self, timeout: TimeoutDict + ) -> AsyncIterator[bytes]: + """ + Read the response data from the network. + """ + while True: + event = await self._receive_event(timeout) + if isinstance(event, h11.Data): + logger.trace("receive_event=Data(<%d bytes>)", len(event.data)) + yield bytes(event.data) + elif isinstance(event, (h11.EndOfMessage, h11.PAUSED)): + logger.trace("receive_event=%r", event) + break + + async def _receive_event(self, timeout: TimeoutDict) -> H11Event: + """ + Read a single `h11` event, reading more data from the network if needed. 
+ """ + while True: + with map_exceptions({h11.RemoteProtocolError: RemoteProtocolError}): + event = self._h11_state.next_event() + + if event is h11.NEED_DATA: + data = await self.socket.read(self.READ_NUM_BYTES, timeout) + + # If we feed this case through h11 we'll raise an exception like: + # + # httpcore.RemoteProtocolError: can't handle event type + # ConnectionClosed when role=SERVER and state=SEND_RESPONSE + # + # Which is accurate, but not very informative from an end-user + # perspective. Instead we handle messaging for this case distinctly. + if data == b"" and self._h11_state.their_state == h11.SEND_RESPONSE: + msg = "Server disconnected without sending a response." + raise RemoteProtocolError(msg) + + self._h11_state.receive_data(data) + else: + assert event is not h11.NEED_DATA + break + return event + + async def _response_closed(self) -> None: + logger.trace( + "response_closed our_state=%r their_state=%r", + self._h11_state.our_state, + self._h11_state.their_state, + ) + if ( + self._h11_state.our_state is h11.DONE + and self._h11_state.their_state is h11.DONE + ): + self._h11_state.start_next_cycle() + self._state = ConnectionState.IDLE + if self._keepalive_expiry is not None: + self._should_expire_at = self._now() + self._keepalive_expiry + else: + await self.aclose() + + async def aclose(self) -> None: + if self._state != ConnectionState.CLOSED: + self._state = ConnectionState.CLOSED + + if self._h11_state.our_state is h11.MUST_CLOSE: + event = h11.ConnectionClosed() + self._h11_state.send(event) + + await self.socket.aclose() diff --git a/.venv/lib/python3.9/site-packages/httpcore/_async/http2.py b/.venv/lib/python3.9/site-packages/httpcore/_async/http2.py new file mode 100644 index 0000000..35a4e09 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpcore/_async/http2.py @@ -0,0 +1,446 @@ +import enum +import time +from ssl import SSLContext +from typing import AsyncIterator, Dict, List, Optional, Tuple, cast + +import h2.connection 
+import h2.events +from h2.config import H2Configuration +from h2.exceptions import NoAvailableStreamIDError +from h2.settings import SettingCodes, Settings + +from .._backends.auto import AsyncBackend, AsyncLock, AsyncSemaphore, AsyncSocketStream +from .._bytestreams import AsyncIteratorByteStream +from .._exceptions import LocalProtocolError, PoolTimeout, RemoteProtocolError +from .._types import URL, Headers, TimeoutDict +from .._utils import get_logger +from .base import AsyncByteStream, NewConnectionRequired +from .http import AsyncBaseHTTPConnection + +logger = get_logger(__name__) + + +class ConnectionState(enum.IntEnum): + IDLE = 0 + ACTIVE = 1 + CLOSED = 2 + + +class AsyncHTTP2Connection(AsyncBaseHTTPConnection): + READ_NUM_BYTES = 64 * 1024 + CONFIG = H2Configuration(validate_inbound_headers=False) + + def __init__( + self, + socket: AsyncSocketStream, + backend: AsyncBackend, + keepalive_expiry: float = None, + ): + self.socket = socket + + self._backend = backend + self._h2_state = h2.connection.H2Connection(config=self.CONFIG) + + self._sent_connection_init = False + self._streams: Dict[int, AsyncHTTP2Stream] = {} + self._events: Dict[int, List[h2.events.Event]] = {} + + self._keepalive_expiry: Optional[float] = keepalive_expiry + self._should_expire_at: Optional[float] = None + self._state = ConnectionState.ACTIVE + self._exhausted_available_stream_ids = False + + def __repr__(self) -> str: + return f"" + + def info(self) -> str: + return f"HTTP/2, {self._state.name}, {len(self._streams)} streams" + + def _now(self) -> float: + return time.monotonic() + + def should_close(self) -> bool: + """ + Return `True` if the connection is currently idle, and the keepalive + timeout has passed. + """ + return ( + self._state == ConnectionState.IDLE + and self._should_expire_at is not None + and self._now() >= self._should_expire_at + ) + + def is_idle(self) -> bool: + """ + Return `True` if the connection is currently idle. 
+ """ + return self._state == ConnectionState.IDLE + + def is_closed(self) -> bool: + """ + Return `True` if the connection has been closed. + """ + return self._state == ConnectionState.CLOSED + + def is_available(self) -> bool: + """ + Return `True` if the connection is currently able to accept an outgoing request. + This occurs when any of the following occur: + + * The connection has not yet been opened, and HTTP/2 support is enabled. + We don't *know* at this point if we'll end up on an HTTP/2 connection or + not, but we *might* do, so we indicate availability. + * The connection has been opened, and is currently idle. + * The connection is open, and is an HTTP/2 connection. The connection must + also not have exhausted the maximum total number of stream IDs. + """ + return ( + self._state != ConnectionState.CLOSED + and not self._exhausted_available_stream_ids + ) + + @property + def init_lock(self) -> AsyncLock: + # We do this lazily, to make sure backend autodetection always + # runs within an async context. + if not hasattr(self, "_initialization_lock"): + self._initialization_lock = self._backend.create_lock() + return self._initialization_lock + + @property + def read_lock(self) -> AsyncLock: + # We do this lazily, to make sure backend autodetection always + # runs within an async context. + if not hasattr(self, "_read_lock"): + self._read_lock = self._backend.create_lock() + return self._read_lock + + @property + def max_streams_semaphore(self) -> AsyncSemaphore: + # We do this lazily, to make sure backend autodetection always + # runs within an async context. 
+ if not hasattr(self, "_max_streams_semaphore"): + max_streams = self._h2_state.local_settings.max_concurrent_streams + self._max_streams_semaphore = self._backend.create_semaphore( + max_streams, exc_class=PoolTimeout + ) + return self._max_streams_semaphore + + async def start_tls( + self, hostname: bytes, ssl_context: SSLContext, timeout: TimeoutDict = None + ) -> AsyncSocketStream: + raise NotImplementedError("TLS upgrade not supported on HTTP/2 connections.") + + async def handle_async_request( + self, + method: bytes, + url: URL, + headers: Headers, + stream: AsyncByteStream, + extensions: dict, + ) -> Tuple[int, Headers, AsyncByteStream, dict]: + timeout = cast(TimeoutDict, extensions.get("timeout", {})) + + async with self.init_lock: + if not self._sent_connection_init: + # The very first stream is responsible for initiating the connection. + self._state = ConnectionState.ACTIVE + await self.send_connection_init(timeout) + self._sent_connection_init = True + + await self.max_streams_semaphore.acquire() + try: + try: + stream_id = self._h2_state.get_next_available_stream_id() + except NoAvailableStreamIDError: + self._exhausted_available_stream_ids = True + raise NewConnectionRequired() + else: + self._state = ConnectionState.ACTIVE + self._should_expire_at = None + + h2_stream = AsyncHTTP2Stream(stream_id=stream_id, connection=self) + self._streams[stream_id] = h2_stream + self._events[stream_id] = [] + return await h2_stream.handle_async_request( + method, url, headers, stream, extensions + ) + except Exception: # noqa: PIE786 + await self.max_streams_semaphore.release() + raise + + async def send_connection_init(self, timeout: TimeoutDict) -> None: + """ + The HTTP/2 connection requires some initial setup before we can start + using individual request/response streams on it. 
+ """ + # Need to set these manually here instead of manipulating via + # __setitem__() otherwise the H2Connection will emit SettingsUpdate + # frames in addition to sending the undesired defaults. + self._h2_state.local_settings = Settings( + client=True, + initial_values={ + # Disable PUSH_PROMISE frames from the server since we don't do anything + # with them for now. Maybe when we support caching? + SettingCodes.ENABLE_PUSH: 0, + # These two are taken from h2 for safe defaults + SettingCodes.MAX_CONCURRENT_STREAMS: 100, + SettingCodes.MAX_HEADER_LIST_SIZE: 65536, + }, + ) + + # Some websites (*cough* Yahoo *cough*) balk at this setting being + # present in the initial handshake since it's not defined in the original + # RFC despite the RFC mandating ignoring settings you don't know about. + del self._h2_state.local_settings[ + h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL + ] + + logger.trace("initiate_connection=%r", self) + self._h2_state.initiate_connection() + self._h2_state.increment_flow_control_window(2 ** 24) + data_to_send = self._h2_state.data_to_send() + await self.socket.write(data_to_send, timeout) + + def is_socket_readable(self) -> bool: + return self.socket.is_readable() + + async def aclose(self) -> None: + logger.trace("close_connection=%r", self) + if self._state != ConnectionState.CLOSED: + self._state = ConnectionState.CLOSED + + await self.socket.aclose() + + async def wait_for_outgoing_flow(self, stream_id: int, timeout: TimeoutDict) -> int: + """ + Returns the maximum allowable outgoing flow for a given stream. + If the allowable flow is zero, then waits on the network until + WindowUpdated frames have increased the flow rate. 
+ https://tools.ietf.org/html/rfc7540#section-6.9 + """ + local_flow = self._h2_state.local_flow_control_window(stream_id) + connection_flow = self._h2_state.max_outbound_frame_size + flow = min(local_flow, connection_flow) + while flow == 0: + await self.receive_events(timeout) + local_flow = self._h2_state.local_flow_control_window(stream_id) + connection_flow = self._h2_state.max_outbound_frame_size + flow = min(local_flow, connection_flow) + return flow + + async def wait_for_event( + self, stream_id: int, timeout: TimeoutDict + ) -> h2.events.Event: + """ + Returns the next event for a given stream. + If no events are available yet, then waits on the network until + an event is available. + """ + async with self.read_lock: + while not self._events[stream_id]: + await self.receive_events(timeout) + return self._events[stream_id].pop(0) + + async def receive_events(self, timeout: TimeoutDict) -> None: + """ + Read some data from the network, and update the H2 state. + """ + data = await self.socket.read(self.READ_NUM_BYTES, timeout) + if data == b"": + raise RemoteProtocolError("Server disconnected") + + events = self._h2_state.receive_data(data) + for event in events: + event_stream_id = getattr(event, "stream_id", 0) + logger.trace("receive_event stream_id=%r event=%s", event_stream_id, event) + + if hasattr(event, "error_code"): + raise RemoteProtocolError(event) + + if event_stream_id in self._events: + self._events[event_stream_id].append(event) + + data_to_send = self._h2_state.data_to_send() + await self.socket.write(data_to_send, timeout) + + async def send_headers( + self, stream_id: int, headers: Headers, end_stream: bool, timeout: TimeoutDict + ) -> None: + logger.trace("send_headers stream_id=%r headers=%r", stream_id, headers) + self._h2_state.send_headers(stream_id, headers, end_stream=end_stream) + self._h2_state.increment_flow_control_window(2 ** 24, stream_id=stream_id) + data_to_send = self._h2_state.data_to_send() + await 
self.socket.write(data_to_send, timeout) + + async def send_data( + self, stream_id: int, chunk: bytes, timeout: TimeoutDict + ) -> None: + logger.trace("send_data stream_id=%r chunk=%r", stream_id, chunk) + self._h2_state.send_data(stream_id, chunk) + data_to_send = self._h2_state.data_to_send() + await self.socket.write(data_to_send, timeout) + + async def end_stream(self, stream_id: int, timeout: TimeoutDict) -> None: + logger.trace("end_stream stream_id=%r", stream_id) + self._h2_state.end_stream(stream_id) + data_to_send = self._h2_state.data_to_send() + await self.socket.write(data_to_send, timeout) + + async def acknowledge_received_data( + self, stream_id: int, amount: int, timeout: TimeoutDict + ) -> None: + self._h2_state.acknowledge_received_data(amount, stream_id) + data_to_send = self._h2_state.data_to_send() + await self.socket.write(data_to_send, timeout) + + async def close_stream(self, stream_id: int) -> None: + try: + logger.trace("close_stream stream_id=%r", stream_id) + del self._streams[stream_id] + del self._events[stream_id] + + if not self._streams: + if self._state == ConnectionState.ACTIVE: + if self._exhausted_available_stream_ids: + await self.aclose() + else: + self._state = ConnectionState.IDLE + if self._keepalive_expiry is not None: + self._should_expire_at = ( + self._now() + self._keepalive_expiry + ) + finally: + await self.max_streams_semaphore.release() + + +class AsyncHTTP2Stream: + def __init__(self, stream_id: int, connection: AsyncHTTP2Connection) -> None: + self.stream_id = stream_id + self.connection = connection + + async def handle_async_request( + self, + method: bytes, + url: URL, + headers: Headers, + stream: AsyncByteStream, + extensions: dict, + ) -> Tuple[int, Headers, AsyncByteStream, dict]: + headers = [(k.lower(), v) for (k, v) in headers] + timeout = cast(TimeoutDict, extensions.get("timeout", {})) + + # Send the request. 
+ seen_headers = set(key for key, value in headers) + has_body = ( + b"content-length" in seen_headers or b"transfer-encoding" in seen_headers + ) + + await self.send_headers(method, url, headers, has_body, timeout) + if has_body: + await self.send_body(stream, timeout) + + # Receive the response. + status_code, headers = await self.receive_response(timeout) + response_stream = AsyncIteratorByteStream( + aiterator=self.body_iter(timeout), aclose_func=self._response_closed + ) + + extensions = { + "http_version": b"HTTP/2", + } + return (status_code, headers, response_stream, extensions) + + async def send_headers( + self, + method: bytes, + url: URL, + headers: Headers, + has_body: bool, + timeout: TimeoutDict, + ) -> None: + scheme, hostname, port, path = url + + # In HTTP/2 the ':authority' pseudo-header is used instead of 'Host'. + # In order to gracefully handle HTTP/1.1 and HTTP/2 we always require + # HTTP/1.1 style headers, and map them appropriately if we end up on + # an HTTP/2 connection. + authority = None + + for k, v in headers: + if k == b"host": + authority = v + break + + if authority is None: + # Mirror the same error we'd see with `h11`, so that the behaviour + # is consistent. Although we're dealing with an `:authority` + # pseudo-header by this point, from an end-user perspective the issue + # is that the outgoing request needed to include a `host` header. 
+ raise LocalProtocolError("Missing mandatory Host: header") + + headers = [ + (b":method", method), + (b":authority", authority), + (b":scheme", scheme), + (b":path", path), + ] + [ + (k, v) + for k, v in headers + if k + not in ( + b"host", + b"transfer-encoding", + ) + ] + end_stream = not has_body + + await self.connection.send_headers(self.stream_id, headers, end_stream, timeout) + + async def send_body(self, stream: AsyncByteStream, timeout: TimeoutDict) -> None: + async for data in stream: + while data: + max_flow = await self.connection.wait_for_outgoing_flow( + self.stream_id, timeout + ) + chunk_size = min(len(data), max_flow) + chunk, data = data[:chunk_size], data[chunk_size:] + await self.connection.send_data(self.stream_id, chunk, timeout) + + await self.connection.end_stream(self.stream_id, timeout) + + async def receive_response( + self, timeout: TimeoutDict + ) -> Tuple[int, List[Tuple[bytes, bytes]]]: + """ + Read the response status and headers from the network. + """ + while True: + event = await self.connection.wait_for_event(self.stream_id, timeout) + if isinstance(event, h2.events.ResponseReceived): + break + + status_code = 200 + headers = [] + for k, v in event.headers: + if k == b":status": + status_code = int(v.decode("ascii", errors="ignore")) + elif not k.startswith(b":"): + headers.append((k, v)) + + return (status_code, headers) + + async def body_iter(self, timeout: TimeoutDict) -> AsyncIterator[bytes]: + while True: + event = await self.connection.wait_for_event(self.stream_id, timeout) + if isinstance(event, h2.events.DataReceived): + amount = event.flow_controlled_length + await self.connection.acknowledge_received_data( + self.stream_id, amount, timeout + ) + yield event.data + elif isinstance(event, (h2.events.StreamEnded, h2.events.StreamReset)): + break + + async def _response_closed(self) -> None: + await self.connection.close_stream(self.stream_id) diff --git a/.venv/lib/python3.9/site-packages/httpcore/_async/http_proxy.py 
b/.venv/lib/python3.9/site-packages/httpcore/_async/http_proxy.py new file mode 100644 index 0000000..275bf21 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpcore/_async/http_proxy.py @@ -0,0 +1,290 @@ +from http import HTTPStatus +from ssl import SSLContext +from typing import Tuple, cast + +from .._bytestreams import ByteStream +from .._exceptions import ProxyError +from .._types import URL, Headers, TimeoutDict +from .._utils import get_logger, url_to_origin +from .base import AsyncByteStream +from .connection import AsyncHTTPConnection +from .connection_pool import AsyncConnectionPool, ResponseByteStream + +logger = get_logger(__name__) + + +def get_reason_phrase(status_code: int) -> str: + try: + return HTTPStatus(status_code).phrase + except ValueError: + return "" + + +def merge_headers( + default_headers: Headers = None, override_headers: Headers = None +) -> Headers: + """ + Append default_headers and override_headers, de-duplicating if a key existing in + both cases. + """ + default_headers = [] if default_headers is None else default_headers + override_headers = [] if override_headers is None else override_headers + has_override = set([key.lower() for key, value in override_headers]) + default_headers = [ + (key, value) + for key, value in default_headers + if key.lower() not in has_override + ] + return default_headers + override_headers + + +class AsyncHTTPProxy(AsyncConnectionPool): + """ + A connection pool for making HTTP requests via an HTTP proxy. + + Parameters + ---------- + proxy_url: + The URL of the proxy service as a 4-tuple of (scheme, host, port, path). + proxy_headers: + A list of proxy headers to include. + proxy_mode: + A proxy mode to operate in. May be "DEFAULT", "FORWARD_ONLY", or "TUNNEL_ONLY". + ssl_context: + An SSL context to use for verifying connections. + max_connections: + The maximum number of concurrent connections to allow. 
+ max_keepalive_connections: + The maximum number of connections to allow before closing keep-alive + connections. + http2: + Enable HTTP/2 support. + """ + + def __init__( + self, + proxy_url: URL, + proxy_headers: Headers = None, + proxy_mode: str = "DEFAULT", + ssl_context: SSLContext = None, + max_connections: int = None, + max_keepalive_connections: int = None, + keepalive_expiry: float = None, + http2: bool = False, + backend: str = "auto", + # Deprecated argument style: + max_keepalive: int = None, + ): + assert proxy_mode in ("DEFAULT", "FORWARD_ONLY", "TUNNEL_ONLY") + + self.proxy_origin = url_to_origin(proxy_url) + self.proxy_headers = [] if proxy_headers is None else proxy_headers + self.proxy_mode = proxy_mode + super().__init__( + ssl_context=ssl_context, + max_connections=max_connections, + max_keepalive_connections=max_keepalive_connections, + keepalive_expiry=keepalive_expiry, + http2=http2, + backend=backend, + max_keepalive=max_keepalive, + ) + + async def handle_async_request( + self, + method: bytes, + url: URL, + headers: Headers, + stream: AsyncByteStream, + extensions: dict, + ) -> Tuple[int, Headers, AsyncByteStream, dict]: + if self._keepalive_expiry is not None: + await self._keepalive_sweep() + + if ( + self.proxy_mode == "DEFAULT" and url[0] == b"http" + ) or self.proxy_mode == "FORWARD_ONLY": + # By default HTTP requests should be forwarded. + logger.trace( + "forward_request proxy_origin=%r proxy_headers=%r method=%r url=%r", + self.proxy_origin, + self.proxy_headers, + method, + url, + ) + return await self._forward_request( + method, url, headers=headers, stream=stream, extensions=extensions + ) + else: + # By default HTTPS should be tunnelled. 
+ logger.trace( + "tunnel_request proxy_origin=%r proxy_headers=%r method=%r url=%r", + self.proxy_origin, + self.proxy_headers, + method, + url, + ) + return await self._tunnel_request( + method, url, headers=headers, stream=stream, extensions=extensions + ) + + async def _forward_request( + self, + method: bytes, + url: URL, + headers: Headers, + stream: AsyncByteStream, + extensions: dict, + ) -> Tuple[int, Headers, AsyncByteStream, dict]: + """ + Forwarded proxy requests include the entire URL as the HTTP target, + rather than just the path. + """ + timeout = cast(TimeoutDict, extensions.get("timeout", {})) + origin = self.proxy_origin + connection = await self._get_connection_from_pool(origin) + + if connection is None: + connection = AsyncHTTPConnection( + origin=origin, + http2=self._http2, + keepalive_expiry=self._keepalive_expiry, + ssl_context=self._ssl_context, + ) + await self._add_to_pool(connection, timeout) + + # Issue a forwarded proxy request... + + # GET https://www.example.org/path HTTP/1.1 + # [proxy headers] + # [headers] + scheme, host, port, path = url + if port is None: + target = b"%b://%b%b" % (scheme, host, path) + else: + target = b"%b://%b:%d%b" % (scheme, host, port, path) + + url = self.proxy_origin + (target,) + headers = merge_headers(self.proxy_headers, headers) + + ( + status_code, + headers, + stream, + extensions, + ) = await connection.handle_async_request( + method, url, headers=headers, stream=stream, extensions=extensions + ) + + wrapped_stream = ResponseByteStream( + stream, connection=connection, callback=self._response_closed + ) + + return status_code, headers, wrapped_stream, extensions + + async def _tunnel_request( + self, + method: bytes, + url: URL, + headers: Headers, + stream: AsyncByteStream, + extensions: dict, + ) -> Tuple[int, Headers, AsyncByteStream, dict]: + """ + Tunnelled proxy requests require an initial CONNECT request to + establish the connection, and then send regular requests. 
+ """ + timeout = cast(TimeoutDict, extensions.get("timeout", {})) + origin = url_to_origin(url) + connection = await self._get_connection_from_pool(origin) + + if connection is None: + scheme, host, port = origin + + # First, create a connection to the proxy server + proxy_connection = AsyncHTTPConnection( + origin=self.proxy_origin, + http2=self._http2, + keepalive_expiry=self._keepalive_expiry, + ssl_context=self._ssl_context, + ) + + # Issue a CONNECT request... + + # CONNECT www.example.org:80 HTTP/1.1 + # [proxy-headers] + target = b"%b:%d" % (host, port) + connect_url = self.proxy_origin + (target,) + connect_headers = [(b"Host", target), (b"Accept", b"*/*")] + connect_headers = merge_headers(connect_headers, self.proxy_headers) + + try: + ( + proxy_status_code, + _, + proxy_stream, + _, + ) = await proxy_connection.handle_async_request( + b"CONNECT", + connect_url, + headers=connect_headers, + stream=ByteStream(b""), + extensions=extensions, + ) + + proxy_reason = get_reason_phrase(proxy_status_code) + logger.trace( + "tunnel_response proxy_status_code=%r proxy_reason=%r ", + proxy_status_code, + proxy_reason, + ) + # Read the response data without closing the socket + async for _ in proxy_stream: + pass + + # See if the tunnel was successfully established. + if proxy_status_code < 200 or proxy_status_code > 299: + msg = "%d %s" % (proxy_status_code, proxy_reason) + raise ProxyError(msg) + + # Upgrade to TLS if required + # We assume the target speaks TLS on the specified port + if scheme == b"https": + await proxy_connection.start_tls(host, self._ssl_context, timeout) + except Exception as exc: + await proxy_connection.aclose() + raise ProxyError(exc) + + # The CONNECT request is successful, so we have now SWITCHED PROTOCOLS. + # This means the proxy connection is now unusable, and we must create + # a new one for regular requests, making sure to use the same socket to + # retain the tunnel. 
+ connection = AsyncHTTPConnection( + origin=origin, + http2=self._http2, + keepalive_expiry=self._keepalive_expiry, + ssl_context=self._ssl_context, + socket=proxy_connection.socket, + ) + await self._add_to_pool(connection, timeout) + + # Once the connection has been established we can send requests on + # it as normal. + ( + status_code, + headers, + stream, + extensions, + ) = await connection.handle_async_request( + method, + url, + headers=headers, + stream=stream, + extensions=extensions, + ) + + wrapped_stream = ResponseByteStream( + stream, connection=connection, callback=self._response_closed + ) + + return status_code, headers, wrapped_stream, extensions diff --git a/.venv/lib/python3.9/site-packages/httpcore/_backends/__init__.py b/.venv/lib/python3.9/site-packages/httpcore/_backends/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.9/site-packages/httpcore/_backends/anyio.py b/.venv/lib/python3.9/site-packages/httpcore/_backends/anyio.py new file mode 100644 index 0000000..b1332a2 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpcore/_backends/anyio.py @@ -0,0 +1,201 @@ +from ssl import SSLContext +from typing import Optional + +import anyio.abc +from anyio import BrokenResourceError, EndOfStream +from anyio.abc import ByteStream, SocketAttribute +from anyio.streams.tls import TLSAttribute, TLSStream + +from .._exceptions import ( + ConnectError, + ConnectTimeout, + ReadError, + ReadTimeout, + WriteError, + WriteTimeout, + map_exceptions, +) +from .._types import TimeoutDict +from .._utils import is_socket_readable +from .base import AsyncBackend, AsyncLock, AsyncSemaphore, AsyncSocketStream + + +class SocketStream(AsyncSocketStream): + def __init__(self, stream: ByteStream) -> None: + self.stream = stream + self.read_lock = anyio.Lock() + self.write_lock = anyio.Lock() + + def get_http_version(self) -> str: + alpn_protocol = self.stream.extra(TLSAttribute.alpn_protocol, None) + return "HTTP/2" if 
alpn_protocol == "h2" else "HTTP/1.1" + + async def start_tls( + self, + hostname: bytes, + ssl_context: SSLContext, + timeout: TimeoutDict, + ) -> "SocketStream": + connect_timeout = timeout.get("connect") + try: + with anyio.fail_after(connect_timeout): + ssl_stream = await TLSStream.wrap( + self.stream, + ssl_context=ssl_context, + hostname=hostname.decode("ascii"), + standard_compatible=False, + ) + except TimeoutError: + raise ConnectTimeout from None + except BrokenResourceError as exc: + raise ConnectError from exc + + return SocketStream(ssl_stream) + + async def read(self, n: int, timeout: TimeoutDict) -> bytes: + read_timeout = timeout.get("read") + async with self.read_lock: + try: + with anyio.fail_after(read_timeout): + return await self.stream.receive(n) + except TimeoutError: + await self.stream.aclose() + raise ReadTimeout from None + except BrokenResourceError as exc: + raise ReadError from exc + except EndOfStream: + return b"" + + async def write(self, data: bytes, timeout: TimeoutDict) -> None: + if not data: + return + + write_timeout = timeout.get("write") + async with self.write_lock: + try: + with anyio.fail_after(write_timeout): + return await self.stream.send(data) + except TimeoutError: + await self.stream.aclose() + raise WriteTimeout from None + except BrokenResourceError as exc: + raise WriteError from exc + + async def aclose(self) -> None: + async with self.write_lock: + try: + await self.stream.aclose() + except BrokenResourceError: + pass + + def is_readable(self) -> bool: + sock = self.stream.extra(SocketAttribute.raw_socket) + return is_socket_readable(sock) + + +class Lock(AsyncLock): + def __init__(self) -> None: + self._lock = anyio.Lock() + + async def release(self) -> None: + self._lock.release() + + async def acquire(self) -> None: + await self._lock.acquire() + + +class Semaphore(AsyncSemaphore): + def __init__(self, max_value: int, exc_class: type): + self.max_value = max_value + self.exc_class = exc_class + + @property + 
def semaphore(self) -> anyio.abc.Semaphore: + if not hasattr(self, "_semaphore"): + self._semaphore = anyio.Semaphore(self.max_value) + return self._semaphore + + async def acquire(self, timeout: float = None) -> None: + with anyio.move_on_after(timeout): + await self.semaphore.acquire() + return + + raise self.exc_class() + + async def release(self) -> None: + self.semaphore.release() + + +class AnyIOBackend(AsyncBackend): + async def open_tcp_stream( + self, + hostname: bytes, + port: int, + ssl_context: Optional[SSLContext], + timeout: TimeoutDict, + *, + local_address: Optional[str], + ) -> AsyncSocketStream: + connect_timeout = timeout.get("connect") + unicode_host = hostname.decode("utf-8") + exc_map = { + TimeoutError: ConnectTimeout, + OSError: ConnectError, + BrokenResourceError: ConnectError, + } + + with map_exceptions(exc_map): + with anyio.fail_after(connect_timeout): + stream: anyio.abc.ByteStream + stream = await anyio.connect_tcp( + unicode_host, port, local_host=local_address + ) + if ssl_context: + stream = await TLSStream.wrap( + stream, + hostname=unicode_host, + ssl_context=ssl_context, + standard_compatible=False, + ) + + return SocketStream(stream=stream) + + async def open_uds_stream( + self, + path: str, + hostname: bytes, + ssl_context: Optional[SSLContext], + timeout: TimeoutDict, + ) -> AsyncSocketStream: + connect_timeout = timeout.get("connect") + unicode_host = hostname.decode("utf-8") + exc_map = { + TimeoutError: ConnectTimeout, + OSError: ConnectError, + BrokenResourceError: ConnectError, + } + + with map_exceptions(exc_map): + with anyio.fail_after(connect_timeout): + stream: anyio.abc.ByteStream = await anyio.connect_unix(path) + if ssl_context: + stream = await TLSStream.wrap( + stream, + hostname=unicode_host, + ssl_context=ssl_context, + standard_compatible=False, + ) + + return SocketStream(stream=stream) + + def create_lock(self) -> AsyncLock: + return Lock() + + def create_semaphore(self, max_value: int, exc_class: type) -> 
AsyncSemaphore: + return Semaphore(max_value, exc_class=exc_class) + + async def time(self) -> float: + return float(anyio.current_time()) + + async def sleep(self, seconds: float) -> None: + await anyio.sleep(seconds) diff --git a/.venv/lib/python3.9/site-packages/httpcore/_backends/asyncio.py b/.venv/lib/python3.9/site-packages/httpcore/_backends/asyncio.py new file mode 100644 index 0000000..5142072 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpcore/_backends/asyncio.py @@ -0,0 +1,303 @@ +import asyncio +import socket +from ssl import SSLContext +from typing import Optional + +from .._exceptions import ( + ConnectError, + ConnectTimeout, + ReadError, + ReadTimeout, + WriteError, + WriteTimeout, + map_exceptions, +) +from .._types import TimeoutDict +from .._utils import is_socket_readable +from .base import AsyncBackend, AsyncLock, AsyncSemaphore, AsyncSocketStream + +SSL_MONKEY_PATCH_APPLIED = False + + +def ssl_monkey_patch() -> None: + """ + Monkey-patch for https://bugs.python.org/issue36709 + + This prevents console errors when outstanding HTTPS connections + still exist at the point of exiting. + + Clients which have been opened using a `with` block, or which have + had `close()` closed, will not exhibit this issue in the first place. + """ + MonkeyPatch = asyncio.selector_events._SelectorSocketTransport # type: ignore + + _write = MonkeyPatch.write + + def _fixed_write(self, data: bytes) -> None: # type: ignore + if self._loop and not self._loop.is_closed(): + _write(self, data) + + MonkeyPatch.write = _fixed_write + + +async def backport_start_tls( + transport: asyncio.BaseTransport, + protocol: asyncio.BaseProtocol, + ssl_context: SSLContext, + *, + server_side: bool = False, + server_hostname: str = None, + ssl_handshake_timeout: float = None, +) -> asyncio.Transport: # pragma: nocover (Since it's not used on all Python versions.) 
+ """ + Python 3.6 asyncio doesn't have a start_tls() method on the loop + so we use this function in place of the loop's start_tls() method. + Adapted from this comment: + https://github.com/urllib3/urllib3/issues/1323#issuecomment-362494839 + """ + import asyncio.sslproto + + loop = asyncio.get_event_loop() + waiter = loop.create_future() + ssl_protocol = asyncio.sslproto.SSLProtocol( + loop, + protocol, + ssl_context, + waiter, + server_side=False, + server_hostname=server_hostname, + call_connection_made=False, + ) + + transport.set_protocol(ssl_protocol) + loop.call_soon(ssl_protocol.connection_made, transport) + loop.call_soon(transport.resume_reading) # type: ignore + + await waiter + return ssl_protocol._app_transport + + +class SocketStream(AsyncSocketStream): + def __init__( + self, stream_reader: asyncio.StreamReader, stream_writer: asyncio.StreamWriter + ): + self.stream_reader = stream_reader + self.stream_writer = stream_writer + self.read_lock = asyncio.Lock() + self.write_lock = asyncio.Lock() + + def get_http_version(self) -> str: + ssl_object = self.stream_writer.get_extra_info("ssl_object") + + if ssl_object is None: + return "HTTP/1.1" + + ident = ssl_object.selected_alpn_protocol() + return "HTTP/2" if ident == "h2" else "HTTP/1.1" + + async def start_tls( + self, hostname: bytes, ssl_context: SSLContext, timeout: TimeoutDict + ) -> "SocketStream": + loop = asyncio.get_event_loop() + + stream_reader = asyncio.StreamReader() + protocol = asyncio.StreamReaderProtocol(stream_reader) + transport = self.stream_writer.transport + + loop_start_tls = getattr(loop, "start_tls", backport_start_tls) + + exc_map = {asyncio.TimeoutError: ConnectTimeout, OSError: ConnectError} + + with map_exceptions(exc_map): + transport = await asyncio.wait_for( + loop_start_tls( + transport, + protocol, + ssl_context, + server_hostname=hostname.decode("ascii"), + ), + timeout=timeout.get("connect"), + ) + + # Initialize the protocol, so it is made aware of being tied to + 
# a TLS connection. + # See: https://github.com/encode/httpx/issues/859 + protocol.connection_made(transport) + + stream_writer = asyncio.StreamWriter( + transport=transport, protocol=protocol, reader=stream_reader, loop=loop + ) + + ssl_stream = SocketStream(stream_reader, stream_writer) + # When we return a new SocketStream with new StreamReader/StreamWriter instances + # we need to keep references to the old StreamReader/StreamWriter so that they + # are not garbage collected and closed while we're still using them. + ssl_stream._inner = self # type: ignore + return ssl_stream + + async def read(self, n: int, timeout: TimeoutDict) -> bytes: + exc_map = {asyncio.TimeoutError: ReadTimeout, OSError: ReadError} + async with self.read_lock: + with map_exceptions(exc_map): + try: + return await asyncio.wait_for( + self.stream_reader.read(n), timeout.get("read") + ) + except AttributeError as exc: # pragma: nocover + if "resume_reading" in str(exc): + # Python's asyncio has a bug that can occur when a + # connection has been closed, while it is paused. + # See: https://github.com/encode/httpx/issues/1213 + # + # Returning an empty byte-string to indicate connection + # close will eventually raise an httpcore.RemoteProtocolError + # to the user when this goes through our HTTP parsing layer. + return b"" + raise + + async def write(self, data: bytes, timeout: TimeoutDict) -> None: + if not data: + return + + exc_map = {asyncio.TimeoutError: WriteTimeout, OSError: WriteError} + async with self.write_lock: + with map_exceptions(exc_map): + self.stream_writer.write(data) + return await asyncio.wait_for( + self.stream_writer.drain(), timeout.get("write") + ) + + async def aclose(self) -> None: + # SSL connections should issue the close and then abort, rather than + # waiting for the remote end of the connection to signal the EOF. 
+ # + # See: + # + # * https://bugs.python.org/issue39758 + # * https://github.com/python-trio/trio/blob/ + # 31e2ae866ad549f1927d45ce073d4f0ea9f12419/trio/_ssl.py#L779-L829 + # + # And related issues caused if we simply omit the 'wait_closed' call, + # without first using `.abort()` + # + # * https://github.com/encode/httpx/issues/825 + # * https://github.com/encode/httpx/issues/914 + is_ssl = self.stream_writer.get_extra_info("ssl_object") is not None + + async with self.write_lock: + try: + self.stream_writer.close() + if is_ssl: + # Give the connection a chance to write any data in the buffer, + # and then forcibly tear down the SSL connection. + await asyncio.sleep(0) + self.stream_writer.transport.abort() # type: ignore + if hasattr(self.stream_writer, "wait_closed"): + # Python 3.7+ only. + await self.stream_writer.wait_closed() # type: ignore + except OSError: + pass + + def is_readable(self) -> bool: + transport = self.stream_reader._transport # type: ignore + sock: Optional[socket.socket] = transport.get_extra_info("socket") + return is_socket_readable(sock) + + +class Lock(AsyncLock): + def __init__(self) -> None: + self._lock = asyncio.Lock() + + async def release(self) -> None: + self._lock.release() + + async def acquire(self) -> None: + await self._lock.acquire() + + +class Semaphore(AsyncSemaphore): + def __init__(self, max_value: int, exc_class: type) -> None: + self.max_value = max_value + self.exc_class = exc_class + + @property + def semaphore(self) -> asyncio.BoundedSemaphore: + if not hasattr(self, "_semaphore"): + self._semaphore = asyncio.BoundedSemaphore(value=self.max_value) + return self._semaphore + + async def acquire(self, timeout: float = None) -> None: + try: + await asyncio.wait_for(self.semaphore.acquire(), timeout) + except asyncio.TimeoutError: + raise self.exc_class() + + async def release(self) -> None: + self.semaphore.release() + + +class AsyncioBackend(AsyncBackend): + def __init__(self) -> None: + global 
SSL_MONKEY_PATCH_APPLIED + + if not SSL_MONKEY_PATCH_APPLIED: + ssl_monkey_patch() + SSL_MONKEY_PATCH_APPLIED = True + + async def open_tcp_stream( + self, + hostname: bytes, + port: int, + ssl_context: Optional[SSLContext], + timeout: TimeoutDict, + *, + local_address: Optional[str], + ) -> SocketStream: + host = hostname.decode("ascii") + connect_timeout = timeout.get("connect") + local_addr = None if local_address is None else (local_address, 0) + + exc_map = {asyncio.TimeoutError: ConnectTimeout, OSError: ConnectError} + with map_exceptions(exc_map): + stream_reader, stream_writer = await asyncio.wait_for( + asyncio.open_connection( + host, port, ssl=ssl_context, local_addr=local_addr + ), + connect_timeout, + ) + return SocketStream( + stream_reader=stream_reader, stream_writer=stream_writer + ) + + async def open_uds_stream( + self, + path: str, + hostname: bytes, + ssl_context: Optional[SSLContext], + timeout: TimeoutDict, + ) -> AsyncSocketStream: + host = hostname.decode("ascii") + connect_timeout = timeout.get("connect") + kwargs: dict = {"server_hostname": host} if ssl_context is not None else {} + exc_map = {asyncio.TimeoutError: ConnectTimeout, OSError: ConnectError} + with map_exceptions(exc_map): + stream_reader, stream_writer = await asyncio.wait_for( + asyncio.open_unix_connection(path, ssl=ssl_context, **kwargs), + connect_timeout, + ) + return SocketStream( + stream_reader=stream_reader, stream_writer=stream_writer + ) + + def create_lock(self) -> AsyncLock: + return Lock() + + def create_semaphore(self, max_value: int, exc_class: type) -> AsyncSemaphore: + return Semaphore(max_value, exc_class=exc_class) + + async def time(self) -> float: + loop = asyncio.get_event_loop() + return loop.time() + + async def sleep(self, seconds: float) -> None: + await asyncio.sleep(seconds) diff --git a/.venv/lib/python3.9/site-packages/httpcore/_backends/auto.py b/.venv/lib/python3.9/site-packages/httpcore/_backends/auto.py new file mode 100644 index 
0000000..5579ab4 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpcore/_backends/auto.py @@ -0,0 +1,67 @@ +from ssl import SSLContext +from typing import Optional + +import sniffio + +from .._types import TimeoutDict +from .base import AsyncBackend, AsyncLock, AsyncSemaphore, AsyncSocketStream + +# The following line is imported from the _sync modules +from .sync import SyncBackend, SyncLock, SyncSemaphore, SyncSocketStream # noqa + + +class AutoBackend(AsyncBackend): + @property + def backend(self) -> AsyncBackend: + if not hasattr(self, "_backend_implementation"): + backend = sniffio.current_async_library() + + if backend == "asyncio": + from .anyio import AnyIOBackend + + self._backend_implementation: AsyncBackend = AnyIOBackend() + elif backend == "trio": + from .trio import TrioBackend + + self._backend_implementation = TrioBackend() + elif backend == "curio": + from .curio import CurioBackend + + self._backend_implementation = CurioBackend() + else: # pragma: nocover + raise RuntimeError(f"Unsupported concurrency backend {backend!r}") + return self._backend_implementation + + async def open_tcp_stream( + self, + hostname: bytes, + port: int, + ssl_context: Optional[SSLContext], + timeout: TimeoutDict, + *, + local_address: Optional[str], + ) -> AsyncSocketStream: + return await self.backend.open_tcp_stream( + hostname, port, ssl_context, timeout, local_address=local_address + ) + + async def open_uds_stream( + self, + path: str, + hostname: bytes, + ssl_context: Optional[SSLContext], + timeout: TimeoutDict, + ) -> AsyncSocketStream: + return await self.backend.open_uds_stream(path, hostname, ssl_context, timeout) + + def create_lock(self) -> AsyncLock: + return self.backend.create_lock() + + def create_semaphore(self, max_value: int, exc_class: type) -> AsyncSemaphore: + return self.backend.create_semaphore(max_value, exc_class=exc_class) + + async def time(self) -> float: + return await self.backend.time() + + async def sleep(self, seconds: float) 
-> None: + await self.backend.sleep(seconds) diff --git a/.venv/lib/python3.9/site-packages/httpcore/_backends/base.py b/.venv/lib/python3.9/site-packages/httpcore/_backends/base.py new file mode 100644 index 0000000..1ca6e31 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpcore/_backends/base.py @@ -0,0 +1,137 @@ +from ssl import SSLContext +from types import TracebackType +from typing import TYPE_CHECKING, Optional, Type + +from .._types import TimeoutDict + +if TYPE_CHECKING: # pragma: no cover + from .sync import SyncBackend + + +def lookup_async_backend(name: str) -> "AsyncBackend": + if name == "auto": + from .auto import AutoBackend + + return AutoBackend() + elif name == "asyncio": + from .asyncio import AsyncioBackend + + return AsyncioBackend() + elif name == "trio": + from .trio import TrioBackend + + return TrioBackend() + elif name == "curio": + from .curio import CurioBackend + + return CurioBackend() + elif name == "anyio": + from .anyio import AnyIOBackend + + return AnyIOBackend() + + raise ValueError("Invalid backend name {name!r}") + + +def lookup_sync_backend(name: str) -> "SyncBackend": + from .sync import SyncBackend + + return SyncBackend() + + +class AsyncSocketStream: + """ + A socket stream with read/write operations. Abstracts away any asyncio-specific + interfaces into a more generic base class, that we can use with alternate + backends, or for stand-alone test cases. 
+ """ + + def get_http_version(self) -> str: + raise NotImplementedError() # pragma: no cover + + async def start_tls( + self, hostname: bytes, ssl_context: SSLContext, timeout: TimeoutDict + ) -> "AsyncSocketStream": + raise NotImplementedError() # pragma: no cover + + async def read(self, n: int, timeout: TimeoutDict) -> bytes: + raise NotImplementedError() # pragma: no cover + + async def write(self, data: bytes, timeout: TimeoutDict) -> None: + raise NotImplementedError() # pragma: no cover + + async def aclose(self) -> None: + raise NotImplementedError() # pragma: no cover + + def is_readable(self) -> bool: + raise NotImplementedError() # pragma: no cover + + +class AsyncLock: + """ + An abstract interface for Lock classes. + """ + + async def __aenter__(self) -> None: + await self.acquire() + + async def __aexit__( + self, + exc_type: Type[BaseException] = None, + exc_value: BaseException = None, + traceback: TracebackType = None, + ) -> None: + await self.release() + + async def release(self) -> None: + raise NotImplementedError() # pragma: no cover + + async def acquire(self) -> None: + raise NotImplementedError() # pragma: no cover + + +class AsyncSemaphore: + """ + An abstract interface for Semaphore classes. + Abstracts away any asyncio-specific interfaces. 
+ """ + + async def acquire(self, timeout: float = None) -> None: + raise NotImplementedError() # pragma: no cover + + async def release(self) -> None: + raise NotImplementedError() # pragma: no cover + + +class AsyncBackend: + async def open_tcp_stream( + self, + hostname: bytes, + port: int, + ssl_context: Optional[SSLContext], + timeout: TimeoutDict, + *, + local_address: Optional[str], + ) -> AsyncSocketStream: + raise NotImplementedError() # pragma: no cover + + async def open_uds_stream( + self, + path: str, + hostname: bytes, + ssl_context: Optional[SSLContext], + timeout: TimeoutDict, + ) -> AsyncSocketStream: + raise NotImplementedError() # pragma: no cover + + def create_lock(self) -> AsyncLock: + raise NotImplementedError() # pragma: no cover + + def create_semaphore(self, max_value: int, exc_class: type) -> AsyncSemaphore: + raise NotImplementedError() # pragma: no cover + + async def time(self) -> float: + raise NotImplementedError() # pragma: no cover + + async def sleep(self, seconds: float) -> None: + raise NotImplementedError() # pragma: no cover diff --git a/.venv/lib/python3.9/site-packages/httpcore/_backends/curio.py b/.venv/lib/python3.9/site-packages/httpcore/_backends/curio.py new file mode 100644 index 0000000..99a7b2c --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpcore/_backends/curio.py @@ -0,0 +1,206 @@ +from ssl import SSLContext, SSLSocket +from typing import Optional + +import curio +import curio.io + +from .._exceptions import ( + ConnectError, + ConnectTimeout, + ReadError, + ReadTimeout, + WriteError, + WriteTimeout, + map_exceptions, +) +from .._types import TimeoutDict +from .._utils import get_logger, is_socket_readable +from .base import AsyncBackend, AsyncLock, AsyncSemaphore, AsyncSocketStream + +logger = get_logger(__name__) + +ONE_DAY_IN_SECONDS = float(60 * 60 * 24) + + +def convert_timeout(value: Optional[float]) -> float: + return value if value is not None else ONE_DAY_IN_SECONDS + + +class Lock(AsyncLock): + 
def __init__(self) -> None: + self._lock = curio.Lock() + + async def acquire(self) -> None: + await self._lock.acquire() + + async def release(self) -> None: + await self._lock.release() + + +class Semaphore(AsyncSemaphore): + def __init__(self, max_value: int, exc_class: type) -> None: + self.max_value = max_value + self.exc_class = exc_class + + @property + def semaphore(self) -> curio.Semaphore: + if not hasattr(self, "_semaphore"): + self._semaphore = curio.Semaphore(value=self.max_value) + return self._semaphore + + async def acquire(self, timeout: float = None) -> None: + timeout = convert_timeout(timeout) + + try: + return await curio.timeout_after(timeout, self.semaphore.acquire()) + except curio.TaskTimeout: + raise self.exc_class() + + async def release(self) -> None: + await self.semaphore.release() + + +class SocketStream(AsyncSocketStream): + def __init__(self, socket: curio.io.Socket) -> None: + self.read_lock = curio.Lock() + self.write_lock = curio.Lock() + self.socket = socket + self.stream = socket.as_stream() + + def get_http_version(self) -> str: + if hasattr(self.socket, "_socket"): + raw_socket = self.socket._socket + + if isinstance(raw_socket, SSLSocket): + ident = raw_socket.selected_alpn_protocol() + return "HTTP/2" if ident == "h2" else "HTTP/1.1" + + return "HTTP/1.1" + + async def start_tls( + self, hostname: bytes, ssl_context: SSLContext, timeout: TimeoutDict + ) -> "AsyncSocketStream": + connect_timeout = convert_timeout(timeout.get("connect")) + exc_map = { + curio.TaskTimeout: ConnectTimeout, + curio.CurioError: ConnectError, + OSError: ConnectError, + } + + with map_exceptions(exc_map): + wrapped_sock = curio.io.Socket( + ssl_context.wrap_socket( + self.socket._socket, + do_handshake_on_connect=False, + server_hostname=hostname.decode("ascii"), + ) + ) + + await curio.timeout_after( + connect_timeout, + wrapped_sock.do_handshake(), + ) + + return SocketStream(wrapped_sock) + + async def read(self, n: int, timeout: TimeoutDict) -> 
bytes: + read_timeout = convert_timeout(timeout.get("read")) + exc_map = { + curio.TaskTimeout: ReadTimeout, + curio.CurioError: ReadError, + OSError: ReadError, + } + + with map_exceptions(exc_map): + async with self.read_lock: + return await curio.timeout_after(read_timeout, self.stream.read(n)) + + async def write(self, data: bytes, timeout: TimeoutDict) -> None: + write_timeout = convert_timeout(timeout.get("write")) + exc_map = { + curio.TaskTimeout: WriteTimeout, + curio.CurioError: WriteError, + OSError: WriteError, + } + + with map_exceptions(exc_map): + async with self.write_lock: + await curio.timeout_after(write_timeout, self.stream.write(data)) + + async def aclose(self) -> None: + await self.stream.close() + await self.socket.close() + + def is_readable(self) -> bool: + return is_socket_readable(self.socket) + + +class CurioBackend(AsyncBackend): + async def open_tcp_stream( + self, + hostname: bytes, + port: int, + ssl_context: Optional[SSLContext], + timeout: TimeoutDict, + *, + local_address: Optional[str], + ) -> AsyncSocketStream: + connect_timeout = convert_timeout(timeout.get("connect")) + exc_map = { + curio.TaskTimeout: ConnectTimeout, + curio.CurioError: ConnectError, + OSError: ConnectError, + } + host = hostname.decode("ascii") + + kwargs: dict = {} + if ssl_context is not None: + kwargs["ssl"] = ssl_context + kwargs["server_hostname"] = host + if local_address is not None: + kwargs["source_addr"] = (local_address, 0) + + with map_exceptions(exc_map): + sock: curio.io.Socket = await curio.timeout_after( + connect_timeout, + curio.open_connection(hostname, port, **kwargs), + ) + + return SocketStream(sock) + + async def open_uds_stream( + self, + path: str, + hostname: bytes, + ssl_context: Optional[SSLContext], + timeout: TimeoutDict, + ) -> AsyncSocketStream: + connect_timeout = convert_timeout(timeout.get("connect")) + exc_map = { + curio.TaskTimeout: ConnectTimeout, + curio.CurioError: ConnectError, + OSError: ConnectError, + } + host = 
hostname.decode("ascii") + kwargs = ( + {} if ssl_context is None else {"ssl": ssl_context, "server_hostname": host} + ) + + with map_exceptions(exc_map): + sock: curio.io.Socket = await curio.timeout_after( + connect_timeout, curio.open_unix_connection(path, **kwargs) + ) + + return SocketStream(sock) + + def create_lock(self) -> AsyncLock: + return Lock() + + def create_semaphore(self, max_value: int, exc_class: type) -> AsyncSemaphore: + return Semaphore(max_value, exc_class) + + async def time(self) -> float: + return await curio.clock() + + async def sleep(self, seconds: float) -> None: + await curio.sleep(seconds) diff --git a/.venv/lib/python3.9/site-packages/httpcore/_backends/sync.py b/.venv/lib/python3.9/site-packages/httpcore/_backends/sync.py new file mode 100644 index 0000000..ee8f94b --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpcore/_backends/sync.py @@ -0,0 +1,178 @@ +import socket +import threading +import time +from ssl import SSLContext +from types import TracebackType +from typing import Optional, Type + +from .._exceptions import ( + ConnectError, + ConnectTimeout, + ReadError, + ReadTimeout, + WriteError, + WriteTimeout, + map_exceptions, +) +from .._types import TimeoutDict +from .._utils import is_socket_readable + + +class SyncSocketStream: + """ + A socket stream with read/write operations. Abstracts away any asyncio-specific + interfaces into a more generic base class, that we can use with alternate + backends, or for stand-alone test cases. 
+ """ + + def __init__(self, sock: socket.socket) -> None: + self.sock = sock + self.read_lock = threading.Lock() + self.write_lock = threading.Lock() + + def get_http_version(self) -> str: + selected_alpn_protocol = getattr(self.sock, "selected_alpn_protocol", None) + if selected_alpn_protocol is not None: + ident = selected_alpn_protocol() + return "HTTP/2" if ident == "h2" else "HTTP/1.1" + return "HTTP/1.1" + + def start_tls( + self, hostname: bytes, ssl_context: SSLContext, timeout: TimeoutDict + ) -> "SyncSocketStream": + connect_timeout = timeout.get("connect") + exc_map = {socket.timeout: ConnectTimeout, socket.error: ConnectError} + + with map_exceptions(exc_map): + self.sock.settimeout(connect_timeout) + wrapped = ssl_context.wrap_socket( + self.sock, server_hostname=hostname.decode("ascii") + ) + + return SyncSocketStream(wrapped) + + def read(self, n: int, timeout: TimeoutDict) -> bytes: + read_timeout = timeout.get("read") + exc_map = {socket.timeout: ReadTimeout, socket.error: ReadError} + + with self.read_lock: + with map_exceptions(exc_map): + self.sock.settimeout(read_timeout) + return self.sock.recv(n) + + def write(self, data: bytes, timeout: TimeoutDict) -> None: + write_timeout = timeout.get("write") + exc_map = {socket.timeout: WriteTimeout, socket.error: WriteError} + + with self.write_lock: + with map_exceptions(exc_map): + while data: + self.sock.settimeout(write_timeout) + n = self.sock.send(data) + data = data[n:] + + def close(self) -> None: + with self.write_lock: + try: + self.sock.close() + except socket.error: + pass + + def is_readable(self) -> bool: + return is_socket_readable(self.sock) + + +class SyncLock: + def __init__(self) -> None: + self._lock = threading.Lock() + + def __enter__(self) -> None: + self.acquire() + + def __exit__( + self, + exc_type: Type[BaseException] = None, + exc_value: BaseException = None, + traceback: TracebackType = None, + ) -> None: + self.release() + + def release(self) -> None: + 
self._lock.release() + + def acquire(self) -> None: + self._lock.acquire() + + +class SyncSemaphore: + def __init__(self, max_value: int, exc_class: type) -> None: + self.max_value = max_value + self.exc_class = exc_class + self._semaphore = threading.Semaphore(max_value) + + def acquire(self, timeout: float = None) -> None: + if not self._semaphore.acquire(timeout=timeout): # type: ignore + raise self.exc_class() + + def release(self) -> None: + self._semaphore.release() + + +class SyncBackend: + def open_tcp_stream( + self, + hostname: bytes, + port: int, + ssl_context: Optional[SSLContext], + timeout: TimeoutDict, + *, + local_address: Optional[str], + ) -> SyncSocketStream: + address = (hostname.decode("ascii"), port) + connect_timeout = timeout.get("connect") + source_address = None if local_address is None else (local_address, 0) + exc_map = {socket.timeout: ConnectTimeout, socket.error: ConnectError} + + with map_exceptions(exc_map): + sock = socket.create_connection( + address, connect_timeout, source_address=source_address # type: ignore + ) + if ssl_context is not None: + sock = ssl_context.wrap_socket( + sock, server_hostname=hostname.decode("ascii") + ) + return SyncSocketStream(sock=sock) + + def open_uds_stream( + self, + path: str, + hostname: bytes, + ssl_context: Optional[SSLContext], + timeout: TimeoutDict, + ) -> SyncSocketStream: + connect_timeout = timeout.get("connect") + exc_map = {socket.timeout: ConnectTimeout, socket.error: ConnectError} + + with map_exceptions(exc_map): + sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + sock.settimeout(connect_timeout) + sock.connect(path) + + if ssl_context is not None: + sock = ssl_context.wrap_socket( + sock, server_hostname=hostname.decode("ascii") + ) + + return SyncSocketStream(sock=sock) + + def create_lock(self) -> SyncLock: + return SyncLock() + + def create_semaphore(self, max_value: int, exc_class: type) -> SyncSemaphore: + return SyncSemaphore(max_value, exc_class=exc_class) + + def 
time(self) -> float: + return time.monotonic() + + def sleep(self, seconds: float) -> None: + time.sleep(seconds) diff --git a/.venv/lib/python3.9/site-packages/httpcore/_backends/trio.py b/.venv/lib/python3.9/site-packages/httpcore/_backends/trio.py new file mode 100644 index 0000000..d6e67c2 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpcore/_backends/trio.py @@ -0,0 +1,212 @@ +from ssl import SSLContext +from typing import Optional + +import trio + +from .._exceptions import ( + ConnectError, + ConnectTimeout, + ReadError, + ReadTimeout, + WriteError, + WriteTimeout, + map_exceptions, +) +from .._types import TimeoutDict +from .base import AsyncBackend, AsyncLock, AsyncSemaphore, AsyncSocketStream + + +def none_as_inf(value: Optional[float]) -> float: + return value if value is not None else float("inf") + + +class SocketStream(AsyncSocketStream): + def __init__(self, stream: trio.abc.Stream) -> None: + self.stream = stream + self.read_lock = trio.Lock() + self.write_lock = trio.Lock() + + def get_http_version(self) -> str: + if not isinstance(self.stream, trio.SSLStream): + return "HTTP/1.1" + + ident = self.stream.selected_alpn_protocol() + return "HTTP/2" if ident == "h2" else "HTTP/1.1" + + async def start_tls( + self, hostname: bytes, ssl_context: SSLContext, timeout: TimeoutDict + ) -> "SocketStream": + connect_timeout = none_as_inf(timeout.get("connect")) + exc_map = { + trio.TooSlowError: ConnectTimeout, + trio.BrokenResourceError: ConnectError, + } + ssl_stream = trio.SSLStream( + self.stream, + ssl_context=ssl_context, + server_hostname=hostname.decode("ascii"), + ) + + with map_exceptions(exc_map): + with trio.fail_after(connect_timeout): + await ssl_stream.do_handshake() + return SocketStream(ssl_stream) + + async def read(self, n: int, timeout: TimeoutDict) -> bytes: + read_timeout = none_as_inf(timeout.get("read")) + exc_map = {trio.TooSlowError: ReadTimeout, trio.BrokenResourceError: ReadError} + + async with self.read_lock: + with 
map_exceptions(exc_map): + try: + with trio.fail_after(read_timeout): + return await self.stream.receive_some(max_bytes=n) + except trio.TooSlowError as exc: + await self.stream.aclose() + raise exc + + async def write(self, data: bytes, timeout: TimeoutDict) -> None: + if not data: + return + + write_timeout = none_as_inf(timeout.get("write")) + exc_map = { + trio.TooSlowError: WriteTimeout, + trio.BrokenResourceError: WriteError, + } + + async with self.write_lock: + with map_exceptions(exc_map): + try: + with trio.fail_after(write_timeout): + return await self.stream.send_all(data) + except trio.TooSlowError as exc: + await self.stream.aclose() + raise exc + + async def aclose(self) -> None: + async with self.write_lock: + try: + await self.stream.aclose() + except trio.BrokenResourceError: + pass + + def is_readable(self) -> bool: + # Adapted from: https://github.com/encode/httpx/pull/143#issuecomment-515202982 + stream = self.stream + + # Peek through any SSLStream wrappers to get the underlying SocketStream. 
+ while isinstance(stream, trio.SSLStream): + stream = stream.transport_stream + assert isinstance(stream, trio.SocketStream) + + return stream.socket.is_readable() + + +class Lock(AsyncLock): + def __init__(self) -> None: + self._lock = trio.Lock() + + async def release(self) -> None: + self._lock.release() + + async def acquire(self) -> None: + await self._lock.acquire() + + +class Semaphore(AsyncSemaphore): + def __init__(self, max_value: int, exc_class: type): + self.max_value = max_value + self.exc_class = exc_class + + @property + def semaphore(self) -> trio.Semaphore: + if not hasattr(self, "_semaphore"): + self._semaphore = trio.Semaphore(self.max_value, max_value=self.max_value) + return self._semaphore + + async def acquire(self, timeout: float = None) -> None: + timeout = none_as_inf(timeout) + + with trio.move_on_after(timeout): + await self.semaphore.acquire() + return + + raise self.exc_class() + + async def release(self) -> None: + self.semaphore.release() + + +class TrioBackend(AsyncBackend): + async def open_tcp_stream( + self, + hostname: bytes, + port: int, + ssl_context: Optional[SSLContext], + timeout: TimeoutDict, + *, + local_address: Optional[str], + ) -> AsyncSocketStream: + connect_timeout = none_as_inf(timeout.get("connect")) + # Trio will support local_address from 0.16.1 onwards. + # We only include the keyword argument if a local_address + #  argument has been passed. 
+ kwargs: dict = {} if local_address is None else {"local_address": local_address} + exc_map = { + OSError: ConnectError, + trio.TooSlowError: ConnectTimeout, + trio.BrokenResourceError: ConnectError, + } + + with map_exceptions(exc_map): + with trio.fail_after(connect_timeout): + stream: trio.abc.Stream = await trio.open_tcp_stream( + hostname, port, **kwargs + ) + + if ssl_context is not None: + stream = trio.SSLStream( + stream, ssl_context, server_hostname=hostname.decode("ascii") + ) + await stream.do_handshake() + + return SocketStream(stream=stream) + + async def open_uds_stream( + self, + path: str, + hostname: bytes, + ssl_context: Optional[SSLContext], + timeout: TimeoutDict, + ) -> AsyncSocketStream: + connect_timeout = none_as_inf(timeout.get("connect")) + exc_map = { + OSError: ConnectError, + trio.TooSlowError: ConnectTimeout, + trio.BrokenResourceError: ConnectError, + } + + with map_exceptions(exc_map): + with trio.fail_after(connect_timeout): + stream: trio.abc.Stream = await trio.open_unix_socket(path) + + if ssl_context is not None: + stream = trio.SSLStream( + stream, ssl_context, server_hostname=hostname.decode("ascii") + ) + await stream.do_handshake() + + return SocketStream(stream=stream) + + def create_lock(self) -> AsyncLock: + return Lock() + + def create_semaphore(self, max_value: int, exc_class: type) -> AsyncSemaphore: + return Semaphore(max_value, exc_class=exc_class) + + async def time(self) -> float: + return trio.current_time() + + async def sleep(self, seconds: float) -> None: + await trio.sleep(seconds) diff --git a/.venv/lib/python3.9/site-packages/httpcore/_bytestreams.py b/.venv/lib/python3.9/site-packages/httpcore/_bytestreams.py new file mode 100644 index 0000000..317f411 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpcore/_bytestreams.py @@ -0,0 +1,96 @@ +from typing import AsyncIterator, Callable, Iterator + +from ._async.base import AsyncByteStream +from ._sync.base import SyncByteStream + + +class 
ByteStream(AsyncByteStream, SyncByteStream): + """ + A concrete implementation for either sync or async byte streams. + + Example:: + + stream = httpcore.ByteStream(b"123") + + Parameters + ---------- + content: + A plain byte string used as the content of the stream. + """ + + def __init__(self, content: bytes) -> None: + self._content = content + + def __iter__(self) -> Iterator[bytes]: + yield self._content + + async def __aiter__(self) -> AsyncIterator[bytes]: + yield self._content + + +class IteratorByteStream(SyncByteStream): + """ + A concrete implementation for sync byte streams. + + Example:: + + def generate_content(): + yield b"Hello, world!" + ... + + stream = httpcore.IteratorByteStream(generate_content()) + + Parameters + ---------- + iterator: + A sync byte iterator, used as the content of the stream. + close_func: + An optional function called when closing the stream. + """ + + def __init__(self, iterator: Iterator[bytes], close_func: Callable = None) -> None: + self._iterator = iterator + self._close_func = close_func + + def __iter__(self) -> Iterator[bytes]: + for chunk in self._iterator: + yield chunk + + def close(self) -> None: + if self._close_func is not None: + self._close_func() + + +class AsyncIteratorByteStream(AsyncByteStream): + """ + A concrete implementation for async byte streams. + + Example:: + + async def generate_content(): + yield b"Hello, world!" + ... + + stream = httpcore.AsyncIteratorByteStream(generate_content()) + + Parameters + ---------- + aiterator: + An async byte iterator, used as the content of the stream. + aclose_func: + An optional async function called when closing the stream. 
+ """ + + def __init__( + self, aiterator: AsyncIterator[bytes], aclose_func: Callable = None + ) -> None: + self._aiterator = aiterator + self._aclose_func = aclose_func + + async def __aiter__(self) -> AsyncIterator[bytes]: + async for chunk in self._aiterator: + yield chunk + + async def aclose(self) -> None: + if self._aclose_func is not None: + await self._aclose_func() diff --git a/.venv/lib/python3.9/site-packages/httpcore/_exceptions.py b/.venv/lib/python3.9/site-packages/httpcore/_exceptions.py new file mode 100644 index 0000000..ba56829 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpcore/_exceptions.py @@ -0,0 +1,79 @@ +import contextlib +from typing import Dict, Iterator, Type + + +@contextlib.contextmanager +def map_exceptions(map: Dict[Type[Exception], Type[Exception]]) -> Iterator[None]: + try: + yield + except Exception as exc: # noqa: PIE786 + for from_exc, to_exc in map.items(): + if isinstance(exc, from_exc): + raise to_exc(exc) from None + raise + + +class UnsupportedProtocol(Exception): + pass + + +class ProtocolError(Exception): + pass + + +class RemoteProtocolError(ProtocolError): + pass + + +class LocalProtocolError(ProtocolError): + pass + + +class ProxyError(Exception): + pass + + +# Timeout errors + + +class TimeoutException(Exception): + pass + + +class PoolTimeout(TimeoutException): + pass + + +class ConnectTimeout(TimeoutException): + pass + + +class ReadTimeout(TimeoutException): + pass + + +class WriteTimeout(TimeoutException): + pass + + +# Network errors + + +class NetworkError(Exception): + pass + + +class ConnectError(NetworkError): + pass + + +class ReadError(NetworkError): + pass + + +class WriteError(NetworkError): + pass + + +class CloseError(NetworkError): + pass diff --git a/.venv/lib/python3.9/site-packages/httpcore/_sync/__init__.py b/.venv/lib/python3.9/site-packages/httpcore/_sync/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.9/site-packages/httpcore/_sync/base.py 
b/.venv/lib/python3.9/site-packages/httpcore/_sync/base.py new file mode 100644 index 0000000..45ef4ab --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpcore/_sync/base.py @@ -0,0 +1,122 @@ +import enum +from types import TracebackType +from typing import Iterator, Tuple, Type + +from .._types import URL, Headers, T + + +class NewConnectionRequired(Exception): + pass + + +class ConnectionState(enum.IntEnum): + """ + PENDING READY + | | ^ + v V | + ACTIVE | + | | | + | V | + V IDLE-+ + FULL | + | | + V V + CLOSED + """ + + PENDING = 0 # Connection not yet acquired. + READY = 1 # Re-acquired from pool, about to send a request. + ACTIVE = 2 # Active requests. + FULL = 3 # Active requests, no more stream IDs available. + IDLE = 4 # No active requests. + CLOSED = 5 # Connection closed. + + +class SyncByteStream: + """ + The base interface for request and response bodies. + + Concrete implementations should subclass this class, and implement + the :meth:`__iter__` method, and optionally the :meth:`close` method. + """ + + def __iter__(self) -> Iterator[bytes]: + """ + Yield bytes representing the request or response body. + """ + yield b"" # pragma: nocover + + def close(self) -> None: + """ + Must be called by the client to indicate that the stream has been closed. + """ + pass # pragma: nocover + + def read(self) -> bytes: + try: + return b"".join([part for part in self]) + finally: + self.close() + + +class SyncHTTPTransport: + """ + The base interface for sending HTTP requests. + + Concrete implementations should subclass this class, and implement + the :meth:`handle_request` method, and optionally the :meth:`close` method. + """ + + def handle_request( + self, + method: bytes, + url: URL, + headers: Headers, + stream: SyncByteStream, + extensions: dict, + ) -> Tuple[int, Headers, SyncByteStream, dict]: + """ + The interface for sending a single HTTP request, and returning a response. 
+ + Parameters + ---------- + method: + The HTTP method, such as ``b'GET'``. + url: + The URL as a 4-tuple of (scheme, host, port, path). + headers: + Any HTTP headers to send with the request. + stream: + The body of the HTTP request. + extensions: + A dictionary of optional extensions. + + Returns + ------- + status_code: + The HTTP status code, such as ``200``. + headers: + Any HTTP headers included on the response. + stream: + The body of the HTTP response. + extensions: + A dictionary of optional extensions. + """ + raise NotImplementedError() # pragma: nocover + + def close(self) -> None: + """ + Close the implementation, which should close any outstanding response streams, + and any keep alive connections. + """ + + def __enter__(self: T) -> T: + return self + + def __exit__( + self, + exc_type: Type[BaseException] = None, + exc_value: BaseException = None, + traceback: TracebackType = None, + ) -> None: + self.close() diff --git a/.venv/lib/python3.9/site-packages/httpcore/_sync/connection.py b/.venv/lib/python3.9/site-packages/httpcore/_sync/connection.py new file mode 100644 index 0000000..382a4f9 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpcore/_sync/connection.py @@ -0,0 +1,220 @@ +from ssl import SSLContext +from typing import List, Optional, Tuple, cast + +from .._backends.sync import SyncBackend, SyncLock, SyncSocketStream, SyncBackend +from .._exceptions import ConnectError, ConnectTimeout +from .._types import URL, Headers, Origin, TimeoutDict +from .._utils import exponential_backoff, get_logger, url_to_origin +from .base import SyncByteStream, SyncHTTPTransport, NewConnectionRequired +from .http import SyncBaseHTTPConnection +from .http11 import SyncHTTP11Connection + +logger = get_logger(__name__) + +RETRIES_BACKOFF_FACTOR = 0.5 # 0s, 0.5s, 1s, 2s, 4s, etc. 
+ + +class SyncHTTPConnection(SyncHTTPTransport): + def __init__( + self, + origin: Origin, + http1: bool = True, + http2: bool = False, + keepalive_expiry: float = None, + uds: str = None, + ssl_context: SSLContext = None, + socket: SyncSocketStream = None, + local_address: str = None, + retries: int = 0, + backend: SyncBackend = None, + ): + self.origin = origin + self._http1_enabled = http1 + self._http2_enabled = http2 + self._keepalive_expiry = keepalive_expiry + self._uds = uds + self._ssl_context = SSLContext() if ssl_context is None else ssl_context + self.socket = socket + self._local_address = local_address + self._retries = retries + + alpn_protocols: List[str] = [] + if http1: + alpn_protocols.append("http/1.1") + if http2: + alpn_protocols.append("h2") + + self._ssl_context.set_alpn_protocols(alpn_protocols) + + self.connection: Optional[SyncBaseHTTPConnection] = None + self._is_http11 = False + self._is_http2 = False + self._connect_failed = False + self._expires_at: Optional[float] = None + self._backend = SyncBackend() if backend is None else backend + + def __repr__(self) -> str: + return f"" + + def info(self) -> str: + if self.connection is None: + return "Connection failed" if self._connect_failed else "Connecting" + return self.connection.info() + + def should_close(self) -> bool: + """ + Return `True` if the connection is in a state where it should be closed. + This occurs when any of the following occur: + + * There are no active requests on an HTTP/1.1 connection, and the underlying + socket is readable. The only valid state the socket can be readable in + if this occurs is when the b"" EOF marker is about to be returned, + indicating a server disconnect. + * There are no active requests being made and the keepalive timeout has passed. + """ + if self.connection is None: + return False + return self.connection.should_close() + + def is_idle(self) -> bool: + """ + Return `True` if the connection is currently idle. 
+ """ + if self.connection is None: + return False + return self.connection.is_idle() + + def is_closed(self) -> bool: + if self.connection is None: + return self._connect_failed + return self.connection.is_closed() + + def is_available(self) -> bool: + """ + Return `True` if the connection is currently able to accept an outgoing request. + This occurs when any of the following occur: + + * The connection has not yet been opened, and HTTP/2 support is enabled. + We don't *know* at this point if we'll end up on an HTTP/2 connection or + not, but we *might* do, so we indicate availability. + * The connection has been opened, and is currently idle. + * The connection is open, and is an HTTP/2 connection. The connection must + also not currently be exceeding the maximum number of allowable concurrent + streams and must not have exhausted the maximum total number of stream IDs. + """ + if self.connection is None: + return self._http2_enabled and not self.is_closed + return self.connection.is_available() + + @property + def request_lock(self) -> SyncLock: + # We do this lazily, to make sure backend autodetection always + # runs within an async context. 
+ if not hasattr(self, "_request_lock"): + self._request_lock = self._backend.create_lock() + return self._request_lock + + def handle_request( + self, + method: bytes, + url: URL, + headers: Headers, + stream: SyncByteStream, + extensions: dict, + ) -> Tuple[int, Headers, SyncByteStream, dict]: + assert url_to_origin(url) == self.origin + timeout = cast(TimeoutDict, extensions.get("timeout", {})) + + with self.request_lock: + if self.connection is None: + if self._connect_failed: + raise NewConnectionRequired() + if not self.socket: + logger.trace( + "open_socket origin=%r timeout=%r", self.origin, timeout + ) + self.socket = self._open_socket(timeout) + self._create_connection(self.socket) + elif not self.connection.is_available(): + raise NewConnectionRequired() + + assert self.connection is not None + logger.trace( + "connection.handle_request method=%r url=%r headers=%r", + method, + url, + headers, + ) + return self.connection.handle_request( + method, url, headers, stream, extensions + ) + + def _open_socket(self, timeout: TimeoutDict = None) -> SyncSocketStream: + scheme, hostname, port = self.origin + timeout = {} if timeout is None else timeout + ssl_context = self._ssl_context if scheme == b"https" else None + + retries_left = self._retries + delays = exponential_backoff(factor=RETRIES_BACKOFF_FACTOR) + + while True: + try: + if self._uds is None: + return self._backend.open_tcp_stream( + hostname, + port, + ssl_context, + timeout, + local_address=self._local_address, + ) + else: + return self._backend.open_uds_stream( + self._uds, hostname, ssl_context, timeout + ) + except (ConnectError, ConnectTimeout): + if retries_left <= 0: + self._connect_failed = True + raise + retries_left -= 1 + delay = next(delays) + self._backend.sleep(delay) + except Exception: # noqa: PIE786 + self._connect_failed = True + raise + + def _create_connection(self, socket: SyncSocketStream) -> None: + http_version = socket.get_http_version() + logger.trace( + "create_connection 
socket=%r http_version=%r", socket, http_version + ) + if http_version == "HTTP/2" or ( + self._http2_enabled and not self._http1_enabled + ): + from .http2 import SyncHTTP2Connection + + self._is_http2 = True + self.connection = SyncHTTP2Connection( + socket=socket, + keepalive_expiry=self._keepalive_expiry, + backend=self._backend, + ) + else: + self._is_http11 = True + self.connection = SyncHTTP11Connection( + socket=socket, keepalive_expiry=self._keepalive_expiry + ) + + def start_tls( + self, hostname: bytes, ssl_context: SSLContext, timeout: TimeoutDict = None + ) -> None: + if self.connection is not None: + logger.trace("start_tls hostname=%r timeout=%r", hostname, timeout) + self.socket = self.connection.start_tls( + hostname, ssl_context, timeout + ) + logger.trace("start_tls complete hostname=%r timeout=%r", hostname, timeout) + + def close(self) -> None: + with self.request_lock: + if self.connection is not None: + self.connection.close() diff --git a/.venv/lib/python3.9/site-packages/httpcore/_sync/connection_pool.py b/.venv/lib/python3.9/site-packages/httpcore/_sync/connection_pool.py new file mode 100644 index 0000000..0bd759d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpcore/_sync/connection_pool.py @@ -0,0 +1,362 @@ +import warnings +from ssl import SSLContext +from typing import ( + Iterator, + Callable, + Dict, + List, + Optional, + Set, + Tuple, + Union, + cast, +) + +from .._backends.sync import SyncBackend, SyncLock, SyncSemaphore +from .._backends.base import lookup_sync_backend +from .._exceptions import LocalProtocolError, PoolTimeout, UnsupportedProtocol +from .._threadlock import ThreadLock +from .._types import URL, Headers, Origin, TimeoutDict +from .._utils import get_logger, origin_to_url_string, url_to_origin +from .base import SyncByteStream, SyncHTTPTransport, NewConnectionRequired +from .connection import SyncHTTPConnection + +logger = get_logger(__name__) + + +class NullSemaphore(SyncSemaphore): + def __init__(self) 
-> None: + pass + + def acquire(self, timeout: float = None) -> None: + return + + def release(self) -> None: + return + + +class ResponseByteStream(SyncByteStream): + def __init__( + self, + stream: SyncByteStream, + connection: SyncHTTPConnection, + callback: Callable, + ) -> None: + """ + A wrapper around the response stream that we return from + `.handle_request()`. + + Ensures that when `stream.close()` is called, the connection pool + is notified via a callback. + """ + self.stream = stream + self.connection = connection + self.callback = callback + + def __iter__(self) -> Iterator[bytes]: + for chunk in self.stream: + yield chunk + + def close(self) -> None: + try: + # Call the underlying stream close callback. + # This will be a call to `SyncHTTP11Connection._response_closed()` + # or `SyncHTTP2Stream._response_closed()`. + self.stream.close() + finally: + # Call the connection pool close callback. + # This will be a call to `SyncConnectionPool._response_closed()`. + self.callback(self.connection) + + +class SyncConnectionPool(SyncHTTPTransport): + """ + A connection pool for making HTTP requests. + + Parameters + ---------- + ssl_context: + An SSL context to use for verifying connections. + max_connections: + The maximum number of concurrent connections to allow. + max_keepalive_connections: + The maximum number of connections to allow before closing keep-alive + connections. + keepalive_expiry: + The maximum time to allow before closing a keep-alive connection. + http1: + Enable/Disable HTTP/1.1 support. Defaults to True. + http2: + Enable/Disable HTTP/2 support. Defaults to False. + uds: + Path to a Unix Domain Socket to use instead of TCP sockets. + local_address: + Local address to connect from. Can also be used to connect using a particular + address family. Using ``local_address="0.0.0.0"`` will connect using an + ``AF_INET`` address (IPv4), while using ``local_address="::"`` will connect + using an ``AF_INET6`` address (IPv6). 
+ retries: + The maximum number of retries when trying to establish a connection. + backend: + A name indicating which concurrency backend to use. + """ + + def __init__( + self, + ssl_context: SSLContext = None, + max_connections: int = None, + max_keepalive_connections: int = None, + keepalive_expiry: float = None, + http1: bool = True, + http2: bool = False, + uds: str = None, + local_address: str = None, + retries: int = 0, + max_keepalive: int = None, + backend: Union[SyncBackend, str] = "sync", + ): + if max_keepalive is not None: + warnings.warn( + "'max_keepalive' is deprecated. Use 'max_keepalive_connections'.", + DeprecationWarning, + ) + max_keepalive_connections = max_keepalive + + if isinstance(backend, str): + backend = lookup_sync_backend(backend) + + self._ssl_context = SSLContext() if ssl_context is None else ssl_context + self._max_connections = max_connections + self._max_keepalive_connections = max_keepalive_connections + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + self._uds = uds + self._local_address = local_address + self._retries = retries + self._connections: Dict[Origin, Set[SyncHTTPConnection]] = {} + self._thread_lock = ThreadLock() + self._backend = backend + self._next_keepalive_check = 0.0 + + if not (http1 or http2): + raise ValueError("Either http1 or http2 must be True.") + + if http2: + try: + import h2 # noqa: F401 + except ImportError: + raise ImportError( + "Attempted to use http2=True, but the 'h2' " + "package is not installed. Use 'pip install httpcore[http2]'." + ) + + @property + def _connection_semaphore(self) -> SyncSemaphore: + # We do this lazily, to make sure backend autodetection always + # runs within an async context. 
+ if not hasattr(self, "_internal_semaphore"): + if self._max_connections is not None: + self._internal_semaphore = self._backend.create_semaphore( + self._max_connections, exc_class=PoolTimeout + ) + else: + self._internal_semaphore = NullSemaphore() + + return self._internal_semaphore + + @property + def _connection_acquiry_lock(self) -> SyncLock: + if not hasattr(self, "_internal_connection_acquiry_lock"): + self._internal_connection_acquiry_lock = self._backend.create_lock() + return self._internal_connection_acquiry_lock + + def _create_connection( + self, + origin: Tuple[bytes, bytes, int], + ) -> SyncHTTPConnection: + return SyncHTTPConnection( + origin=origin, + http1=self._http1, + http2=self._http2, + keepalive_expiry=self._keepalive_expiry, + uds=self._uds, + ssl_context=self._ssl_context, + local_address=self._local_address, + retries=self._retries, + backend=self._backend, + ) + + def handle_request( + self, + method: bytes, + url: URL, + headers: Headers, + stream: SyncByteStream, + extensions: dict, + ) -> Tuple[int, Headers, SyncByteStream, dict]: + if not url[0]: + raise UnsupportedProtocol( + "Request URL missing either an 'http://' or 'https://' protocol." + ) + + if url[0] not in (b"http", b"https"): + protocol = url[0].decode("ascii") + raise UnsupportedProtocol( + f"Request URL has an unsupported protocol '{protocol}://'." + ) + + if not url[1]: + raise LocalProtocolError("Missing hostname in URL.") + + origin = url_to_origin(url) + timeout = cast(TimeoutDict, extensions.get("timeout", {})) + + self._keepalive_sweep() + + connection: Optional[SyncHTTPConnection] = None + while connection is None: + with self._connection_acquiry_lock: + # We get-or-create a connection as an atomic operation, to ensure + # that HTTP/2 requests issued in close concurrency will end up + # on the same connection. 
+ logger.trace("get_connection_from_pool=%r", origin) + connection = self._get_connection_from_pool(origin) + + if connection is None: + connection = self._create_connection(origin=origin) + logger.trace("created connection=%r", connection) + self._add_to_pool(connection, timeout=timeout) + else: + logger.trace("reuse connection=%r", connection) + + try: + response = connection.handle_request( + method, url, headers=headers, stream=stream, extensions=extensions + ) + except NewConnectionRequired: + connection = None + except BaseException: # noqa: PIE786 + # See https://github.com/encode/httpcore/pull/305 for motivation + # behind catching 'BaseException' rather than 'Exception' here. + logger.trace("remove from pool connection=%r", connection) + self._remove_from_pool(connection) + raise + + status_code, headers, stream, extensions = response + wrapped_stream = ResponseByteStream( + stream, connection=connection, callback=self._response_closed + ) + return status_code, headers, wrapped_stream, extensions + + def _get_connection_from_pool( + self, origin: Origin + ) -> Optional[SyncHTTPConnection]: + # Determine expired keep alive connections on this origin. + reuse_connection = None + connections_to_close = set() + + for connection in self._connections_for_origin(origin): + if connection.should_close(): + connections_to_close.add(connection) + self._remove_from_pool(connection) + elif connection.is_available(): + reuse_connection = connection + + # Close any dropped connections. 
+ for connection in connections_to_close: + connection.close() + + return reuse_connection + + def _response_closed(self, connection: SyncHTTPConnection) -> None: + remove_from_pool = False + close_connection = False + + if connection.is_closed(): + remove_from_pool = True + elif connection.is_idle(): + num_connections = len(self._get_all_connections()) + if ( + self._max_keepalive_connections is not None + and num_connections > self._max_keepalive_connections + ): + remove_from_pool = True + close_connection = True + + if remove_from_pool: + self._remove_from_pool(connection) + + if close_connection: + connection.close() + + def _keepalive_sweep(self) -> None: + """ + Remove any IDLE connections that have expired past their keep-alive time. + """ + if self._keepalive_expiry is None: + return + + now = self._backend.time() + if now < self._next_keepalive_check: + return + + self._next_keepalive_check = now + min(1.0, self._keepalive_expiry) + connections_to_close = set() + + for connection in self._get_all_connections(): + if connection.should_close(): + connections_to_close.add(connection) + self._remove_from_pool(connection) + + for connection in connections_to_close: + connection.close() + + def _add_to_pool( + self, connection: SyncHTTPConnection, timeout: TimeoutDict + ) -> None: + logger.trace("adding connection to pool=%r", connection) + self._connection_semaphore.acquire(timeout=timeout.get("pool", None)) + with self._thread_lock: + self._connections.setdefault(connection.origin, set()) + self._connections[connection.origin].add(connection) + + def _remove_from_pool(self, connection: SyncHTTPConnection) -> None: + logger.trace("removing connection from pool=%r", connection) + with self._thread_lock: + if connection in self._connections.get(connection.origin, set()): + self._connection_semaphore.release() + self._connections[connection.origin].remove(connection) + if not self._connections[connection.origin]: + del self._connections[connection.origin] + + def 
_connections_for_origin(self, origin: Origin) -> Set[SyncHTTPConnection]: + return set(self._connections.get(origin, set())) + + def _get_all_connections(self) -> Set[SyncHTTPConnection]: + connections: Set[SyncHTTPConnection] = set() + for connection_set in self._connections.values(): + connections |= connection_set + return connections + + def close(self) -> None: + connections = self._get_all_connections() + for connection in connections: + self._remove_from_pool(connection) + + # Close all connections + for connection in connections: + connection.close() + + def get_connection_info(self) -> Dict[str, List[str]]: + """ + Returns a dict of origin URLs to a list of summary strings for each connection. + """ + self._keepalive_sweep() + + stats = {} + for origin, connections in self._connections.items(): + stats[origin_to_url_string(origin)] = sorted( + [connection.info() for connection in connections] + ) + return stats diff --git a/.venv/lib/python3.9/site-packages/httpcore/_sync/http.py b/.venv/lib/python3.9/site-packages/httpcore/_sync/http.py new file mode 100644 index 0000000..c128a96 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpcore/_sync/http.py @@ -0,0 +1,42 @@ +from ssl import SSLContext + +from .._backends.sync import SyncSocketStream +from .._types import TimeoutDict +from .base import SyncHTTPTransport + + +class SyncBaseHTTPConnection(SyncHTTPTransport): + def info(self) -> str: + raise NotImplementedError() # pragma: nocover + + def should_close(self) -> bool: + """ + Return `True` if the connection is in a state where it should be closed. + """ + raise NotImplementedError() # pragma: nocover + + def is_idle(self) -> bool: + """ + Return `True` if the connection is currently idle. + """ + raise NotImplementedError() # pragma: nocover + + def is_closed(self) -> bool: + """ + Return `True` if the connection has been closed. 
+ """ + raise NotImplementedError() # pragma: nocover + + def is_available(self) -> bool: + """ + Return `True` if the connection is currently able to accept an outgoing request. + """ + raise NotImplementedError() # pragma: nocover + + def start_tls( + self, hostname: bytes, ssl_context: SSLContext, timeout: TimeoutDict = None + ) -> SyncSocketStream: + """ + Upgrade the underlying socket to TLS. + """ + raise NotImplementedError() # pragma: nocover diff --git a/.venv/lib/python3.9/site-packages/httpcore/_sync/http11.py b/.venv/lib/python3.9/site-packages/httpcore/_sync/http11.py new file mode 100644 index 0000000..5dbb42e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpcore/_sync/http11.py @@ -0,0 +1,269 @@ +import enum +import time +from ssl import SSLContext +from typing import Iterator, List, Optional, Tuple, Union, cast + +import h11 + +from .._backends.sync import SyncSocketStream +from .._bytestreams import IteratorByteStream +from .._exceptions import LocalProtocolError, RemoteProtocolError, map_exceptions +from .._types import URL, Headers, TimeoutDict +from .._utils import get_logger +from .base import SyncByteStream, NewConnectionRequired +from .http import SyncBaseHTTPConnection + +H11Event = Union[ + h11.Request, + h11.Response, + h11.InformationalResponse, + h11.Data, + h11.EndOfMessage, + h11.ConnectionClosed, +] + + +class ConnectionState(enum.IntEnum): + NEW = 0 + ACTIVE = 1 + IDLE = 2 + CLOSED = 3 + + +logger = get_logger(__name__) + + +class SyncHTTP11Connection(SyncBaseHTTPConnection): + READ_NUM_BYTES = 64 * 1024 + + def __init__(self, socket: SyncSocketStream, keepalive_expiry: float = None): + self.socket = socket + + self._keepalive_expiry: Optional[float] = keepalive_expiry + self._should_expire_at: Optional[float] = None + self._h11_state = h11.Connection(our_role=h11.CLIENT) + self._state = ConnectionState.NEW + + def __repr__(self) -> str: + return f"" + + def _now(self) -> float: + return time.monotonic() + + def 
_server_disconnected(self) -> bool: + """ + Return True if the connection is idle, and the underlying socket is readable. + The only valid state the socket can be readable here is when the b"" + EOF marker is about to be returned, indicating a server disconnect. + """ + return self._state == ConnectionState.IDLE and self.socket.is_readable() + + def _keepalive_expired(self) -> bool: + """ + Return True if the connection is idle, and has passed it's keepalive + expiry time. + """ + return ( + self._state == ConnectionState.IDLE + and self._should_expire_at is not None + and self._now() >= self._should_expire_at + ) + + def info(self) -> str: + return f"HTTP/1.1, {self._state.name}" + + def should_close(self) -> bool: + """ + Return `True` if the connection is in a state where it should be closed. + """ + return self._server_disconnected() or self._keepalive_expired() + + def is_idle(self) -> bool: + """ + Return `True` if the connection is currently idle. + """ + return self._state == ConnectionState.IDLE + + def is_closed(self) -> bool: + """ + Return `True` if the connection has been closed. + """ + return self._state == ConnectionState.CLOSED + + def is_available(self) -> bool: + """ + Return `True` if the connection is currently able to accept an outgoing request. + """ + return self._state == ConnectionState.IDLE + + def handle_request( + self, + method: bytes, + url: URL, + headers: Headers, + stream: SyncByteStream, + extensions: dict, + ) -> Tuple[int, Headers, SyncByteStream, dict]: + """ + Send a single HTTP/1.1 request. + + Note that there is no kind of task/thread locking at this layer of interface. + Dealing with locking for concurrency is handled by the `SyncHTTPConnection`. 
+ """ + timeout = cast(TimeoutDict, extensions.get("timeout", {})) + + if self._state in (ConnectionState.NEW, ConnectionState.IDLE): + self._state = ConnectionState.ACTIVE + self._should_expire_at = None + else: + raise NewConnectionRequired() + + self._send_request(method, url, headers, timeout) + self._send_request_body(stream, timeout) + ( + http_version, + status_code, + reason_phrase, + headers, + ) = self._receive_response(timeout) + response_stream = IteratorByteStream( + iterator=self._receive_response_data(timeout), + close_func=self._response_closed, + ) + extensions = { + "http_version": http_version, + "reason_phrase": reason_phrase, + } + return (status_code, headers, response_stream, extensions) + + def start_tls( + self, hostname: bytes, ssl_context: SSLContext, timeout: TimeoutDict = None + ) -> SyncSocketStream: + timeout = {} if timeout is None else timeout + self.socket = self.socket.start_tls(hostname, ssl_context, timeout) + return self.socket + + def _send_request( + self, method: bytes, url: URL, headers: Headers, timeout: TimeoutDict + ) -> None: + """ + Send the request line and headers. + """ + logger.trace("send_request method=%r url=%r headers=%s", method, url, headers) + _scheme, _host, _port, target = url + with map_exceptions({h11.LocalProtocolError: LocalProtocolError}): + event = h11.Request(method=method, target=target, headers=headers) + self._send_event(event, timeout) + + def _send_request_body( + self, stream: SyncByteStream, timeout: TimeoutDict + ) -> None: + """ + Send the request body. + """ + # Send the request body. + for chunk in stream: + logger.trace("send_data=Data(<%d bytes>)", len(chunk)) + event = h11.Data(data=chunk) + self._send_event(event, timeout) + + # Finalize sending the request. 
+ event = h11.EndOfMessage() + self._send_event(event, timeout) + + def _send_event(self, event: H11Event, timeout: TimeoutDict) -> None: + """ + Send a single `h11` event to the network, waiting for the data to + drain before returning. + """ + bytes_to_send = self._h11_state.send(event) + self.socket.write(bytes_to_send, timeout) + + def _receive_response( + self, timeout: TimeoutDict + ) -> Tuple[bytes, int, bytes, List[Tuple[bytes, bytes]]]: + """ + Read the response status and headers from the network. + """ + while True: + event = self._receive_event(timeout) + if isinstance(event, h11.Response): + break + + http_version = b"HTTP/" + event.http_version + + # h11 version 0.11+ supports a `raw_items` interface to get the + # raw header casing, rather than the enforced lowercase headers. + headers = event.headers.raw_items() + + return http_version, event.status_code, event.reason, headers + + def _receive_response_data( + self, timeout: TimeoutDict + ) -> Iterator[bytes]: + """ + Read the response data from the network. + """ + while True: + event = self._receive_event(timeout) + if isinstance(event, h11.Data): + logger.trace("receive_event=Data(<%d bytes>)", len(event.data)) + yield bytes(event.data) + elif isinstance(event, (h11.EndOfMessage, h11.PAUSED)): + logger.trace("receive_event=%r", event) + break + + def _receive_event(self, timeout: TimeoutDict) -> H11Event: + """ + Read a single `h11` event, reading more data from the network if needed. + """ + while True: + with map_exceptions({h11.RemoteProtocolError: RemoteProtocolError}): + event = self._h11_state.next_event() + + if event is h11.NEED_DATA: + data = self.socket.read(self.READ_NUM_BYTES, timeout) + + # If we feed this case through h11 we'll raise an exception like: + # + # httpcore.RemoteProtocolError: can't handle event type + # ConnectionClosed when role=SERVER and state=SEND_RESPONSE + # + # Which is accurate, but not very informative from an end-user + # perspective. 
Instead we handle messaging for this case distinctly. + if data == b"" and self._h11_state.their_state == h11.SEND_RESPONSE: + msg = "Server disconnected without sending a response." + raise RemoteProtocolError(msg) + + self._h11_state.receive_data(data) + else: + assert event is not h11.NEED_DATA + break + return event + + def _response_closed(self) -> None: + logger.trace( + "response_closed our_state=%r their_state=%r", + self._h11_state.our_state, + self._h11_state.their_state, + ) + if ( + self._h11_state.our_state is h11.DONE + and self._h11_state.their_state is h11.DONE + ): + self._h11_state.start_next_cycle() + self._state = ConnectionState.IDLE + if self._keepalive_expiry is not None: + self._should_expire_at = self._now() + self._keepalive_expiry + else: + self.close() + + def close(self) -> None: + if self._state != ConnectionState.CLOSED: + self._state = ConnectionState.CLOSED + + if self._h11_state.our_state is h11.MUST_CLOSE: + event = h11.ConnectionClosed() + self._h11_state.send(event) + + self.socket.close() diff --git a/.venv/lib/python3.9/site-packages/httpcore/_sync/http2.py b/.venv/lib/python3.9/site-packages/httpcore/_sync/http2.py new file mode 100644 index 0000000..90caf5f --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpcore/_sync/http2.py @@ -0,0 +1,446 @@ +import enum +import time +from ssl import SSLContext +from typing import Iterator, Dict, List, Optional, Tuple, cast + +import h2.connection +import h2.events +from h2.config import H2Configuration +from h2.exceptions import NoAvailableStreamIDError +from h2.settings import SettingCodes, Settings + +from .._backends.sync import SyncBackend, SyncLock, SyncSemaphore, SyncSocketStream +from .._bytestreams import IteratorByteStream +from .._exceptions import LocalProtocolError, PoolTimeout, RemoteProtocolError +from .._types import URL, Headers, TimeoutDict +from .._utils import get_logger +from .base import SyncByteStream, NewConnectionRequired +from .http import 
SyncBaseHTTPConnection + +logger = get_logger(__name__) + + +class ConnectionState(enum.IntEnum): + IDLE = 0 + ACTIVE = 1 + CLOSED = 2 + + +class SyncHTTP2Connection(SyncBaseHTTPConnection): + READ_NUM_BYTES = 64 * 1024 + CONFIG = H2Configuration(validate_inbound_headers=False) + + def __init__( + self, + socket: SyncSocketStream, + backend: SyncBackend, + keepalive_expiry: float = None, + ): + self.socket = socket + + self._backend = backend + self._h2_state = h2.connection.H2Connection(config=self.CONFIG) + + self._sent_connection_init = False + self._streams: Dict[int, SyncHTTP2Stream] = {} + self._events: Dict[int, List[h2.events.Event]] = {} + + self._keepalive_expiry: Optional[float] = keepalive_expiry + self._should_expire_at: Optional[float] = None + self._state = ConnectionState.ACTIVE + self._exhausted_available_stream_ids = False + + def __repr__(self) -> str: + return f"" + + def info(self) -> str: + return f"HTTP/2, {self._state.name}, {len(self._streams)} streams" + + def _now(self) -> float: + return time.monotonic() + + def should_close(self) -> bool: + """ + Return `True` if the connection is currently idle, and the keepalive + timeout has passed. + """ + return ( + self._state == ConnectionState.IDLE + and self._should_expire_at is not None + and self._now() >= self._should_expire_at + ) + + def is_idle(self) -> bool: + """ + Return `True` if the connection is currently idle. + """ + return self._state == ConnectionState.IDLE + + def is_closed(self) -> bool: + """ + Return `True` if the connection has been closed. + """ + return self._state == ConnectionState.CLOSED + + def is_available(self) -> bool: + """ + Return `True` if the connection is currently able to accept an outgoing request. + This occurs when any of the following occur: + + * The connection has not yet been opened, and HTTP/2 support is enabled. + We don't *know* at this point if we'll end up on an HTTP/2 connection or + not, but we *might* do, so we indicate availability. 
+ * The connection has been opened, and is currently idle. + * The connection is open, and is an HTTP/2 connection. The connection must + also not have exhausted the maximum total number of stream IDs. + """ + return ( + self._state != ConnectionState.CLOSED + and not self._exhausted_available_stream_ids + ) + + @property + def init_lock(self) -> SyncLock: + # We do this lazily, to make sure backend autodetection always + # runs within an async context. + if not hasattr(self, "_initialization_lock"): + self._initialization_lock = self._backend.create_lock() + return self._initialization_lock + + @property + def read_lock(self) -> SyncLock: + # We do this lazily, to make sure backend autodetection always + # runs within an async context. + if not hasattr(self, "_read_lock"): + self._read_lock = self._backend.create_lock() + return self._read_lock + + @property + def max_streams_semaphore(self) -> SyncSemaphore: + # We do this lazily, to make sure backend autodetection always + # runs within an async context. + if not hasattr(self, "_max_streams_semaphore"): + max_streams = self._h2_state.local_settings.max_concurrent_streams + self._max_streams_semaphore = self._backend.create_semaphore( + max_streams, exc_class=PoolTimeout + ) + return self._max_streams_semaphore + + def start_tls( + self, hostname: bytes, ssl_context: SSLContext, timeout: TimeoutDict = None + ) -> SyncSocketStream: + raise NotImplementedError("TLS upgrade not supported on HTTP/2 connections.") + + def handle_request( + self, + method: bytes, + url: URL, + headers: Headers, + stream: SyncByteStream, + extensions: dict, + ) -> Tuple[int, Headers, SyncByteStream, dict]: + timeout = cast(TimeoutDict, extensions.get("timeout", {})) + + with self.init_lock: + if not self._sent_connection_init: + # The very first stream is responsible for initiating the connection. 
+ self._state = ConnectionState.ACTIVE + self.send_connection_init(timeout) + self._sent_connection_init = True + + self.max_streams_semaphore.acquire() + try: + try: + stream_id = self._h2_state.get_next_available_stream_id() + except NoAvailableStreamIDError: + self._exhausted_available_stream_ids = True + raise NewConnectionRequired() + else: + self._state = ConnectionState.ACTIVE + self._should_expire_at = None + + h2_stream = SyncHTTP2Stream(stream_id=stream_id, connection=self) + self._streams[stream_id] = h2_stream + self._events[stream_id] = [] + return h2_stream.handle_request( + method, url, headers, stream, extensions + ) + except Exception: # noqa: PIE786 + self.max_streams_semaphore.release() + raise + + def send_connection_init(self, timeout: TimeoutDict) -> None: + """ + The HTTP/2 connection requires some initial setup before we can start + using individual request/response streams on it. + """ + # Need to set these manually here instead of manipulating via + # __setitem__() otherwise the H2Connection will emit SettingsUpdate + # frames in addition to sending the undesired defaults. + self._h2_state.local_settings = Settings( + client=True, + initial_values={ + # Disable PUSH_PROMISE frames from the server since we don't do anything + # with them for now. Maybe when we support caching? + SettingCodes.ENABLE_PUSH: 0, + # These two are taken from h2 for safe defaults + SettingCodes.MAX_CONCURRENT_STREAMS: 100, + SettingCodes.MAX_HEADER_LIST_SIZE: 65536, + }, + ) + + # Some websites (*cough* Yahoo *cough*) balk at this setting being + # present in the initial handshake since it's not defined in the original + # RFC despite the RFC mandating ignoring settings you don't know about. 
+ del self._h2_state.local_settings[ + h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL + ] + + logger.trace("initiate_connection=%r", self) + self._h2_state.initiate_connection() + self._h2_state.increment_flow_control_window(2 ** 24) + data_to_send = self._h2_state.data_to_send() + self.socket.write(data_to_send, timeout) + + def is_socket_readable(self) -> bool: + return self.socket.is_readable() + + def close(self) -> None: + logger.trace("close_connection=%r", self) + if self._state != ConnectionState.CLOSED: + self._state = ConnectionState.CLOSED + + self.socket.close() + + def wait_for_outgoing_flow(self, stream_id: int, timeout: TimeoutDict) -> int: + """ + Returns the maximum allowable outgoing flow for a given stream. + If the allowable flow is zero, then waits on the network until + WindowUpdated frames have increased the flow rate. + https://tools.ietf.org/html/rfc7540#section-6.9 + """ + local_flow = self._h2_state.local_flow_control_window(stream_id) + connection_flow = self._h2_state.max_outbound_frame_size + flow = min(local_flow, connection_flow) + while flow == 0: + self.receive_events(timeout) + local_flow = self._h2_state.local_flow_control_window(stream_id) + connection_flow = self._h2_state.max_outbound_frame_size + flow = min(local_flow, connection_flow) + return flow + + def wait_for_event( + self, stream_id: int, timeout: TimeoutDict + ) -> h2.events.Event: + """ + Returns the next event for a given stream. + If no events are available yet, then waits on the network until + an event is available. + """ + with self.read_lock: + while not self._events[stream_id]: + self.receive_events(timeout) + return self._events[stream_id].pop(0) + + def receive_events(self, timeout: TimeoutDict) -> None: + """ + Read some data from the network, and update the H2 state. 
+ """ + data = self.socket.read(self.READ_NUM_BYTES, timeout) + if data == b"": + raise RemoteProtocolError("Server disconnected") + + events = self._h2_state.receive_data(data) + for event in events: + event_stream_id = getattr(event, "stream_id", 0) + logger.trace("receive_event stream_id=%r event=%s", event_stream_id, event) + + if hasattr(event, "error_code"): + raise RemoteProtocolError(event) + + if event_stream_id in self._events: + self._events[event_stream_id].append(event) + + data_to_send = self._h2_state.data_to_send() + self.socket.write(data_to_send, timeout) + + def send_headers( + self, stream_id: int, headers: Headers, end_stream: bool, timeout: TimeoutDict + ) -> None: + logger.trace("send_headers stream_id=%r headers=%r", stream_id, headers) + self._h2_state.send_headers(stream_id, headers, end_stream=end_stream) + self._h2_state.increment_flow_control_window(2 ** 24, stream_id=stream_id) + data_to_send = self._h2_state.data_to_send() + self.socket.write(data_to_send, timeout) + + def send_data( + self, stream_id: int, chunk: bytes, timeout: TimeoutDict + ) -> None: + logger.trace("send_data stream_id=%r chunk=%r", stream_id, chunk) + self._h2_state.send_data(stream_id, chunk) + data_to_send = self._h2_state.data_to_send() + self.socket.write(data_to_send, timeout) + + def end_stream(self, stream_id: int, timeout: TimeoutDict) -> None: + logger.trace("end_stream stream_id=%r", stream_id) + self._h2_state.end_stream(stream_id) + data_to_send = self._h2_state.data_to_send() + self.socket.write(data_to_send, timeout) + + def acknowledge_received_data( + self, stream_id: int, amount: int, timeout: TimeoutDict + ) -> None: + self._h2_state.acknowledge_received_data(amount, stream_id) + data_to_send = self._h2_state.data_to_send() + self.socket.write(data_to_send, timeout) + + def close_stream(self, stream_id: int) -> None: + try: + logger.trace("close_stream stream_id=%r", stream_id) + del self._streams[stream_id] + del self._events[stream_id] + + if 
not self._streams: + if self._state == ConnectionState.ACTIVE: + if self._exhausted_available_stream_ids: + self.close() + else: + self._state = ConnectionState.IDLE + if self._keepalive_expiry is not None: + self._should_expire_at = ( + self._now() + self._keepalive_expiry + ) + finally: + self.max_streams_semaphore.release() + + +class SyncHTTP2Stream: + def __init__(self, stream_id: int, connection: SyncHTTP2Connection) -> None: + self.stream_id = stream_id + self.connection = connection + + def handle_request( + self, + method: bytes, + url: URL, + headers: Headers, + stream: SyncByteStream, + extensions: dict, + ) -> Tuple[int, Headers, SyncByteStream, dict]: + headers = [(k.lower(), v) for (k, v) in headers] + timeout = cast(TimeoutDict, extensions.get("timeout", {})) + + # Send the request. + seen_headers = set(key for key, value in headers) + has_body = ( + b"content-length" in seen_headers or b"transfer-encoding" in seen_headers + ) + + self.send_headers(method, url, headers, has_body, timeout) + if has_body: + self.send_body(stream, timeout) + + # Receive the response. + status_code, headers = self.receive_response(timeout) + response_stream = IteratorByteStream( + iterator=self.body_iter(timeout), close_func=self._response_closed + ) + + extensions = { + "http_version": b"HTTP/2", + } + return (status_code, headers, response_stream, extensions) + + def send_headers( + self, + method: bytes, + url: URL, + headers: Headers, + has_body: bool, + timeout: TimeoutDict, + ) -> None: + scheme, hostname, port, path = url + + # In HTTP/2 the ':authority' pseudo-header is used instead of 'Host'. + # In order to gracefully handle HTTP/1.1 and HTTP/2 we always require + # HTTP/1.1 style headers, and map them appropriately if we end up on + # an HTTP/2 connection. + authority = None + + for k, v in headers: + if k == b"host": + authority = v + break + + if authority is None: + # Mirror the same error we'd see with `h11`, so that the behaviour + # is consistent. 
Although we're dealing with an `:authority` + # pseudo-header by this point, from an end-user perspective the issue + # is that the outgoing request needed to include a `host` header. + raise LocalProtocolError("Missing mandatory Host: header") + + headers = [ + (b":method", method), + (b":authority", authority), + (b":scheme", scheme), + (b":path", path), + ] + [ + (k, v) + for k, v in headers + if k + not in ( + b"host", + b"transfer-encoding", + ) + ] + end_stream = not has_body + + self.connection.send_headers(self.stream_id, headers, end_stream, timeout) + + def send_body(self, stream: SyncByteStream, timeout: TimeoutDict) -> None: + for data in stream: + while data: + max_flow = self.connection.wait_for_outgoing_flow( + self.stream_id, timeout + ) + chunk_size = min(len(data), max_flow) + chunk, data = data[:chunk_size], data[chunk_size:] + self.connection.send_data(self.stream_id, chunk, timeout) + + self.connection.end_stream(self.stream_id, timeout) + + def receive_response( + self, timeout: TimeoutDict + ) -> Tuple[int, List[Tuple[bytes, bytes]]]: + """ + Read the response status and headers from the network. 
+ """ + while True: + event = self.connection.wait_for_event(self.stream_id, timeout) + if isinstance(event, h2.events.ResponseReceived): + break + + status_code = 200 + headers = [] + for k, v in event.headers: + if k == b":status": + status_code = int(v.decode("ascii", errors="ignore")) + elif not k.startswith(b":"): + headers.append((k, v)) + + return (status_code, headers) + + def body_iter(self, timeout: TimeoutDict) -> Iterator[bytes]: + while True: + event = self.connection.wait_for_event(self.stream_id, timeout) + if isinstance(event, h2.events.DataReceived): + amount = event.flow_controlled_length + self.connection.acknowledge_received_data( + self.stream_id, amount, timeout + ) + yield event.data + elif isinstance(event, (h2.events.StreamEnded, h2.events.StreamReset)): + break + + def _response_closed(self) -> None: + self.connection.close_stream(self.stream_id) diff --git a/.venv/lib/python3.9/site-packages/httpcore/_sync/http_proxy.py b/.venv/lib/python3.9/site-packages/httpcore/_sync/http_proxy.py new file mode 100644 index 0000000..78c02e2 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpcore/_sync/http_proxy.py @@ -0,0 +1,290 @@ +from http import HTTPStatus +from ssl import SSLContext +from typing import Tuple, cast + +from .._bytestreams import ByteStream +from .._exceptions import ProxyError +from .._types import URL, Headers, TimeoutDict +from .._utils import get_logger, url_to_origin +from .base import SyncByteStream +from .connection import SyncHTTPConnection +from .connection_pool import SyncConnectionPool, ResponseByteStream + +logger = get_logger(__name__) + + +def get_reason_phrase(status_code: int) -> str: + try: + return HTTPStatus(status_code).phrase + except ValueError: + return "" + + +def merge_headers( + default_headers: Headers = None, override_headers: Headers = None +) -> Headers: + """ + Append default_headers and override_headers, de-duplicating if a key existing in + both cases. 
+ """ + default_headers = [] if default_headers is None else default_headers + override_headers = [] if override_headers is None else override_headers + has_override = set([key.lower() for key, value in override_headers]) + default_headers = [ + (key, value) + for key, value in default_headers + if key.lower() not in has_override + ] + return default_headers + override_headers + + +class SyncHTTPProxy(SyncConnectionPool): + """ + A connection pool for making HTTP requests via an HTTP proxy. + + Parameters + ---------- + proxy_url: + The URL of the proxy service as a 4-tuple of (scheme, host, port, path). + proxy_headers: + A list of proxy headers to include. + proxy_mode: + A proxy mode to operate in. May be "DEFAULT", "FORWARD_ONLY", or "TUNNEL_ONLY". + ssl_context: + An SSL context to use for verifying connections. + max_connections: + The maximum number of concurrent connections to allow. + max_keepalive_connections: + The maximum number of connections to allow before closing keep-alive + connections. + http2: + Enable HTTP/2 support. 
+ """ + + def __init__( + self, + proxy_url: URL, + proxy_headers: Headers = None, + proxy_mode: str = "DEFAULT", + ssl_context: SSLContext = None, + max_connections: int = None, + max_keepalive_connections: int = None, + keepalive_expiry: float = None, + http2: bool = False, + backend: str = "sync", + # Deprecated argument style: + max_keepalive: int = None, + ): + assert proxy_mode in ("DEFAULT", "FORWARD_ONLY", "TUNNEL_ONLY") + + self.proxy_origin = url_to_origin(proxy_url) + self.proxy_headers = [] if proxy_headers is None else proxy_headers + self.proxy_mode = proxy_mode + super().__init__( + ssl_context=ssl_context, + max_connections=max_connections, + max_keepalive_connections=max_keepalive_connections, + keepalive_expiry=keepalive_expiry, + http2=http2, + backend=backend, + max_keepalive=max_keepalive, + ) + + def handle_request( + self, + method: bytes, + url: URL, + headers: Headers, + stream: SyncByteStream, + extensions: dict, + ) -> Tuple[int, Headers, SyncByteStream, dict]: + if self._keepalive_expiry is not None: + self._keepalive_sweep() + + if ( + self.proxy_mode == "DEFAULT" and url[0] == b"http" + ) or self.proxy_mode == "FORWARD_ONLY": + # By default HTTP requests should be forwarded. + logger.trace( + "forward_request proxy_origin=%r proxy_headers=%r method=%r url=%r", + self.proxy_origin, + self.proxy_headers, + method, + url, + ) + return self._forward_request( + method, url, headers=headers, stream=stream, extensions=extensions + ) + else: + # By default HTTPS should be tunnelled. 
+ logger.trace( + "tunnel_request proxy_origin=%r proxy_headers=%r method=%r url=%r", + self.proxy_origin, + self.proxy_headers, + method, + url, + ) + return self._tunnel_request( + method, url, headers=headers, stream=stream, extensions=extensions + ) + + def _forward_request( + self, + method: bytes, + url: URL, + headers: Headers, + stream: SyncByteStream, + extensions: dict, + ) -> Tuple[int, Headers, SyncByteStream, dict]: + """ + Forwarded proxy requests include the entire URL as the HTTP target, + rather than just the path. + """ + timeout = cast(TimeoutDict, extensions.get("timeout", {})) + origin = self.proxy_origin + connection = self._get_connection_from_pool(origin) + + if connection is None: + connection = SyncHTTPConnection( + origin=origin, + http2=self._http2, + keepalive_expiry=self._keepalive_expiry, + ssl_context=self._ssl_context, + ) + self._add_to_pool(connection, timeout) + + # Issue a forwarded proxy request... + + # GET https://www.example.org/path HTTP/1.1 + # [proxy headers] + # [headers] + scheme, host, port, path = url + if port is None: + target = b"%b://%b%b" % (scheme, host, path) + else: + target = b"%b://%b:%d%b" % (scheme, host, port, path) + + url = self.proxy_origin + (target,) + headers = merge_headers(self.proxy_headers, headers) + + ( + status_code, + headers, + stream, + extensions, + ) = connection.handle_request( + method, url, headers=headers, stream=stream, extensions=extensions + ) + + wrapped_stream = ResponseByteStream( + stream, connection=connection, callback=self._response_closed + ) + + return status_code, headers, wrapped_stream, extensions + + def _tunnel_request( + self, + method: bytes, + url: URL, + headers: Headers, + stream: SyncByteStream, + extensions: dict, + ) -> Tuple[int, Headers, SyncByteStream, dict]: + """ + Tunnelled proxy requests require an initial CONNECT request to + establish the connection, and then send regular requests. 
+ """ + timeout = cast(TimeoutDict, extensions.get("timeout", {})) + origin = url_to_origin(url) + connection = self._get_connection_from_pool(origin) + + if connection is None: + scheme, host, port = origin + + # First, create a connection to the proxy server + proxy_connection = SyncHTTPConnection( + origin=self.proxy_origin, + http2=self._http2, + keepalive_expiry=self._keepalive_expiry, + ssl_context=self._ssl_context, + ) + + # Issue a CONNECT request... + + # CONNECT www.example.org:80 HTTP/1.1 + # [proxy-headers] + target = b"%b:%d" % (host, port) + connect_url = self.proxy_origin + (target,) + connect_headers = [(b"Host", target), (b"Accept", b"*/*")] + connect_headers = merge_headers(connect_headers, self.proxy_headers) + + try: + ( + proxy_status_code, + _, + proxy_stream, + _, + ) = proxy_connection.handle_request( + b"CONNECT", + connect_url, + headers=connect_headers, + stream=ByteStream(b""), + extensions=extensions, + ) + + proxy_reason = get_reason_phrase(proxy_status_code) + logger.trace( + "tunnel_response proxy_status_code=%r proxy_reason=%r ", + proxy_status_code, + proxy_reason, + ) + # Read the response data without closing the socket + for _ in proxy_stream: + pass + + # See if the tunnel was successfully established. + if proxy_status_code < 200 or proxy_status_code > 299: + msg = "%d %s" % (proxy_status_code, proxy_reason) + raise ProxyError(msg) + + # Upgrade to TLS if required + # We assume the target speaks TLS on the specified port + if scheme == b"https": + proxy_connection.start_tls(host, self._ssl_context, timeout) + except Exception as exc: + proxy_connection.close() + raise ProxyError(exc) + + # The CONNECT request is successful, so we have now SWITCHED PROTOCOLS. + # This means the proxy connection is now unusable, and we must create + # a new one for regular requests, making sure to use the same socket to + # retain the tunnel. 
+ connection = SyncHTTPConnection( + origin=origin, + http2=self._http2, + keepalive_expiry=self._keepalive_expiry, + ssl_context=self._ssl_context, + socket=proxy_connection.socket, + ) + self._add_to_pool(connection, timeout) + + # Once the connection has been established we can send requests on + # it as normal. + ( + status_code, + headers, + stream, + extensions, + ) = connection.handle_request( + method, + url, + headers=headers, + stream=stream, + extensions=extensions, + ) + + wrapped_stream = ResponseByteStream( + stream, connection=connection, callback=self._response_closed + ) + + return status_code, headers, wrapped_stream, extensions diff --git a/.venv/lib/python3.9/site-packages/httpcore/_threadlock.py b/.venv/lib/python3.9/site-packages/httpcore/_threadlock.py new file mode 100644 index 0000000..2ff2bc3 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpcore/_threadlock.py @@ -0,0 +1,35 @@ +import threading +from types import TracebackType +from typing import Type + + +class ThreadLock: + """ + Provides thread safety when used as a sync context manager, or a + no-op when used as an async context manager. + """ + + def __init__(self) -> None: + self.lock = threading.Lock() + + def __enter__(self) -> None: + self.lock.acquire() + + def __exit__( + self, + exc_type: Type[BaseException] = None, + exc_value: BaseException = None, + traceback: TracebackType = None, + ) -> None: + self.lock.release() + + async def __aenter__(self) -> None: + pass + + async def __aexit__( + self, + exc_type: Type[BaseException] = None, + exc_value: BaseException = None, + traceback: TracebackType = None, + ) -> None: + pass diff --git a/.venv/lib/python3.9/site-packages/httpcore/_types.py b/.venv/lib/python3.9/site-packages/httpcore/_types.py new file mode 100644 index 0000000..2f9eeba --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpcore/_types.py @@ -0,0 +1,12 @@ +""" +Type definitions for type checking purposes. 
+""" + +from typing import List, Mapping, Optional, Tuple, TypeVar, Union + +T = TypeVar("T") +StrOrBytes = Union[str, bytes] +Origin = Tuple[bytes, bytes, int] +URL = Tuple[bytes, bytes, Optional[int], bytes] +Headers = List[Tuple[bytes, bytes]] +TimeoutDict = Mapping[str, Optional[float]] diff --git a/.venv/lib/python3.9/site-packages/httpcore/_utils.py b/.venv/lib/python3.9/site-packages/httpcore/_utils.py new file mode 100644 index 0000000..978b87a --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpcore/_utils.py @@ -0,0 +1,105 @@ +import itertools +import logging +import os +import select +import socket +import sys +import typing + +from ._types import URL, Origin + +_LOGGER_INITIALIZED = False +TRACE_LOG_LEVEL = 5 +DEFAULT_PORTS = {b"http": 80, b"https": 443} + + +class Logger(logging.Logger): + # Stub for type checkers. + def trace(self, message: str, *args: typing.Any, **kwargs: typing.Any) -> None: + ... # pragma: nocover + + +def get_logger(name: str) -> Logger: + """ + Get a `logging.Logger` instance, and optionally + set up debug logging based on the HTTPCORE_LOG_LEVEL or HTTPX_LOG_LEVEL + environment variables. 
+ """ + global _LOGGER_INITIALIZED + if not _LOGGER_INITIALIZED: + _LOGGER_INITIALIZED = True + logging.addLevelName(TRACE_LOG_LEVEL, "TRACE") + + log_level = os.environ.get( + "HTTPCORE_LOG_LEVEL", os.environ.get("HTTPX_LOG_LEVEL", "") + ).upper() + if log_level in ("DEBUG", "TRACE"): + logger = logging.getLogger("httpcore") + logger.setLevel(logging.DEBUG if log_level == "DEBUG" else TRACE_LOG_LEVEL) + handler = logging.StreamHandler(sys.stderr) + handler.setFormatter( + logging.Formatter( + fmt="%(levelname)s [%(asctime)s] %(name)s - %(message)s", + datefmt="%Y-%m-%d %H:%M:%S", + ) + ) + logger.addHandler(handler) + + logger = logging.getLogger(name) + + def trace(message: str, *args: typing.Any, **kwargs: typing.Any) -> None: + logger.log(TRACE_LOG_LEVEL, message, *args, **kwargs) + + logger.trace = trace # type: ignore + + return typing.cast(Logger, logger) + + +def url_to_origin(url: URL) -> Origin: + scheme, host, explicit_port = url[:3] + default_port = DEFAULT_PORTS[scheme] + port = default_port if explicit_port is None else explicit_port + return scheme, host, port + + +def origin_to_url_string(origin: Origin) -> str: + scheme, host, explicit_port = origin + port = f":{explicit_port}" if explicit_port != DEFAULT_PORTS[scheme] else "" + return f"{scheme.decode('ascii')}://{host.decode('ascii')}{port}" + + +def exponential_backoff(factor: float) -> typing.Iterator[float]: + yield 0 + for n in itertools.count(2): + yield factor * (2 ** (n - 2)) + + +def is_socket_readable(sock: typing.Optional[socket.socket]) -> bool: + """ + Return whether a socket, as identifed by its file descriptor, is readable. + + "A socket is readable" means that the read buffer isn't empty, i.e. that calling + .recv() on it would immediately return some data. + """ + # NOTE: we want check for readability without actually attempting to read, because + # we don't want to block forever if it's not readable. 
+ + # In the case that the socket no longer exists, or cannot return a file + # descriptor, we treat it as being readable, as if the next read operation + # on it is ready to return the terminating `b""`. + sock_fd = None if sock is None else sock.fileno() + if sock_fd is None or sock_fd < 0: + return True + + # The implementation below was stolen from: + # https://github.com/python-trio/trio/blob/20ee2b1b7376db637435d80e266212a35837ddcc/trio/_socket.py#L471-L478 + # See also: https://github.com/encode/httpcore/pull/193#issuecomment-703129316 + + # Use select.select on Windows or when poll is unavailable, and select.poll + # everywhere else. (E.g. When eventlet is in use. See #327) + if sys.platform == "win32" or getattr(select, "poll", None) is None: + rready, _, _ = select.select([sock_fd], [], [], 0) + return bool(rready) + p = select.poll() + p.register(sock_fd, select.POLLIN) + return bool(p.poll(0)) diff --git a/.venv/lib/python3.9/site-packages/httpcore/py.typed b/.venv/lib/python3.9/site-packages/httpcore/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.9/site-packages/httpx-0.18.2.dist-info/INSTALLER b/.venv/lib/python3.9/site-packages/httpx-0.18.2.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpx-0.18.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.9/site-packages/httpx-0.18.2.dist-info/LICENSE.md b/.venv/lib/python3.9/site-packages/httpx-0.18.2.dist-info/LICENSE.md new file mode 100644 index 0000000..ab79d16 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpx-0.18.2.dist-info/LICENSE.md @@ -0,0 +1,12 @@ +Copyright © 2019, [Encode OSS Ltd](https://www.encode.io/). +All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/.venv/lib/python3.9/site-packages/httpx-0.18.2.dist-info/METADATA b/.venv/lib/python3.9/site-packages/httpx-0.18.2.dist-info/METADATA new file mode 100644 index 0000000..2d71a64 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpx-0.18.2.dist-info/METADATA @@ -0,0 +1,972 @@ +Metadata-Version: 2.1 +Name: httpx +Version: 0.18.2 +Summary: The next generation HTTP client. 
+Home-page: https://github.com/encode/httpx +Author: Tom Christie +Author-email: tom@tomchristie.com +License: BSD +Project-URL: Changelog, https://github.com/encode/httpx/blob/master/CHANGELOG.md +Project-URL: Documentation, https://www.python-httpx.org +Project-URL: Source, https://github.com/encode/httpx +Platform: UNKNOWN +Classifier: Development Status :: 4 - Beta +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Topic :: Internet :: WWW/HTTP +Classifier: Framework :: AsyncIO +Classifier: Framework :: Trio +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3 :: Only +Requires-Python: >=3.6 +Description-Content-Type: text/markdown +Requires-Dist: certifi +Requires-Dist: sniffio +Requires-Dist: rfc3986[idna2008] (<2,>=1.3) +Requires-Dist: httpcore (<0.14.0,>=0.13.3) +Requires-Dist: async-generator ; python_version < "3.7" +Provides-Extra: brotli +Requires-Dist: brotlicffi (==1.*) ; extra == 'brotli' +Provides-Extra: http2 +Requires-Dist: h2 (==3.*) ; extra == 'http2' + +

+ HTTPX +

+ +

HTTPX - A next-generation HTTP client for Python.

+ +

+ + Test Suite + + + Package version + +

+ +HTTPX is a fully featured HTTP client for Python 3, which provides sync and async APIs, and support for both HTTP/1.1 and HTTP/2. + +**Note**: _HTTPX should be considered in beta. We believe we've got the public API to +a stable point now, but would strongly recommend pinning your dependencies to the `0.18.*` +release, so that you're able to properly review [API changes between package updates](https://github.com/encode/httpx/blob/master/CHANGELOG.md). A 1.0 release is expected to be issued sometime in 2021._ + +--- + +Let's get started... + +```pycon +>>> import httpx +>>> r = httpx.get('https://www.example.org/') +>>> r + +>>> r.status_code +200 +>>> r.headers['content-type'] +'text/html; charset=UTF-8' +>>> r.text +'\n\n\nExample Domain...' +``` + +Or, using the async API... + +_Use [IPython](https://ipython.readthedocs.io/en/stable/) or Python 3.8+ with `python -m asyncio` to try this code interactively._ + +```pycon +>>> import httpx +>>> async with httpx.AsyncClient() as client: +... r = await client.get('https://www.example.org/') +... +>>> r + +``` + +## Features + +HTTPX builds on the well-established usability of `requests`, and gives you: + +* A broadly [requests-compatible API](https://www.python-httpx.org/compatibility/). +* Standard synchronous interface, but with [async support if you need it](https://www.python-httpx.org/async/). +* HTTP/1.1 [and HTTP/2 support](https://www.python-httpx.org/http2/). +* Ability to make requests directly to [WSGI applications](https://www.python-httpx.org/advanced/#calling-into-python-web-apps) or [ASGI applications](https://www.python-httpx.org/async/#calling-into-python-web-apps). +* Strict timeouts everywhere. +* Fully type annotated. +* 100% test coverage. + +Plus all the standard features of `requests`... 
+ +* International Domains and URLs +* Keep-Alive & Connection Pooling +* Sessions with Cookie Persistence +* Browser-style SSL Verification +* Basic/Digest Authentication +* Elegant Key/Value Cookies +* Automatic Decompression +* Automatic Content Decoding +* Unicode Response Bodies +* Multipart File Uploads +* HTTP(S) Proxy Support +* Connection Timeouts +* Streaming Downloads +* .netrc Support +* Chunked Requests + +## Installation + +Install with pip: + +```shell +$ pip install httpx +``` + +Or, to include the optional HTTP/2 support, use: + +```shell +$ pip install httpx[http2] +``` + +HTTPX requires Python 3.6+. + +## Documentation + +Project documentation is available at [https://www.python-httpx.org/](https://www.python-httpx.org/). + +For a run-through of all the basics, head over to the [QuickStart](https://www.python-httpx.org/quickstart/). + +For more advanced topics, see the [Advanced Usage](https://www.python-httpx.org/advanced/) section, the [async support](https://www.python-httpx.org/async/) section, or the [HTTP/2](https://www.python-httpx.org/http2/) section. + +The [Developer Interface](https://www.python-httpx.org/api/) provides a comprehensive API reference. + +To find out about tools that integrate with HTTPX, see [Third Party Packages](https://www.python-httpx.org/third-party-packages/). + +## Contribute + +If you want to contribute with HTTPX check out the [Contributing Guide](https://www.python-httpx.org/contributing/) to learn how to start. + +## Dependencies + +The HTTPX project relies on these excellent libraries: + +* `httpcore` - The underlying transport implementation for `httpx`. + * `h11` - HTTP/1.1 support. + * `h2` - HTTP/2 support. *(Optional)* +* `certifi` - SSL certificates. +* `rfc3986` - URL parsing & normalization. + * `idna` - Internationalized domain name support. +* `sniffio` - Async library autodetection. +* `async_generator` - Backport support for `contextlib.asynccontextmanager`. 
*(Only required for Python 3.6)* +* `brotlicffi` - Decoding for "brotli" compressed responses. *(Optional)* + +A huge amount of credit is due to `requests` for the API layout that +much of this work follows, as well as to `urllib3` for plenty of design +inspiration around the lower-level networking details. + +

— ⭐️ —

+

HTTPX is BSD licensed code. Designed & built in Brighton, England.

+ + +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). + +## 0.18.2 (17th June, 2021) + +### Added + +* Support for Python 3.10. (Pull #1687) +* Expose `httpx.USE_CLIENT_DEFAULT`, used as the default to `auth` and `timeout` parameters in request methods. (Pull #1634) +* Support [HTTP/2 "prior knowledge"](https://python-hyper.org/projects/hyper-h2/en/v2.3.1/negotiating-http2.html#prior-knowledge), using `httpx.Client(http1=False, http2=True)`. (Pull #1624) + +### Fixed + +* Clean up some cases where warnings were being issued. (Pull #1687) +* Prefer Content-Length over Transfer-Encoding: chunked for content= cases. (Pull #1619) + +## 0.18.1 (29th April, 2021) + +### Changed + +* Update brotli support to use the `brotlicffi` package (Pull #1605) +* Ensure that `Request(..., stream=...)` does not auto-generate any headers on the request instance. (Pull #1607) + +### Fixed + +* Pass through `timeout=...` in top-level httpx.stream() function. (Pull #1613) +* Map httpcore transport close exceptions to httpx exceptions. (Pull #1606) + +## 0.18.0 (27th April, 2021) + +The 0.18.x release series formalises our low-level Transport API, introducing the base classes `httpx.BaseTransport` and `httpx.AsyncBaseTransport`. + +See the "[Writing custom transports](https://www.python-httpx.org/advanced/#writing-custom-transports)" documentation and the [`httpx.BaseTransport.handle_request()`](https://github.com/encode/httpx/blob/397aad98fdc8b7580a5fc3e88f1578b4302c6382/httpx/_transports/base.py#L77-L147) docstring for more complete details on implementing custom transports. + +Pull request #1522 includes a checklist of differences from the previous `httpcore` transport API, for developers implementing custom transports. + +The following API changes have been issuing deprecation warnings since 0.17.0 onwards, and are now fully deprecated... 
+ +* You should now use httpx.codes consistently instead of httpx.StatusCodes. +* Use limits=... instead of pool_limits=.... +* Use proxies={"http://": ...} instead of proxies={"http": ...} for scheme-specific mounting. + +### Changed + +* Transport instances now inherit from `httpx.BaseTransport` or `httpx.AsyncBaseTransport`, + and should implement either the `handle_request` method or `handle_async_request` method. (Pull #1522, #1550) +* The `response.ext` property and `Response(ext=...)` argument are now named `extensions`. (Pull #1522) +* The recommendation to not use `data=` in favour of `content=` has now been escalated to a deprecation warning. (Pull #1573) +* Drop `Response(on_close=...)` from API, since it was a bit of leaking implementation detail. (Pull #1572) +* When using a client instance, cookies should always be set on the client, rather than on a per-request basis. We prefer enforcing a stricter API here because it provides clearer expectations around cookie persistence, particularly when redirects occur. (Pull #1574) +* The runtime exception `httpx.ResponseClosed` is now named `httpx.StreamClosed`. (#1584) +* The `httpx.QueryParams` model now presents an immutable interface. There is a discussion on [the design and motivation here](https://github.com/encode/httpx/discussions/1599). Use `client.params = client.params.merge(...)` instead of `client.params.update(...)`. The basic query manipulation methods are `query.set(...)`, `query.add(...)`, and `query.remove()`. (#1600) + +### Added + +* The `Request` and `Response` classes can now be serialized using pickle. (#1579) +* Handle `data={"key": [None|int|float|bool]}` cases. (Pull #1539) +* Support `httpx.URL(**kwargs)`, for example `httpx.URL(scheme="https", host="www.example.com", path="/')`, or `httpx.URL("https://www.example.com/", username="tom@gmail.com", password="123 456")`. (Pull #1601) +* Support `url.copy_with(params=...)`. 
(Pull #1601) +* Add `url.params` parameter, returning an immutable `QueryParams` instance. (Pull #1601) +* Support query manipulation methods on the URL class. These are `url.copy_set_param()`, `url.copy_add_param()`, `url.copy_remove_param()`, `url.copy_merge_params()`. (Pull #1601) +* The `httpx.URL` class now performs port normalization, so `:80` ports are stripped from `http` URLs and `:443` ports are stripped from `https` URLs. (Pull #1603) +* The `URL.host` property returns unicode strings for internationalized domain names. The `URL.raw_host` property returns byte strings with IDNA escaping applied. (Pull #1590) + +### Fixed + +* Fix Content-Length for cases of `files=...` where unicode string is used as the file content. (Pull #1537) +* Fix some cases of merging relative URLs against `Client(base_url=...)`. (Pull #1532) +* The `request.content` attribute is now always available except for streaming content, which requires an explicit `.read()`. (Pull #1583) + +## 0.17.1 (March 15th, 2021) + +### Fixed + +* Type annotation on `CertTypes` allows `keyfile` and `password` to be optional. (Pull #1503) +* Fix httpcore pinned version. (Pull #1495) + +## 0.17.0 (February 28th, 2021) + +### Added + +* Add `httpx.MockTransport()`, allowing to mock out a transport using pre-determined responses. (Pull #1401, Pull #1449) +* Add `httpx.HTTPTransport()` and `httpx.AsyncHTTPTransport()` default transports. (Pull #1399) +* Add mount API support, using `httpx.Client(mounts=...)`. (Pull #1362) +* Add `chunk_size` parameter to `iter_raw()`, `iter_bytes()`, `iter_text()`. (Pull #1277) +* Add `keepalive_expiry` parameter to `httpx.Limits()` configuration. (Pull #1398) +* Add repr to `httpx.Cookies` to display available cookies. (Pull #1411) +* Add support for `params=` (previously only `params=` was supported). (Pull #1426) + +### Fixed + +* Add missing `raw_path` to ASGI scope. (Pull #1357) +* Tweak `create_ssl_context` defaults to use `trust_env=True`. 
(Pull #1447) +* Properly URL-escape WSGI `PATH_INFO`. (Pull #1391) +* Properly set default ports in WSGI transport. (Pull #1469) +* Properly encode slashes when using `base_url`. (Pull #1407) +* Properly map exceptions in `request.aclose()`. (Pull #1465) + +## 0.16.1 (October 8th, 2020) + +### Fixed + +* Support literal IPv6 addresses in URLs. (Pull #1349) +* Force lowercase headers in ASGI scope dictionaries. (Pull #1351) + +## 0.16.0 (October 6th, 2020) + +### Changed + +* Preserve HTTP header casing. (Pull #1338, encode/httpcore#216, python-hyper/h11#104) +* Drop `response.next()` and `response.anext()` methods in favour of `response.next_request` attribute. (Pull #1339) +* Closed clients now raise a runtime error if attempting to send a request. (Pull #1346) + +### Added + +* Add Python 3.9 to officially supported versions. +* Type annotate `__enter__`/`__exit__`/`__aenter__`/`__aexit__` in a way that supports subclasses of `Client` and `AsyncClient`. (Pull #1336) + +## 0.15.5 (October 1st, 2020) + +### Added + +* Add `response.next_request` (Pull #1334) + +## 0.15.4 (September 25th, 2020) + +### Added + +* Support direct comparisons between `Headers` and dicts or lists of two-tuples. Eg. `assert response.headers == {"Content-Length": 24}` (Pull #1326) + +### Fixed + +* Fix automatic `.read()` when `Response` instances are created with `content=` (Pull #1324) + +## 0.15.3 (September 24th, 2020) + +### Fixed + +* Fixed connection leak in async client due to improper closing of response streams. (Pull #1316) + +## 0.15.2 (September 23rd, 2020) + +### Fixed + +* Fixed `response.elapsed` property. (Pull #1313) +* Fixed client authentication interaction with `.stream()`. (Pull #1312) + +## 0.15.1 (September 23rd, 2020) + +### Fixed + +* ASGITransport now properly applies URL decoding to the `path` component, as-per the ASGI spec. (Pull #1307) + +## 0.15.0 (September 22nd, 2020) + +### Added + +* Added support for curio. 
(Pull https://github.com/encode/httpcore/pull/168) +* Added support for event hooks. (Pull #1246) +* Added support for authentication flows which require either sync or async I/O. (Pull #1217) +* Added support for monitoring download progress with `response.num_bytes_downloaded`. (Pull #1268) +* Added `Request(content=...)` for byte content, instead of overloading `Request(data=...)` (Pull #1266) +* Added support for all URL components as parameter names when using `url.copy_with(...)`. (Pull #1285) +* Neater split between automatically populated headers on `Request` instances, vs default `client.headers`. (Pull #1248) +* Unclosed `AsyncClient` instances will now raise warnings if garbage collected. (Pull #1197) +* Support `Response(content=..., text=..., html=..., json=...)` for creating usable response instances in code. (Pull #1265, #1297) +* Support instantiating requests from the low-level transport API. (Pull #1293) +* Raise errors on invalid URL types. (Pull #1259) + +### Changed + +* Cleaned up expected behaviour for URL escaping. `url.path` is now URL escaped. (Pull #1285) +* Cleaned up expected behaviour for bytes vs str in URL components. `url.userinfo` and `url.query` are not URL escaped, and so return bytes. (Pull #1285) +* Drop `url.authority` property in favour of `url.netloc`, since "authority" was semantically incorrect. (Pull #1285) +* Drop `url.full_path` property in favour of `url.raw_path`, for better consistency with other parts of the API. (Pull #1285) +* No longer use the `chardet` library for auto-detecting charsets, instead defaulting to a simpler approach when no charset is specified. (#1269) + +### Fixed + +* Swapped ordering of redirects and authentication flow. (Pull #1267) +* `.netrc` lookups should use host, not host+port. (Pull #1298) + +### Removed + +* The `URLLib3Transport` class no longer exists. 
We've published it instead as an example of [a custom transport class](https://gist.github.com/florimondmanca/d56764d78d748eb9f73165da388e546e). (Pull #1182) +* Drop `request.timer` attribute, which was being used internally to set `response.elapsed`. (Pull #1249) +* Drop `response.decoder` attribute, which was being used internally. (Pull #1276) +* `Request.prepare()` is now a private method. (Pull #1284) +* The `Headers.getlist()` method had previously been deprecated in favour of `Headers.get_list()`. It is now fully removed. +* The `QueryParams.getlist()` method had previously been deprecated in favour of `QueryParams.get_list()`. It is now fully removed. +* The `URL.is_ssl` property had previously been deprecated in favour of `URL.scheme == "https"`. It is now fully removed. +* The `httpx.PoolLimits` class had previously been deprecated in favour of `httpx.Limits`. It is now fully removed. +* The `max_keepalive` setting had previously been deprecated in favour of the more explicit `max_keepalive_connections`. It is now fully removed. +* The verbose `httpx.Timeout(5.0, connect_timeout=60.0)` style had previously been deprecated in favour of `httpx.Timeout(5.0, connect=60.0)`. It is now fully removed. +* Support for instantiating a timeout config missing some defaults, such as `httpx.Timeout(connect=60.0)`, had previously been deprecated in favour of enforcing a more explicit style, such as `httpx.Timeout(5.0, connect=60.0)`. This is now strictly enforced. + +## 0.14.3 (September 2nd, 2020) + +### Added + +* `http.Response()` may now be instantiated without a `request=...` parameter. Useful for some unit testing cases. (Pull #1238) +* Add `103 Early Hints` and `425 Too Early` status codes. (Pull #1244) + +### Fixed + +* `DigestAuth` now handles responses that include multiple 'WWW-Authenticate' headers. (Pull #1240) +* Call into transport `__enter__`/`__exit__` or `__aenter__`/`__aexit__` when client is used in a context manager style. 
(Pull #1218) + +## 0.14.2 (August 24th, 2020) + +### Added + +* Support `client.get(..., auth=None)` to bypass the default authentication on a clients. (Pull #1115) +* Support `client.auth = ...` property setter. (Pull #1185) +* Support `httpx.get(..., proxies=...)` on top-level request functions. (Pull #1198) +* Display instances with nicer import styles. (Eg. ) (Pull #1155) +* Support `cookies=[(key, value)]` list-of-two-tuples style usage. (Pull #1211) + +### Fixed + +* Ensure that automatically included headers on a request may be modified. (Pull #1205) +* Allow explicit `Content-Length` header on streaming requests. (Pull #1170) +* Handle URL quoted usernames and passwords properly. (Pull #1159) +* Use more consistent default for `HEAD` requests, setting `allow_redirects=True`. (Pull #1183) +* If a transport error occurs while streaming the response, raise an `httpx` exception, not the underlying `httpcore` exception. (Pull #1190) +* Include the underlying `httpcore` traceback, when transport exceptions occur. (Pull #1199) + +## 0.14.1 (August 11th, 2020) + +### Added + +* The `httpx.URL(...)` class now raises `httpx.InvalidURL` on invalid URLs, rather than exposing the underlying `rfc3986` exception. If a redirect response includes an invalid 'Location' header, then a `RemoteProtocolError` exception is raised, which will be associated with the request that caused it. (Pull #1163) + +### Fixed + +* Handling multiple `Set-Cookie` headers became broken in the 0.14.0 release, and is now resolved. (Pull #1156) + +## 0.14.0 (August 7th, 2020) + +The 0.14 release includes a range of improvements to the public API, intended on preparing for our upcoming 1.0 release. + +* Our HTTP/2 support is now fully optional. **You now need to use `pip install httpx[http2]` if you want to include the HTTP/2 dependancies.** +* Our HSTS support has now been removed. 
Rewriting URLs from `http` to `https` if the host is on the HSTS list can be beneficial in avoiding roundtrips to incorrectly formed URLs, but on balance we've decided to remove this feature, on the principle of least surprise. Most programmatic clients do not include HSTS support, and for now we're opting to remove our support for it. +* Our exception hierarchy has been overhauled. Most users will want to stick with their existing `httpx.HTTPError` usage, but we've got a clearer overall structure now. See https://www.python-httpx.org/exceptions/ for more details. + +When upgrading you should be aware of the following public API changes. Note that deprecated usages will currently continue to function, but will issue warnings. + +* You should now use `httpx.codes` consistently instead of `httpx.StatusCodes`. +* Usage of `httpx.Timeout()` should now always include an explicit default. Eg. `httpx.Timeout(None, pool=5.0)`. +* When using `httpx.Timeout()`, we now have more concisely named keyword arguments. Eg. `read=5.0`, instead of `read_timeout=5.0`. +* Use `httpx.Limits()` instead of `httpx.PoolLimits()`, and `limits=...` instead of `pool_limits=...`. +* The `httpx.Limits(max_keepalive=...)` argument is now deprecated in favour of a more explicit `httpx.Limits(max_keepalive_connections=...)`. +* Keys used with `Client(proxies={...})` should now be in the style of `{"http://": ...}`, rather than `{"http": ...}`. +* The multidict methods `Headers.getlist()` and `QueryParams.getlist()` are deprecated in favour of more consistent `.get_list()` variants. +* The `URL.is_ssl` property is deprecated in favour of `URL.scheme == "https"`. +* The `URL.join(relative_url=...)` method is now `URL.join(url=...)`. This change does not support warnings for the deprecated usage style. + +One notable aspect of the 0.14.0 release is that it tightens up the public API for `httpx`, by ensuring that several internal attributes and methods have now become strictly private. 
+ +The following previously had nominally public names on the client, but were all undocumented and intended solely for internal usage. They are all now replaced with underscored names, and should not be relied on or accessed. + +These changes should not affect users who have been working from the `httpx` documentation. + +* `.merge_url()`, `.merge_headers()`, `.merge_cookies()`, `.merge_queryparams()` +* `.build_auth()`, `.build_redirect_request()` +* `.redirect_method()`, `.redirect_url()`, `.redirect_headers()`, `.redirect_stream()` +* `.send_handling_redirects()`, `.send_handling_auth()`, `.send_single_request()` +* `.init_transport()`, `.init_proxy_transport()` +* `.proxies`, `.transport`, `.netrc`, `.get_proxy_map()` + +See pull requests #997, #1065, #1071. + +Some areas of API which were already on the deprecation path, and were raising warnings or errors in 0.13.x have now been escalated to being fully removed. + +* Drop `ASGIDispatch`, `WSGIDispatch`, which have been replaced by `ASGITransport`, `WSGITransport`. +* Drop `dispatch=...`` on client, which has been replaced by `transport=...`` +* Drop `soft_limit`, `hard_limit`, which have been replaced by `max_keepalive` and `max_connections`. +* Drop `Response.stream` and` `Response.raw`, which have been replaced by ``.aiter_bytes` and `.aiter_raw`. +* Drop `proxies=` in favor of `proxies=httpx.Proxy(...)`. + +See pull requests #1057, #1058. + +### Added + +* Added dedicated exception class `httpx.HTTPStatusError` for `.raise_for_status()` exceptions. (Pull #1072) +* Added `httpx.create_ssl_context()` helper function. (Pull #996) +* Support for proxy exlcusions like `proxies={"https://www.example.com": None}`. (Pull #1099) +* Support `QueryParams(None)` and `client.params = None`. (Pull #1060) + +### Changed + +* Use `httpx.codes` consistently in favour of `httpx.StatusCodes` which is placed into deprecation. (Pull #1088) +* Usage of `httpx.Timeout()` should now always include an explicit default. Eg. 
`httpx.Timeout(None, pool=5.0)`. (Pull #1085) +* Switch to more concise `httpx.Timeout()` keyword arguments. Eg. `read=5.0`, instead of `read_timeout=5.0`. (Pull #1111) +* Use `httpx.Limits()` instead of `httpx.PoolLimits()`, and `limits=...` instead of `pool_limits=...`. (Pull #1113) +* Keys used with `Client(proxies={...})` should now be in the style of `{"http://": ...}`, rather than `{"http": ...}`. (Pull #1127) +* The multidict methods `Headers.getlist` and `QueryParams.getlist` are deprecated in favour of more consistent `.get_list()` variants. (Pull #1089) +* `URL.port` becomes `Optional[int]`. Now only returns a port if one is explicitly included in the URL string. (Pull #1080) +* The `URL(..., allow_relative=[bool])` parameter no longer exists. All URL instances may be relative. (Pull #1073) +* Drop unnecessary `url.full_path = ...` property setter. (Pull #1069) +* The `URL.join(relative_url=...)` method is now `URL.join(url=...)`. (Pull #1129) +* The `URL.is_ssl` property is deprecated in favour of `URL.scheme == "https"`. (Pull #1128) + +### Fixed + +* Add missing `Response.next()` method. (Pull #1055) +* Ensure all exception classes are exposed as public API. (Pull #1045) +* Support multiple items with an identical field name in multipart encodings. (Pull #777) +* Skip HSTS preloading on single-label domains. (Pull #1074) +* Fixes for `Response.iter_lines()`. (Pull #1033, #1075) +* Ignore permission errors when accessing `.netrc` files. (Pull #1104) +* Allow bare hostnames in `HTTP_PROXY` etc... environment variables. (Pull #1120) +* Settings `app=...` or `transport=...` bypasses any environment based proxy defaults. (Pull #1122) +* Fix handling of `.base_url` when a path component is included in the base URL. (Pull #1130) + +--- + +## 0.13.3 (May 29th, 2020) + +### Fixed + +* Include missing keepalive expiry configuration. (Pull #1005) +* Improved error message when URL redirect has a custom scheme. 
(Pull #1002) + +## 0.13.2 (May 27th, 2020) + +### Fixed + +* Include explicit "Content-Length: 0" on POST, PUT, PATCH if no request body is used. (Pull #995) +* Add `http2` option to `httpx.Client`. (Pull #982) +* Tighten up API typing in places. (Pull #992, #999) + +## 0.13.1 (May 22nd, 2020) + +### Fixed + +* Fix pool options deprecation warning. (Pull #980) +* Include `httpx.URLLib3ProxyTransport` in top-level API. (Pull #979) + +## 0.13.0 (May 22nd, 2020) + +This release switches to `httpcore` for all the internal networking, which means: + +* We're using the same codebase for both our sync and async clients. +* HTTP/2 support is now available with the sync client. +* We no longer have a `urllib3` dependency for our sync client, although there is still an *optional* `URLLib3Transport` class. + +It also means we've had to remove our UDS support, since maintaining that would have meant having to push back our work towards a 1.0 release, which isn't a trade-off we wanted to make. + +We also now have [a public "Transport API"](https://www.python-httpx.org/advanced/#custom-transports), which you can use to implement custom transport implementations against. This formalises and replaces our previously private "Dispatch API". + +### Changed + +* Use `httpcore` for underlying HTTP transport. Drop `urllib3` requirement. (Pull #804, #967) +* Rename pool limit options from `soft_limit`/`hard_limit` to `max_keepalive`/`max_connections`. (Pull #968) +* The previous private "Dispatch API" has now been promoted to a public "Transport API". When customizing the transport use `transport=...`. The `ASGIDispatch` and `WSGIDispatch` class naming is deprecated in favour of `ASGITransport` and `WSGITransport`. (Pull #963) + +### Added + +* Added `URLLib3Transport` class for optional `urllib3` transport support. (Pull #804, #963) +* Streaming multipart uploads. (Pull #857) +* Logging via HTTPCORE_LOG_LEVEL and HTTPX_LOG_LEVEL environment variables +and TRACE level logging. 
(Pull encode/httpcore#79) + +### Fixed + +* Performance improvement in brotli decoder. (Pull #906) +* Proper warning level of deprecation notice in `Response.stream` and `Response.raw`. (Pull #908) +* Fix support for generator based WSGI apps. (Pull #887) +* Reuse of connections on HTTP/2 in close concurrency situations. (Pull encode/httpcore#81) +* Honor HTTP/2 max concurrent streams settings (Pull encode/httpcore#89, encode/httpcore#90) +* Fix bytes support in multipart uploads. (Pull #974) +* Improve typing support for `files=...`. (Pull #976) + +### Removed + +* Dropped support for `Client(uds=...)` (Pull #804) + +## 0.13.0.dev2 (May 12th, 2020) + +The 0.13.0.dev2 is a *pre-release* version. To install it, use `pip install httpx --pre`. + +### Added + +* Logging via HTTPCORE_LOG_LEVEL and HTTPX_LOG_LEVEL environment variables +and TRACE level logging. (HTTPCore Pull #79) + +### Fixed + +* Reuse of connections on HTTP/2 in close concurrency situations. (HTTPCore Pull #81) +* When using an `app=` observe neater disconnect behaviour instead of sending empty body messages. (Pull #919) + +## 0.13.0.dev1 (May 6th, 2020) + +The 0.13.0.dev1 is a *pre-release* version. To install it, use `pip install httpx --pre`. + +### Fixed + +* Passing `http2` flag to proxy dispatchers. (Pull #934) +* Use [`httpcore` v0.8.3](https://github.com/encode/httpcore/releases/tag/0.8.3) +which addresses problems in handling of headers when using proxies. + +## 0.13.0.dev0 (April 30th, 2020) + +The 0.13.0.dev0 is a *pre-release* version. To install it, use `pip install httpx --pre`. + +This release switches to `httpcore` for all the internal networking, which means: + +* We're using the same codebase for both our sync and async clients. +* HTTP/2 support is now available with the sync client. +* We no longer have a `urllib3` dependency for our sync client, although there is still an *optional* `URLLib3Dispatcher` class. 
+ +It also means we've had to remove our UDS support, since maintaining that would have meant having to push back our work towards a 1.0 release, which isn't a trade-off we wanted to make. + +### Changed + +* Use `httpcore` for underlying HTTP transport. Drop `urllib3` requirement. (Pull #804) + +### Added + +* Added `URLLib3Dispatcher` class for optional `urllib3` transport support. (Pull #804) +* Streaming multipart uploads. (Pull #857) + +### Fixed + +* Performance improvement in brotli decoder. (Pull #906) +* Proper warning level of deprecation notice in `Response.stream` and `Response.raw`. (Pull #908) +* Fix support for generator based WSGI apps. (Pull #887) + +### Removed + +* Dropped support for `Client(uds=...)` (Pull #804) + +--- + +## 0.12.1 (March 19th, 2020) + +### Fixed + +* Resolved packaging issue, where additional files were being included. + +## 0.12.0 (March 9th, 2020) + +The 0.12 release tightens up the API expectations for `httpx` by switching to private module names to enforce better clarity around public API. + +All imports of `httpx` should import from the top-level package only, such as `from httpx import Request`, rather than importing from privately namespaced modules such as `from httpx._models import Request`. + +### Added + +* Support making response body available to auth classes with `.requires_response_body`. (Pull #803) +* Export `NetworkError` exception. (Pull #814) +* Add support for `NO_PROXY` environment variable. (Pull #835) + +### Changed + +* Switched to private module names. (Pull #785) +* Drop redirect looping detection and the `RedirectLoop` exception, instead using `TooManyRedirects`. (Pull #819) +* Drop `backend=...` parameter on `AsyncClient`, in favour of always autodetecting `trio`/`asyncio`. (Pull #791) + +### Fixed + +* Support basic auth credentials in proxy URLs. (Pull #780) +* Fix `httpx.Proxy(url, mode="FORWARD_ONLY")` configuration. 
(Pull #788) +* Fallback to setting headers as UTF-8 if no encoding is specified. (Pull #820) +* Close proxy dispatches classes on client close. (Pull #826) +* Support custom `cert` parameters even if `verify=False`. (Pull #796) +* Don't support invalid dict-of-dicts form data in `data=...`. (Pull #811) + +--- + +## 0.11.1 (January 17th, 2020) + +### Fixed + +* Fixed usage of `proxies=...` on `Client()`. (Pull #763) +* Support both `zlib` and `deflate` style encodings on `Content-Encoding: deflate`. (Pull #758) +* Fix for streaming a redirect response body with `allow_redirects=False`. (Pull #766) +* Handle redirect with malformed Location headers missing host. (Pull #774) + +## 0.11.0 (January 9th, 2020) + +The 0.11 release reintroduces our sync support, so that `httpx` now supports both a standard thread-concurrency API, and an async API. + +Existing async `httpx` users that are upgrading to 0.11 should ensure that: + +* Async codebases should always use a client instance to make requests, instead of the top-level API. +* The async client is named as `httpx.AsyncClient()`, instead of `httpx.Client()`. +* When instantiating proxy configurations use the `httpx.Proxy()` class, instead of the previous `httpx.HTTPProxy()`. This new configuration class works for configuring both sync and async clients. + +We believe the API is now pretty much stable, and are aiming for a 1.0 release sometime on or before April 2020. + +### Changed + +- Top level API such as `httpx.get(url, ...)`, `httpx.post(url, ...)`, `httpx.request(method, url, ...)` becomes synchronous. +- Added `httpx.Client()` for synchronous clients, with `httpx.AsyncClient` being used for async clients. +- Switched to `proxies=httpx.Proxy(...)` for proxy configuration. +- Network connection errors are wrapped in `httpx.NetworkError`, rather than exposing lower-level exception types directly. + +### Removed + +- The `request.url.origin` property and `httpx.Origin` class are no longer available. 
+- The per-request `cert`, `verify`, and `trust_env` arguments are escalated from raising errors if used, to no longer being available. These arguments should be used on a per-client instance instead, or in the top-level API. +- The `stream` argument has escalated from raising an error when used, to no longer being available. Use the `client.stream(...)` or `httpx.stream()` streaming API instead. + +### Fixed + +- Redirect loop detection matches against `(method, url)` rather than `url`. (Pull #734) + +--- + +## 0.10.1 (December 31st, 2019) + +### Fixed + +- Fix issue with concurrent connection acquiry. (Pull #700) +- Fix write error on closing HTTP/2 connections. (Pull #699) + +## 0.10.0 (December 29th, 2019) + +The 0.10.0 release makes some changes that will allow us to support both sync and async interfaces. + +In particular with streaming responses the `response.read()` method becomes `response.aread()`, and the `response.close()` method becomes `response.aclose()`. + +If following redirects explicitly the `response.next()` method becomes `response.anext()`. + +### Fixed + +- End HTTP/2 streams immediately on no-body requests, rather than sending an empty body message. (Pull #682) +- Improve typing for `Response.request`: switch from `Optional[Request]` to `Request`. (Pull #666) +- `Response.elapsed` now reflects the entire download time. (Pull #687, #692) + +### Changed + +- Added `AsyncClient` as a synonym for `Client`. (Pull #680) +- Switch to `response.aread()` for conditionally reading streaming responses. (Pull #674) +- Switch to `response.aclose()` and `client.aclose()` for explicit closing. (Pull #674, #675) +- Switch to `response.anext()` for resolving the next redirect response. (Pull #676) + +### Removed + +- When using a client instance, the per-request usage of `verify`, `cert`, and `trust_env` have now escalated from raising a warning to raising an error. You should set these arguments on the client instead. 
(Pull #617) +- Removed the undocumented `request.read()`, since end users should not require it. + +--- + +## 0.9.5 (December 20th, 2019) + +### Fixed + +- Fix Host header and HSTS rewrites when an explicit `:80` port is included in URL. (Pull #649) +- Query Params on the URL string are merged with any `params=...` argument. (Pull #653) +- More robust behavior when closing connections. (Pull #640) +- More robust behavior when handling HTTP/2 headers with trailing whitespace. (Pull #637) +- Allow any explicit `Content-Type` header to take precedence over the encoding default. (Pull #633) + +## 0.9.4 (December 12th, 2019) + +### Fixed + +- Added expiry to Keep-Alive connections, resolving issues with acquiring connections. (Pull #627) +- Increased flow control windows on HTTP/2, resolving download speed issues. (Pull #629) + +## 0.9.3 (December 7th, 2019) + +### Fixed + +- Fixed HTTP/2 with autodetection backend. (Pull #614) + +## 0.9.2 (December 7th, 2019) + +* Released due to packaging build artifact. + +## 0.9.1 (December 6th, 2019) + +* Released due to packaging build artifact. + +## 0.9.0 (December 6th, 2019) + +The 0.9 releases brings some major new features, including: + +* A new streaming API. +* Autodetection of either asyncio or trio. +* Nicer timeout configuration. +* HTTP/2 support off by default, but can be enabled. + +We've also removed all private types from the top-level package export. + +In order to ensure you are only ever working with public API you should make +sure to only import the top-level package eg. `import httpx`, rather than +importing modules within the package. + +### Added + +- Added concurrency backend autodetection. (Pull #585) +- Added `Client(backend='trio')` and `Client(backend='asyncio')` API. (Pull #585) +- Added `response.stream_lines()` API. (Pull #575) +- Added `response.is_error` API. (Pull #574) +- Added support for `timeout=Timeout(5.0, connect_timeout=60.0)` styles. 
(Pull #593) + +### Fixed + +- Requests or Clients with `timeout=None` now correctly always disable timeouts. (Pull #592) +- Request 'Authorization' headers now have priority over `.netrc` authentication info. (Commit 095b691) +- Files without a filename no longer set a Content-Type in multipart data. (Commit ed94950) + +### Changed + +- Added `httpx.stream()` API. Using `stream=True` now results in a warning. (Pull #600, #610) +- HTTP/2 support is switched to "off by default", but can be enabled explicitly. (Pull #584) +- Switched to `Client(http2=True)` API from `Client(http_versions=["HTTP/1.1", "HTTP/2"])`. (Pull #586) +- Removed all private types from the top-level package export. (Pull #608) +- The SSL configuration settings of `verify`, `cert`, and `trust_env` now raise warnings if used per-request when using a Client instance. They should always be set on the Client instance itself. (Pull #597) +- Use plain strings "TUNNEL_ONLY" or "FORWARD_ONLY" on the HTTPProxy `proxy_mode` argument. The `HTTPProxyMode` enum still exists, but its usage will raise warnings. (#610) +- Pool timeouts are now on the timeout configuration, not the pool limits configuration. (Pull #563) +- The timeout configuration is now named `httpx.Timeout(...)`, not `httpx.TimeoutConfig(...)`. The old version currently remains as a synonym for backwards compatability. (Pull #591) + +--- + +## 0.8.0 (November 27, 2019) + +### Removed + +- The synchronous API has been removed, in order to allow us to fundamentally change how we approach supporting both sync and async variants. (See #588 for more details.) + +--- + +## 0.7.8 (November 17, 2019) + +### Added + +- Add support for proxy tunnels for Python 3.6 + asyncio. (Pull #521) + +## 0.7.7 (November 15, 2019) + +### Fixed + +- Resolve an issue with cookies behavior on redirect requests. (Pull #529) + +### Added + +- Add request/response DEBUG logs. (Pull #502) +- Use TRACE log level for low level info. 
(Pull #500) + +## 0.7.6 (November 2, 2019) + +### Removed + +- Drop `proxies` parameter from the high-level API. (Pull #485) + +### Fixed + +- Tweak multipart files: omit null filenames, add support for `str` file contents. (Pull #482) +- Cache NETRC authentication per-client. (Pull #400) +- Rely on `getproxies` for all proxy environment variables. (Pull #470) +- Wait for the `asyncio` stream to close when closing a connection. (Pull #494) + +## 0.7.5 (October 10, 2019) + +### Added + +- Allow lists of values to be passed to `params`. (Pull #386) +- `ASGIDispatch`, `WSGIDispatch` are now available in the `httpx.dispatch` namespace. (Pull #407) +- `HTTPError` is now available in the `httpx` namespace. (Pull #421) +- Add support for `start_tls()` to the Trio concurrency backend. (Pull #467) + +### Fixed + +- Username and password are no longer included in the `Host` header when basic authentication + credentials are supplied via the URL. (Pull #417) + +### Removed + +- The `.delete()` function no longer has `json`, `data`, or `files` parameters + to match the expected semantics of the `DELETE` method. (Pull #408) +- Removed the `trio` extra. Trio support is detected automatically. (Pull #390) + +## 0.7.4 (September 25, 2019) + +### Added + +- Add Trio concurrency backend. (Pull #276) +- Add `params` parameter to `Client` for setting default query parameters. (Pull #372) +- Add support for `SSL_CERT_FILE` and `SSL_CERT_DIR` environment variables. (Pull #307) +- Add debug logging to calls into ASGI apps. (Pull #371) +- Add debug logging to SSL configuration. (Pull #378) + +### Fixed + +- Fix a bug when using `Client` without timeouts in Python 3.6. (Pull #383) +- Propagate `Client` configuration to HTTP proxies. (Pull #377) + +## 0.7.3 (September 20, 2019) + +### Added + +- HTTP Proxy support. (Pulls #259, #353) +- Add Digest authentication. (Pull #332) +- Add `.build_request()` method to `Client` and `AsyncClient`. (Pull #319) +- Add `.elapsed` property on responses. 
(Pull #351) +- Add support for `SSLKEYLOGFILE` in Python 3.8b4+. (Pull #301) + +### Removed + +- Drop NPN support for HTTP version negotiation. (Pull #314) + +### Fixed + +- Fix distribution of type annotations for mypy (Pull #361). +- Set `Host` header when redirecting cross-origin. (Pull #321) +- Drop `Content-Length` headers on `GET` redirects. (Pull #310) +- Raise `KeyError` if header isn't found in `Headers`. (Pull #324) +- Raise `NotRedirectResponse` in `response.next()` if there is no redirection to perform. (Pull #297) +- Fix bug in calculating the HTTP/2 maximum frame size. (Pull #153) + +## 0.7.2 (August 28, 2019) + +- Enforce using `httpx.AsyncioBackend` for the synchronous client. (Pull #232) +- `httpx.ConnectionPool` will properly release a dropped connection. (Pull #230) +- Remove the `raise_app_exceptions` argument from `Client`. (Pull #238) +- `DecodeError` will no longer be raised for an empty body encoded with Brotli. (Pull #237) +- Added `http_versions` parameter to `Client`. (Pull #250) +- Only use HTTP/1.1 on short-lived connections like `httpx.get()`. (Pull #284) +- Convert `Client.cookies` and `Client.headers` when set as a property. (Pull #274) +- Setting `HTTPX_DEBUG=1` enables debug logging on all requests. (Pull #277) + +## 0.7.1 (August 18, 2019) + +- Include files with source distribution to be installable. (Pull #233) + +## 0.7.0 (August 17, 2019) + +- Add the `trust_env` property to `BaseClient`. (Pull #187) +- Add the `links` property to `BaseResponse`. (Pull #211) +- Accept `ssl.SSLContext` instances into `SSLConfig(verify=...)`. (Pull #215) +- Add `Response.stream_text()` with incremental encoding detection. (Pull #183) +- Properly updated the `Host` header when a redirect changes the origin. (Pull #199) +- Ignore invalid `Content-Encoding` headers. (Pull #196) +- Use `~/.netrc` and `~/_netrc` files by default when `trust_env=True`. (Pull #189) +- Create exception base class `HTTPError` with `request` and `response` properties. 
(Pull #162) +- Add HSTS preload list checking within `BaseClient` to upgrade HTTP URLs to HTTPS. (Pull #184) +- Switch IDNA encoding from IDNA 2003 to IDNA 2008. (Pull #161) +- Expose base classes for alternate concurrency backends. (Pull #178) +- Improve Multipart parameter encoding. (Pull #167) +- Add the `headers` proeprty to `BaseClient`. (Pull #159) +- Add support for Google's `brotli` library. (Pull #156) +- Remove deprecated TLS versions (TLSv1 and TLSv1.1) from default `SSLConfig`. (Pull #155) +- Fix `URL.join(...)` to work similarly to RFC 3986 URL joining. (Pull #144) + +--- + +## 0.6.8 (July 25, 2019) + +- Check for disconnections when searching for an available + connection in `ConnectionPool.keepalive_connections` (Pull #145) +- Allow string comparison for `URL` objects (Pull #139) +- Add HTTP status codes 418 and 451 (Pull #135) +- Add support for client certificate passwords (Pull #118) +- Enable post-handshake client cert authentication for TLSv1.3 (Pull #118) +- Disable using `commonName` for hostname checking for OpenSSL 1.1.0+ (Pull #118) +- Detect encoding for `Response.json()` (Pull #116) + +## 0.6.7 (July 8, 2019) + +- Check for connection aliveness on re-acquiry (Pull #111) + +## 0.6.6 (July 3, 2019) + +- Improve `USER_AGENT` (Pull #110) +- Add `Connection: keep-alive` by default to HTTP/1.1 connections. (Pull #110) + +## 0.6.5 (June 27, 2019) + +- Include `Host` header by default. (Pull #109) +- Improve HTTP protocol detection. (Pull #107) + +## 0.6.4 (June 25, 2019) + +- Implement read and write timeouts (Pull #104) + +## 0.6.3 (June 24, 2019) + +- Handle early connection closes (Pull #103) + +## 0.6.2 (June 23, 2019) + +- Use urllib3's `DEFAULT_CIPHERS` for the `SSLConfig` object. (Pull #100) + +## 0.6.1 (June 21, 2019) + +- Add support for setting a `base_url` on the `Client`. 
+ +## 0.6.0 (June 21, 2019) + +- Honor `local_flow_control_window` for HTTP/2 connections (Pull #98) + + diff --git a/.venv/lib/python3.9/site-packages/httpx-0.18.2.dist-info/RECORD b/.venv/lib/python3.9/site-packages/httpx-0.18.2.dist-info/RECORD new file mode 100644 index 0000000..3298c3e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpx-0.18.2.dist-info/RECORD @@ -0,0 +1,49 @@ +httpx-0.18.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +httpx-0.18.2.dist-info/LICENSE.md,sha256=TsWdVE8StfU5o6cW_TIaxYzNgDC0ZSIfLIgCAM3yjY0,1508 +httpx-0.18.2.dist-info/METADATA,sha256=M4xmaC8FXJlOWy5XauBXlgLbYLi3UOB41nBB-PIGZ_E,44718 +httpx-0.18.2.dist-info/RECORD,, +httpx-0.18.2.dist-info/WHEEL,sha256=OqRkF0eY5GHssMorFjlbTIq072vpHpF60fIQA6lS9xA,92 +httpx-0.18.2.dist-info/top_level.txt,sha256=8QYqFolXm27kV0x-8K8V5t-uZskSHKtq8jZVxGwtIq4,24 +httpx/__init__.py,sha256=UaP-xFey6dHDXR9KS5XZF9otl_3WNdk_2xGc1pB_7CE,2761 +httpx/__pycache__/__init__.cpython-39.pyc,, +httpx/__pycache__/__version__.cpython-39.pyc,, +httpx/__pycache__/_api.cpython-39.pyc,, +httpx/__pycache__/_auth.cpython-39.pyc,, +httpx/__pycache__/_client.cpython-39.pyc,, +httpx/__pycache__/_compat.cpython-39.pyc,, +httpx/__pycache__/_config.cpython-39.pyc,, +httpx/__pycache__/_content.cpython-39.pyc,, +httpx/__pycache__/_decoders.cpython-39.pyc,, +httpx/__pycache__/_exceptions.cpython-39.pyc,, +httpx/__pycache__/_models.cpython-39.pyc,, +httpx/__pycache__/_multipart.cpython-39.pyc,, +httpx/__pycache__/_status_codes.cpython-39.pyc,, +httpx/__pycache__/_types.cpython-39.pyc,, +httpx/__pycache__/_utils.cpython-39.pyc,, +httpx/__version__.py,sha256=zSYF8zemUHR-AOPXrxRhqs3fnYIblJerxdT5sdRZ8LU,108 +httpx/_api.py,sha256=HQxn11Qq20DXoSLNDTADpHsNaZZc1LbeQ6UT7dNkkCw,11676 +httpx/_auth.py,sha256=_oB2rvFKngdFpBvFSZKM1k7U1Q4rqRfimCmb7DmtVB0,10242 +httpx/_client.py,sha256=LnpOiIhR_jY_1syD6cGOz-3zRNPdvqCKbCx017R75ck,63058 +httpx/_compat.py,sha256=ifJKFjmqMdB94yDdFELuTfExjg9NL6XgU7kN5XV6vJY,937 
+httpx/_config.py,sha256=zTmxLDbFn4AYISj3BWPFrSnEK6awwN2atpXbJhyMVnY,12108 +httpx/_content.py,sha256=Z48LbGjD2tLH_oPB1dISGi4tpGWg-ncOngclWJblBGQ,6916 +httpx/_decoders.py,sha256=bw5WG3Pra1UbzeoqUMnMNoC_l_UJeSzi1mtSljyJwKw,11705 +httpx/_exceptions.py,sha256=MOrPYbCWreCtlgwn1msgaaTrvFBAM6t5GXe4X8ud9aM,7797 +httpx/_models.py,sha256=8xtd5DylK1qTO-_i7mbmHrNZcbEZjPgNcNs1Z_lOBas,65645 +httpx/_multipart.py,sha256=nuqiW8HT4D1d2W6TeL_hX5LDvPSMEq-yqMKT2MqQisM,6634 +httpx/_status_codes.py,sha256=b4bJYEAu6SsNKx1VhYAaM1UA20h7TyokwU57k3UuCqE,5313 +httpx/_transports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +httpx/_transports/__pycache__/__init__.cpython-39.pyc,, +httpx/_transports/__pycache__/asgi.cpython-39.pyc,, +httpx/_transports/__pycache__/base.cpython-39.pyc,, +httpx/_transports/__pycache__/default.cpython-39.pyc,, +httpx/_transports/__pycache__/mock.cpython-39.pyc,, +httpx/_transports/__pycache__/wsgi.cpython-39.pyc,, +httpx/_transports/asgi.py,sha256=yGmxK-GImAyCRzDUwlX7rFNLeRiohorlJEt2t04_tp0,5189 +httpx/_transports/base.py,sha256=vsxknZSyqLrd0bUTG7xqEjIJUEYyyEJd1QpWGLBd0Hk,6723 +httpx/_transports/default.py,sha256=_-rv1M-i2IZJGsGGI_e9Ubj7OrrhcI-fvtHkX0BFMz0,9539 +httpx/_transports/mock.py,sha256=ITDBS0y8Jg_yTNKXz3SSEnlNRD-c9Yws_I1Xh3JB_Vo,2063 +httpx/_transports/wsgi.py,sha256=6JCzGQQ8DlVO7YvRt_66X6xzCu9y-CGe5Lxsij5c-j4,4481 +httpx/_types.py,sha256=hDqT0vu7Mnak-JyIKnFI6LYOKDBda35zH65BHJiAtrs,2216 +httpx/_utils.py,sha256=yen2GFqPpU8VUQ0vuPOwu31XFE4ocsa9FheV6aq4qGs,16568 +httpx/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/.venv/lib/python3.9/site-packages/httpx-0.18.2.dist-info/WHEEL b/.venv/lib/python3.9/site-packages/httpx-0.18.2.dist-info/WHEEL new file mode 100644 index 0000000..385faab --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpx-0.18.2.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.36.2) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git 
a/.venv/lib/python3.9/site-packages/httpx-0.18.2.dist-info/top_level.txt b/.venv/lib/python3.9/site-packages/httpx-0.18.2.dist-info/top_level.txt new file mode 100644 index 0000000..c180eb2 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpx-0.18.2.dist-info/top_level.txt @@ -0,0 +1,2 @@ +httpx +httpx/_transports diff --git a/.venv/lib/python3.9/site-packages/httpx/__init__.py b/.venv/lib/python3.9/site-packages/httpx/__init__.py new file mode 100644 index 0000000..4af3904 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpx/__init__.py @@ -0,0 +1,124 @@ +from .__version__ import __description__, __title__, __version__ +from ._api import delete, get, head, options, patch, post, put, request, stream +from ._auth import Auth, BasicAuth, DigestAuth +from ._client import USE_CLIENT_DEFAULT, AsyncClient, Client +from ._config import Limits, Proxy, Timeout, create_ssl_context +from ._content import ByteStream +from ._exceptions import ( + CloseError, + ConnectError, + ConnectTimeout, + CookieConflict, + DecodingError, + HTTPError, + HTTPStatusError, + InvalidURL, + LocalProtocolError, + NetworkError, + PoolTimeout, + ProtocolError, + ProxyError, + ReadError, + ReadTimeout, + RemoteProtocolError, + RequestError, + RequestNotRead, + ResponseNotRead, + StreamClosed, + StreamConsumed, + StreamError, + TimeoutException, + TooManyRedirects, + TransportError, + UnsupportedProtocol, + WriteError, + WriteTimeout, +) +from ._models import URL, Cookies, Headers, QueryParams, Request, Response +from ._status_codes import codes +from ._transports.asgi import ASGITransport +from ._transports.base import ( + AsyncBaseTransport, + AsyncByteStream, + BaseTransport, + SyncByteStream, +) +from ._transports.default import AsyncHTTPTransport, HTTPTransport +from ._transports.mock import MockTransport +from ._transports.wsgi import WSGITransport + +__all__ = [ + "__description__", + "__title__", + "__version__", + "ASGITransport", + "AsyncBaseTransport", + "AsyncByteStream", + 
"AsyncClient", + "AsyncHTTPTransport", + "Auth", + "BaseTransport", + "BasicAuth", + "ByteStream", + "Client", + "CloseError", + "codes", + "ConnectError", + "ConnectTimeout", + "CookieConflict", + "Cookies", + "create_ssl_context", + "DecodingError", + "delete", + "DigestAuth", + "get", + "head", + "Headers", + "HTTPError", + "HTTPStatusError", + "HTTPTransport", + "InvalidURL", + "Limits", + "LocalProtocolError", + "MockTransport", + "NetworkError", + "options", + "patch", + "PoolTimeout", + "post", + "ProtocolError", + "Proxy", + "ProxyError", + "put", + "QueryParams", + "ReadError", + "ReadTimeout", + "RemoteProtocolError", + "request", + "Request", + "RequestError", + "RequestNotRead", + "Response", + "ResponseNotRead", + "stream", + "StreamClosed", + "StreamConsumed", + "StreamError", + "SyncByteStream", + "Timeout", + "TimeoutException", + "TooManyRedirects", + "TransportError", + "UnsupportedProtocol", + "URL", + "USE_CLIENT_DEFAULT", + "WriteError", + "WriteTimeout", + "WSGITransport", +] + + +__locals = locals() +for __name in __all__: + if not __name.startswith("__"): + setattr(__locals[__name], "__module__", "httpx") # noqa diff --git a/.venv/lib/python3.9/site-packages/httpx/__version__.py b/.venv/lib/python3.9/site-packages/httpx/__version__.py new file mode 100644 index 0000000..cc82965 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpx/__version__.py @@ -0,0 +1,3 @@ +__title__ = "httpx" +__description__ = "A next generation HTTP client, for Python 3." 
+__version__ = "0.18.2" diff --git a/.venv/lib/python3.9/site-packages/httpx/_api.py b/.venv/lib/python3.9/site-packages/httpx/_api.py new file mode 100644 index 0000000..da81853 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpx/_api.py @@ -0,0 +1,445 @@ +import typing +from contextlib import contextmanager + +from ._client import Client +from ._config import DEFAULT_TIMEOUT_CONFIG +from ._models import Response +from ._types import ( + AuthTypes, + CertTypes, + CookieTypes, + HeaderTypes, + ProxiesTypes, + QueryParamTypes, + RequestContent, + RequestData, + RequestFiles, + TimeoutTypes, + URLTypes, + VerifyTypes, +) + + +def request( + method: str, + url: URLTypes, + *, + params: QueryParamTypes = None, + content: RequestContent = None, + data: RequestData = None, + files: RequestFiles = None, + json: typing.Any = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: AuthTypes = None, + proxies: ProxiesTypes = None, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + allow_redirects: bool = True, + verify: VerifyTypes = True, + cert: CertTypes = None, + trust_env: bool = True, +) -> Response: + """ + Sends an HTTP request. + + **Parameters:** + + * **method** - HTTP method for the new `Request` object: `GET`, `OPTIONS`, + `HEAD`, `POST`, `PUT`, `PATCH`, or `DELETE`. + * **url** - URL for the new `Request` object. + * **params** - *(optional)* Query parameters to include in the URL, as a + string, dictionary, or sequence of two-tuples. + * **content** - *(optional)* Binary content to include in the body of the + request, as bytes or a byte iterator. + * **data** - *(optional)* Form data to include in the body of the request, + as a dictionary. + * **files** - *(optional)* A dictionary of upload files to include in the + body of the request. + * **json** - *(optional)* A JSON serializable object to include in the body + of the request. + * **headers** - *(optional)* Dictionary of HTTP headers to include in the + request. 
+ * **cookies** - *(optional)* Dictionary of Cookie items to include in the + request. + * **auth** - *(optional)* An authentication class to use when sending the + request. + * **proxies** - *(optional)* A dictionary mapping proxy keys to proxy URLs. + * **timeout** - *(optional)* The timeout configuration to use when sending + the request. + * **allow_redirects** - *(optional)* Enables or disables HTTP redirects. + * **verify** - *(optional)* SSL certificates (a.k.a CA bundle) used to + verify the identity of requested hosts. Either `True` (default CA bundle), + a path to an SSL certificate file, an `ssl.SSLContext`, or `False` + (which will disable verification). + * **cert** - *(optional)* An SSL certificate used by the requested host + to authenticate the client. Either a path to an SSL certificate file, or + two-tuple of (certificate file, key file), or a three-tuple of (certificate + file, key file, password). + * **trust_env** - *(optional)* Enables or disables usage of environment + variables for configuration. 
+ + **Returns:** `Response` + + Usage: + + ``` + >>> import httpx + >>> response = httpx.request('GET', 'https://httpbin.org/get') + >>> response + + ``` + """ + with Client( + cookies=cookies, + proxies=proxies, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) as client: + return client.request( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + auth=auth, + allow_redirects=allow_redirects, + ) + + +@contextmanager +def stream( + method: str, + url: URLTypes, + *, + params: QueryParamTypes = None, + content: RequestContent = None, + data: RequestData = None, + files: RequestFiles = None, + json: typing.Any = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: AuthTypes = None, + proxies: ProxiesTypes = None, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + allow_redirects: bool = True, + verify: VerifyTypes = True, + cert: CertTypes = None, + trust_env: bool = True, +) -> typing.Iterator[Response]: + """ + Alternative to `httpx.request()` that streams the response body + instead of loading it into memory at once. + + **Parameters**: See `httpx.request`. 
+ + See also: [Streaming Responses][0] + + [0]: /quickstart#streaming-responses + """ + with Client( + cookies=cookies, + proxies=proxies, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) as client: + with client.stream( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + auth=auth, + allow_redirects=allow_redirects, + ) as response: + yield response + + +def get( + url: URLTypes, + *, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: AuthTypes = None, + proxies: ProxiesTypes = None, + allow_redirects: bool = True, + cert: CertTypes = None, + verify: VerifyTypes = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `GET` request. + + **Parameters**: See `httpx.request`. + + Note that the `data`, `files`, and `json` parameters are not available on + this function, as `GET` requests should not include a request body. + """ + return request( + "GET", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxies=proxies, + allow_redirects=allow_redirects, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def options( + url: URLTypes, + *, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: AuthTypes = None, + proxies: ProxiesTypes = None, + allow_redirects: bool = True, + cert: CertTypes = None, + verify: VerifyTypes = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends an `OPTIONS` request. + + **Parameters**: See `httpx.request`. + + Note that the `data`, `files`, and `json` parameters are not available on + this function, as `OPTIONS` requests should not include a request body. 
+ """ + return request( + "OPTIONS", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxies=proxies, + allow_redirects=allow_redirects, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def head( + url: URLTypes, + *, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: AuthTypes = None, + proxies: ProxiesTypes = None, + allow_redirects: bool = True, + cert: CertTypes = None, + verify: VerifyTypes = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `HEAD` request. + + **Parameters**: See `httpx.request`. + + Note that the `data`, `files`, and `json` parameters are not available on + this function, as `HEAD` requests should not include a request body. + """ + return request( + "HEAD", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxies=proxies, + allow_redirects=allow_redirects, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def post( + url: URLTypes, + *, + content: RequestContent = None, + data: RequestData = None, + files: RequestFiles = None, + json: typing.Any = None, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: AuthTypes = None, + proxies: ProxiesTypes = None, + allow_redirects: bool = True, + cert: CertTypes = None, + verify: VerifyTypes = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `POST` request. + + **Parameters**: See `httpx.request`. 
+ """ + return request( + "POST", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxies=proxies, + allow_redirects=allow_redirects, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def put( + url: URLTypes, + *, + content: RequestContent = None, + data: RequestData = None, + files: RequestFiles = None, + json: typing.Any = None, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: AuthTypes = None, + proxies: ProxiesTypes = None, + allow_redirects: bool = True, + cert: CertTypes = None, + verify: VerifyTypes = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `PUT` request. + + **Parameters**: See `httpx.request`. + """ + return request( + "PUT", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxies=proxies, + allow_redirects=allow_redirects, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def patch( + url: URLTypes, + *, + content: RequestContent = None, + data: RequestData = None, + files: RequestFiles = None, + json: typing.Any = None, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: AuthTypes = None, + proxies: ProxiesTypes = None, + allow_redirects: bool = True, + cert: CertTypes = None, + verify: VerifyTypes = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `PATCH` request. + + **Parameters**: See `httpx.request`. 
+ """ + return request( + "PATCH", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxies=proxies, + allow_redirects=allow_redirects, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def delete( + url: URLTypes, + *, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: AuthTypes = None, + proxies: ProxiesTypes = None, + allow_redirects: bool = True, + cert: CertTypes = None, + verify: VerifyTypes = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `DELETE` request. + + **Parameters**: See `httpx.request`. + + Note that the `data`, `files`, and `json` parameters are not available on + this function, as `DELETE` requests should not include a request body. + """ + return request( + "DELETE", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxies=proxies, + allow_redirects=allow_redirects, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) diff --git a/.venv/lib/python3.9/site-packages/httpx/_auth.py b/.venv/lib/python3.9/site-packages/httpx/_auth.py new file mode 100644 index 0000000..343f9cd --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpx/_auth.py @@ -0,0 +1,304 @@ +import hashlib +import os +import re +import time +import typing +from base64 import b64encode +from urllib.request import parse_http_list + +from ._exceptions import ProtocolError +from ._models import Request, Response +from ._utils import to_bytes, to_str, unquote + + +class Auth: + """ + Base class for all authentication schemes. + + To implement a custom authentication scheme, subclass `Auth` and override + the `.auth_flow()` method. 
+ + If the authentication scheme does I/O such as disk access or network calls, or uses + synchronization primitives such as locks, you should override `.sync_auth_flow()` + and/or `.async_auth_flow()` instead of `.auth_flow()` to provide specialized + implementations that will be used by `Client` and `AsyncClient` respectively. + """ + + requires_request_body = False + requires_response_body = False + + def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + """ + Execute the authentication flow. + + To dispatch a request, `yield` it: + + ``` + yield request + ``` + + The client will `.send()` the response back into the flow generator. You can + access it like so: + + ``` + response = yield request + ``` + + A `return` (or reaching the end of the generator) will result in the + client returning the last response obtained from the server. + + You can dispatch as many requests as is necessary. + """ + yield request + + def sync_auth_flow( + self, request: Request + ) -> typing.Generator[Request, Response, None]: + """ + Execute the authentication flow synchronously. + + By default, this defers to `.auth_flow()`. You should override this method + when the authentication scheme does I/O and/or uses concurrency primitives. + """ + if self.requires_request_body: + request.read() + + flow = self.auth_flow(request) + request = next(flow) + + while True: + response = yield request + if self.requires_response_body: + response.read() + + try: + request = flow.send(response) + except StopIteration: + break + + async def async_auth_flow( + self, request: Request + ) -> typing.AsyncGenerator[Request, Response]: + """ + Execute the authentication flow asynchronously. + + By default, this defers to `.auth_flow()`. You should override this method + when the authentication scheme does I/O and/or uses concurrency primitives. 
+ """ + if self.requires_request_body: + await request.aread() + + flow = self.auth_flow(request) + request = next(flow) + + while True: + response = yield request + if self.requires_response_body: + await response.aread() + + try: + request = flow.send(response) + except StopIteration: + break + + +class FunctionAuth(Auth): + """ + Allows the 'auth' argument to be passed as a simple callable function, + that takes the request, and returns a new, modified request. + """ + + def __init__(self, func: typing.Callable[[Request], Request]) -> None: + self._func = func + + def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + yield self._func(request) + + +class BasicAuth(Auth): + """ + Allows the 'auth' argument to be passed as a (username, password) pair, + and uses HTTP Basic authentication. + """ + + def __init__( + self, username: typing.Union[str, bytes], password: typing.Union[str, bytes] + ): + self._auth_header = self._build_auth_header(username, password) + + def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + request.headers["Authorization"] = self._auth_header + yield request + + def _build_auth_header( + self, username: typing.Union[str, bytes], password: typing.Union[str, bytes] + ) -> str: + userpass = b":".join((to_bytes(username), to_bytes(password))) + token = b64encode(userpass).decode() + return f"Basic {token}" + + +class DigestAuth(Auth): + _ALGORITHM_TO_HASH_FUNCTION: typing.Dict[str, typing.Callable] = { + "MD5": hashlib.md5, + "MD5-SESS": hashlib.md5, + "SHA": hashlib.sha1, + "SHA-SESS": hashlib.sha1, + "SHA-256": hashlib.sha256, + "SHA-256-SESS": hashlib.sha256, + "SHA-512": hashlib.sha512, + "SHA-512-SESS": hashlib.sha512, + } + + def __init__( + self, username: typing.Union[str, bytes], password: typing.Union[str, bytes] + ) -> None: + self._username = to_bytes(username) + self._password = to_bytes(password) + + def auth_flow(self, request: Request) -> typing.Generator[Request, 
Response, None]: + response = yield request + + if response.status_code != 401 or "www-authenticate" not in response.headers: + # If the response is not a 401 then we don't + # need to build an authenticated request. + return + + for auth_header in response.headers.get_list("www-authenticate"): + if auth_header.lower().startswith("digest "): + break + else: + # If the response does not include a 'WWW-Authenticate: Digest ...' + # header, then we don't need to build an authenticated request. + return + + challenge = self._parse_challenge(request, response, auth_header) + request.headers["Authorization"] = self._build_auth_header(request, challenge) + yield request + + def _parse_challenge( + self, request: Request, response: Response, auth_header: str + ) -> "_DigestAuthChallenge": + """ + Returns a challenge from a Digest WWW-Authenticate header. + These take the form of: + `Digest realm="realm@host.com",qop="auth,auth-int",nonce="abc",opaque="xyz"` + """ + scheme, _, fields = auth_header.partition(" ") + + # This method should only ever have been called with a Digest auth header. 
+ assert scheme.lower() == "digest" + + header_dict: typing.Dict[str, str] = {} + for field in parse_http_list(fields): + key, value = field.strip().split("=", 1) + header_dict[key] = unquote(value) + + try: + realm = header_dict["realm"].encode() + nonce = header_dict["nonce"].encode() + algorithm = header_dict.get("algorithm", "MD5") + opaque = header_dict["opaque"].encode() if "opaque" in header_dict else None + qop = header_dict["qop"].encode() if "qop" in header_dict else None + return _DigestAuthChallenge( + realm=realm, nonce=nonce, algorithm=algorithm, opaque=opaque, qop=qop + ) + except KeyError as exc: + message = "Malformed Digest WWW-Authenticate header" + raise ProtocolError(message, request=request) from exc + + def _build_auth_header( + self, request: Request, challenge: "_DigestAuthChallenge" + ) -> str: + hash_func = self._ALGORITHM_TO_HASH_FUNCTION[challenge.algorithm] + + def digest(data: bytes) -> bytes: + return hash_func(data).hexdigest().encode() + + A1 = b":".join((self._username, challenge.realm, self._password)) + + path = request.url.raw_path + A2 = b":".join((request.method.encode(), path)) + # TODO: implement auth-int + HA2 = digest(A2) + + nonce_count = 1 # TODO: implement nonce counting + nc_value = b"%08x" % nonce_count + cnonce = self._get_client_nonce(nonce_count, challenge.nonce) + + HA1 = digest(A1) + if challenge.algorithm.lower().endswith("-sess"): + HA1 = digest(b":".join((HA1, challenge.nonce, cnonce))) + + qop = self._resolve_qop(challenge.qop, request=request) + if qop is None: + digest_data = [HA1, challenge.nonce, HA2] + else: + digest_data = [challenge.nonce, nc_value, cnonce, qop, HA2] + key_digest = b":".join(digest_data) + + format_args = { + "username": self._username, + "realm": challenge.realm, + "nonce": challenge.nonce, + "uri": path, + "response": digest(b":".join((HA1, key_digest))), + "algorithm": challenge.algorithm.encode(), + } + if challenge.opaque: + format_args["opaque"] = challenge.opaque + if qop: + 
format_args["qop"] = b"auth" + format_args["nc"] = nc_value + format_args["cnonce"] = cnonce + + return "Digest " + self._get_header_value(format_args) + + def _get_client_nonce(self, nonce_count: int, nonce: bytes) -> bytes: + s = str(nonce_count).encode() + s += nonce + s += time.ctime().encode() + s += os.urandom(8) + + return hashlib.sha1(s).hexdigest()[:16].encode() + + def _get_header_value(self, header_fields: typing.Dict[str, bytes]) -> str: + NON_QUOTED_FIELDS = ("algorithm", "qop", "nc") + QUOTED_TEMPLATE = '{}="{}"' + NON_QUOTED_TEMPLATE = "{}={}" + + header_value = "" + for i, (field, value) in enumerate(header_fields.items()): + if i > 0: + header_value += ", " + template = ( + QUOTED_TEMPLATE + if field not in NON_QUOTED_FIELDS + else NON_QUOTED_TEMPLATE + ) + header_value += template.format(field, to_str(value)) + + return header_value + + def _resolve_qop( + self, qop: typing.Optional[bytes], request: Request + ) -> typing.Optional[bytes]: + if qop is None: + return None + qops = re.split(b", ?", qop) + if b"auth" in qops: + return b"auth" + + if qops == [b"auth-int"]: + raise NotImplementedError("Digest auth-int support is not yet implemented") + + message = f'Unexpected qop value "{qop!r}" in digest auth' + raise ProtocolError(message, request=request) + + +class _DigestAuthChallenge(typing.NamedTuple): + realm: bytes + nonce: bytes + algorithm: str + opaque: typing.Optional[bytes] + qop: typing.Optional[bytes] diff --git a/.venv/lib/python3.9/site-packages/httpx/_client.py b/.venv/lib/python3.9/site-packages/httpx/_client.py new file mode 100644 index 0000000..c6e1efb --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpx/_client.py @@ -0,0 +1,1982 @@ +import datetime +import enum +import typing +import warnings +from contextlib import contextmanager +from types import TracebackType + +from .__version__ import __version__ +from ._auth import Auth, BasicAuth, FunctionAuth +from ._compat import asynccontextmanager +from ._config import ( + 
DEFAULT_LIMITS, + DEFAULT_MAX_REDIRECTS, + DEFAULT_TIMEOUT_CONFIG, + Limits, + Proxy, + Timeout, +) +from ._decoders import SUPPORTED_DECODERS +from ._exceptions import ( + InvalidURL, + RemoteProtocolError, + TooManyRedirects, + request_context, +) +from ._models import URL, Cookies, Headers, QueryParams, Request, Response +from ._status_codes import codes +from ._transports.asgi import ASGITransport +from ._transports.base import ( + AsyncBaseTransport, + AsyncByteStream, + BaseTransport, + SyncByteStream, +) +from ._transports.default import AsyncHTTPTransport, HTTPTransport +from ._transports.wsgi import WSGITransport +from ._types import ( + AuthTypes, + CertTypes, + CookieTypes, + HeaderTypes, + ProxiesTypes, + QueryParamTypes, + RequestContent, + RequestData, + RequestFiles, + TimeoutTypes, + URLTypes, + VerifyTypes, +) +from ._utils import ( + NetRCInfo, + Timer, + URLPattern, + get_environment_proxies, + get_logger, + same_origin, +) + +# The type annotation for @classmethod and context managers here follows PEP 484 +# https://www.python.org/dev/peps/pep-0484/#annotating-instance-and-class-methods +T = typing.TypeVar("T", bound="Client") +U = typing.TypeVar("U", bound="AsyncClient") + + +class UseClientDefault: + """ + For some parameters such as `auth=...` and `timeout=...` we need to be able + to indicate the default "unset" state, in a way that is distinctly different + to using `None`. + + The default "unset" state indicates that whatever default is set on the + client should be used. This is different to setting `None`, which + explicitly disables the parameter, possibly overriding a client default. + + For example we use `timeout=USE_CLIENT_DEFAULT` in the `request()` signature. + Omitting the `timeout` parameter will send a request using whatever default + timeout has been configured on the client. Including `timeout=None` will + ensure no timeout is used. 
+ + Note that user code shouldn't need to use the `USE_CLIENT_DEFAULT` constant, + but it is used internally when a parameter is not included. + """ + + pass # pragma: nocover + + +USE_CLIENT_DEFAULT = UseClientDefault() + + +logger = get_logger(__name__) + +USER_AGENT = f"python-httpx/{__version__}" +ACCEPT_ENCODING = ", ".join( + [key for key in SUPPORTED_DECODERS.keys() if key != "identity"] +) + + +class ClientState(enum.Enum): + # UNOPENED: + # The client has been instantiated, but has not been used to send a request, + # or been opened by entering the context of a `with` block. + UNOPENED = 1 + # OPENED: + # The client has either sent a request, or is within a `with` block. + OPENED = 2 + # CLOSED: + # The client has either exited the `with` block, or `close()` has + # been called explicitly. + CLOSED = 3 + + +class BoundSyncStream(SyncByteStream): + """ + A byte stream that is bound to a given response instance, and that + ensures the `response.elapsed` is set once the response is closed. + """ + + def __init__( + self, stream: SyncByteStream, response: Response, timer: Timer + ) -> None: + self._stream = stream + self._response = response + self._timer = timer + + def __iter__(self) -> typing.Iterator[bytes]: + for chunk in self._stream: + yield chunk + + def close(self) -> None: + seconds = self._timer.sync_elapsed() + self._response.elapsed = datetime.timedelta(seconds=seconds) + self._stream.close() + + +class BoundAsyncStream(AsyncByteStream): + """ + An async byte stream that is bound to a given response instance, and that + ensures the `response.elapsed` is set once the response is closed. 
+ """ + + def __init__( + self, stream: AsyncByteStream, response: Response, timer: Timer + ) -> None: + self._stream = stream + self._response = response + self._timer = timer + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + async for chunk in self._stream: + yield chunk + + async def aclose(self) -> None: + seconds = await self._timer.async_elapsed() + self._response.elapsed = datetime.timedelta(seconds=seconds) + await self._stream.aclose() + + +class BaseClient: + def __init__( + self, + *, + auth: AuthTypes = None, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + max_redirects: int = DEFAULT_MAX_REDIRECTS, + event_hooks: typing.Mapping[str, typing.List[typing.Callable]] = None, + base_url: URLTypes = "", + trust_env: bool = True, + ): + event_hooks = {} if event_hooks is None else event_hooks + + self._base_url = self._enforce_trailing_slash(URL(base_url)) + + self._auth = self._build_auth(auth) + self._params = QueryParams(params) + self.headers = Headers(headers) + self._cookies = Cookies(cookies) + self._timeout = Timeout(timeout) + self.max_redirects = max_redirects + self._event_hooks = { + "request": list(event_hooks.get("request", [])), + "response": list(event_hooks.get("response", [])), + } + self._trust_env = trust_env + self._netrc = NetRCInfo() + self._state = ClientState.UNOPENED + + @property + def is_closed(self) -> bool: + """ + Check if the client being closed + """ + return self._state == ClientState.CLOSED + + @property + def trust_env(self) -> bool: + return self._trust_env + + def _enforce_trailing_slash(self, url: URL) -> URL: + if url.raw_path.endswith(b"/"): + return url + return url.copy_with(raw_path=url.raw_path + b"/") + + def _get_proxy_map( + self, proxies: typing.Optional[ProxiesTypes], allow_env_proxies: bool + ) -> typing.Dict[str, typing.Optional[Proxy]]: + if proxies is None: + if allow_env_proxies: + return { + 
key: None if url is None else Proxy(url=url) + for key, url in get_environment_proxies().items() + } + return {} + if isinstance(proxies, dict): + new_proxies = {} + for key, value in proxies.items(): + proxy = Proxy(url=value) if isinstance(value, (str, URL)) else value + new_proxies[str(key)] = proxy + return new_proxies + else: + proxy = Proxy(url=proxies) if isinstance(proxies, (str, URL)) else proxies + return {"all://": proxy} + + @property + def timeout(self) -> Timeout: + return self._timeout + + @timeout.setter + def timeout(self, timeout: TimeoutTypes) -> None: + self._timeout = Timeout(timeout) + + @property + def event_hooks(self) -> typing.Dict[str, typing.List[typing.Callable]]: + return self._event_hooks + + @event_hooks.setter + def event_hooks( + self, event_hooks: typing.Dict[str, typing.List[typing.Callable]] + ) -> None: + self._event_hooks = { + "request": list(event_hooks.get("request", [])), + "response": list(event_hooks.get("response", [])), + } + + @property + def auth(self) -> typing.Optional[Auth]: + """ + Authentication class used when none is passed at the request-level. + + See also [Authentication][0]. + + [0]: /quickstart/#authentication + """ + return self._auth + + @auth.setter + def auth(self, auth: AuthTypes) -> None: + self._auth = self._build_auth(auth) + + @property + def base_url(self) -> URL: + """ + Base URL to use when sending requests with relative URLs. + """ + return self._base_url + + @base_url.setter + def base_url(self, url: URLTypes) -> None: + self._base_url = self._enforce_trailing_slash(URL(url)) + + @property + def headers(self) -> Headers: + """ + HTTP headers to include when sending requests. 
+ """ + return self._headers + + @headers.setter + def headers(self, headers: HeaderTypes) -> None: + client_headers = Headers( + { + b"Accept": b"*/*", + b"Accept-Encoding": ACCEPT_ENCODING.encode("ascii"), + b"Connection": b"keep-alive", + b"User-Agent": USER_AGENT.encode("ascii"), + } + ) + client_headers.update(headers) + self._headers = client_headers + + @property + def cookies(self) -> Cookies: + """ + Cookie values to include when sending requests. + """ + return self._cookies + + @cookies.setter + def cookies(self, cookies: CookieTypes) -> None: + self._cookies = Cookies(cookies) + + @property + def params(self) -> QueryParams: + """ + Query parameters to include in the URL when sending requests. + """ + return self._params + + @params.setter + def params(self, params: QueryParamTypes) -> None: + self._params = QueryParams(params) + + def build_request( + self, + method: str, + url: URLTypes, + *, + content: RequestContent = None, + data: RequestData = None, + files: RequestFiles = None, + json: typing.Any = None, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + ) -> Request: + """ + Build and return a request instance. + + * The `params`, `headers` and `cookies` arguments + are merged with any values set on the client. + * The `url` argument is merged with any `base_url` set on the client. + + See also: [Request instances][0] + + [0]: /advanced/#request-instances + """ + url = self._merge_url(url) + headers = self._merge_headers(headers) + cookies = self._merge_cookies(cookies) + params = self._merge_queryparams(params) + return Request( + method, + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + ) + + def _merge_url(self, url: URLTypes) -> URL: + """ + Merge a URL argument together with any 'base_url' on the client, + to create the URL used for the outgoing request. 
+ """ + merge_url = URL(url) + if merge_url.is_relative_url: + # To merge URLs we always append to the base URL. To get this + # behaviour correct we always ensure the base URL ends in a '/' + # seperator, and strip any leading '/' from the merge URL. + # + # So, eg... + # + # >>> client = Client(base_url="https://www.example.com/subpath") + # >>> client.base_url + # URL('https://www.example.com/subpath/') + # >>> client.build_request("GET", "/path").url + # URL('https://www.example.com/subpath/path') + merge_raw_path = self.base_url.raw_path + merge_url.raw_path.lstrip(b"/") + return self.base_url.copy_with(raw_path=merge_raw_path) + return merge_url + + def _merge_cookies( + self, cookies: CookieTypes = None + ) -> typing.Optional[CookieTypes]: + """ + Merge a cookies argument together with any cookies on the client, + to create the cookies used for the outgoing request. + """ + if cookies or self.cookies: + merged_cookies = Cookies(self.cookies) + merged_cookies.update(cookies) + return merged_cookies + return cookies + + def _merge_headers( + self, headers: HeaderTypes = None + ) -> typing.Optional[HeaderTypes]: + """ + Merge a headers argument together with any headers on the client, + to create the headers used for the outgoing request. + """ + merged_headers = Headers(self.headers) + merged_headers.update(headers) + return merged_headers + + def _merge_queryparams( + self, params: QueryParamTypes = None + ) -> typing.Optional[QueryParamTypes]: + """ + Merge a queryparams argument together with any queryparams on the client, + to create the queryparams used for the outgoing request. 
+ """ + if params or self.params: + merged_queryparams = QueryParams(self.params) + merged_queryparams = merged_queryparams.merge(params) + return merged_queryparams + return params + + def _build_auth(self, auth: AuthTypes) -> typing.Optional[Auth]: + if auth is None: + return None + elif isinstance(auth, tuple): + return BasicAuth(username=auth[0], password=auth[1]) + elif isinstance(auth, Auth): + return auth + elif callable(auth): + return FunctionAuth(func=auth) + else: + raise TypeError(f'Invalid "auth" argument: {auth!r}') + + def _build_request_auth( + self, + request: Request, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> Auth: + auth = ( + self._auth if isinstance(auth, UseClientDefault) else self._build_auth(auth) + ) + + if auth is not None: + return auth + + username, password = request.url.username, request.url.password + if username or password: + return BasicAuth(username=username, password=password) + + if self.trust_env and "Authorization" not in request.headers: + credentials = self._netrc.get_credentials(request.url.host) + if credentials is not None: + return BasicAuth(username=credentials[0], password=credentials[1]) + + return Auth() + + def _build_redirect_request(self, request: Request, response: Response) -> Request: + """ + Given a request and a redirect response, return a new request that + should be used to effect the redirect. + """ + method = self._redirect_method(request, response) + url = self._redirect_url(request, response) + headers = self._redirect_headers(request, url, method) + stream = self._redirect_stream(request, method) + cookies = Cookies(self.cookies) + return Request( + method=method, url=url, headers=headers, cookies=cookies, stream=stream + ) + + def _redirect_method(self, request: Request, response: Response) -> str: + """ + When being redirected we may want to change the method of the request + based on certain specs or browser behavior. 
+ """ + method = request.method + + # https://tools.ietf.org/html/rfc7231#section-6.4.4 + if response.status_code == codes.SEE_OTHER and method != "HEAD": + method = "GET" + + # Do what the browsers do, despite standards... + # Turn 302s into GETs. + if response.status_code == codes.FOUND and method != "HEAD": + method = "GET" + + # If a POST is responded to with a 301, turn it into a GET. + # This bizarre behaviour is explained in 'requests' issue 1704. + if response.status_code == codes.MOVED_PERMANENTLY and method == "POST": + method = "GET" + + return method + + def _redirect_url(self, request: Request, response: Response) -> URL: + """ + Return the URL for the redirect to follow. + """ + location = response.headers["Location"] + + try: + url = URL(location) + except InvalidURL as exc: + raise RemoteProtocolError( + f"Invalid URL in location header: {exc}.", request=request + ) from None + + # Handle malformed 'Location' headers that are "absolute" form, have no host. + # See: https://github.com/encode/httpx/issues/771 + if url.scheme and not url.host: + url = url.copy_with(host=request.url.host) + + # Facilitate relative 'Location' headers, as allowed by RFC 7231. + # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource') + if url.is_relative_url: + url = request.url.join(url) + + # Attach previous fragment if needed (RFC 7231 7.1.2) + if request.url.fragment and not url.fragment: + url = url.copy_with(fragment=request.url.fragment) + + return url + + def _redirect_headers(self, request: Request, url: URL, method: str) -> Headers: + """ + Return the headers that should be used for the redirect request. + """ + headers = Headers(request.headers) + + if not same_origin(url, request.url): + # Strip Authorization headers when responses are redirected away from + # the origin. + headers.pop("Authorization", None) + + # Update the Host header. 
+ headers["Host"] = url.netloc.decode("ascii") + + if method != request.method and method == "GET": + # If we've switch to a 'GET' request, then strip any headers which + # are only relevant to the request body. + headers.pop("Content-Length", None) + headers.pop("Transfer-Encoding", None) + + # We should use the client cookie store to determine any cookie header, + # rather than whatever was on the original outgoing request. + headers.pop("Cookie", None) + + return headers + + def _redirect_stream( + self, request: Request, method: str + ) -> typing.Optional[typing.Union[SyncByteStream, AsyncByteStream]]: + """ + Return the body that should be used for the redirect request. + """ + if method != request.method and method == "GET": + return None + + return request.stream + + +class Client(BaseClient): + """ + An HTTP client, with connection pooling, HTTP/2, redirects, cookie persistence, etc. + + Usage: + + ```python + >>> client = httpx.Client() + >>> response = client.get('https://example.org') + ``` + + **Parameters:** + + * **auth** - *(optional)* An authentication class to use when sending + requests. + * **params** - *(optional)* Query parameters to include in request URLs, as + a string, dictionary, or sequence of two-tuples. + * **headers** - *(optional)* Dictionary of HTTP headers to include when + sending requests. + * **cookies** - *(optional)* Dictionary of Cookie items to include when + sending requests. + * **verify** - *(optional)* SSL certificates (a.k.a CA bundle) used to + verify the identity of requested hosts. Either `True` (default CA bundle), + a path to an SSL certificate file, an `ssl.SSLContext`, or `False` + (which will disable verification). + * **cert** - *(optional)* An SSL certificate used by the requested host + to authenticate the client. Either a path to an SSL certificate file, or + two-tuple of (certificate file, key file), or a three-tuple of (certificate + file, key file, password). 
+ * **proxies** - *(optional)* A dictionary mapping proxy keys to proxy + URLs. + * **timeout** - *(optional)* The timeout configuration to use when sending + requests. + * **limits** - *(optional)* The limits configuration to use. + * **max_redirects** - *(optional)* The maximum number of redirect responses + that should be followed. + * **base_url** - *(optional)* A URL to use as the base when building + request URLs. + * **transport** - *(optional)* A transport class to use for sending requests + over the network. + * **app** - *(optional)* An WSGI application to send requests to, + rather than sending actual network requests. + * **trust_env** - *(optional)* Enables or disables usage of environment + variables for configuration. + """ + + def __init__( + self, + *, + auth: AuthTypes = None, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + verify: VerifyTypes = True, + cert: CertTypes = None, + http1: bool = True, + http2: bool = False, + proxies: ProxiesTypes = None, + mounts: typing.Mapping[str, BaseTransport] = None, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + limits: Limits = DEFAULT_LIMITS, + max_redirects: int = DEFAULT_MAX_REDIRECTS, + event_hooks: typing.Mapping[str, typing.List[typing.Callable]] = None, + base_url: URLTypes = "", + transport: BaseTransport = None, + app: typing.Callable = None, + trust_env: bool = True, + ): + super().__init__( + auth=auth, + params=params, + headers=headers, + cookies=cookies, + timeout=timeout, + max_redirects=max_redirects, + event_hooks=event_hooks, + base_url=base_url, + trust_env=trust_env, + ) + + if http2: + try: + import h2 # noqa + except ImportError: # pragma: nocover + raise ImportError( + "Using http2=True, but the 'h2' package is not installed. " + "Make sure to install httpx using `pip install httpx[http2]`." 
+ ) from None + + allow_env_proxies = trust_env and app is None and transport is None + proxy_map = self._get_proxy_map(proxies, allow_env_proxies) + + self._transport = self._init_transport( + verify=verify, + cert=cert, + http1=http1, + http2=http2, + limits=limits, + transport=transport, + app=app, + trust_env=trust_env, + ) + self._mounts: typing.Dict[URLPattern, typing.Optional[BaseTransport]] = { + URLPattern(key): None + if proxy is None + else self._init_proxy_transport( + proxy, + verify=verify, + cert=cert, + http1=http1, + http2=http2, + limits=limits, + trust_env=trust_env, + ) + for key, proxy in proxy_map.items() + } + if mounts is not None: + self._mounts.update( + {URLPattern(key): transport for key, transport in mounts.items()} + ) + + self._mounts = dict(sorted(self._mounts.items())) + + def _init_transport( + self, + verify: VerifyTypes = True, + cert: CertTypes = None, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + transport: BaseTransport = None, + app: typing.Callable = None, + trust_env: bool = True, + ) -> BaseTransport: + if transport is not None: + return transport + + if app is not None: + return WSGITransport(app=app) + + return HTTPTransport( + verify=verify, + cert=cert, + http1=http1, + http2=http2, + limits=limits, + trust_env=trust_env, + ) + + def _init_proxy_transport( + self, + proxy: Proxy, + verify: VerifyTypes = True, + cert: CertTypes = None, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + trust_env: bool = True, + ) -> BaseTransport: + return HTTPTransport( + verify=verify, + cert=cert, + http1=http1, + http2=http2, + limits=limits, + trust_env=trust_env, + proxy=proxy, + ) + + def _transport_for_url(self, url: URL) -> BaseTransport: + """ + Returns the transport instance that should be used for a given URL. + This will either be the standard connection pool, or a proxy. 
+ """ + for pattern, transport in self._mounts.items(): + if pattern.matches(url): + return self._transport if transport is None else transport + + return self._transport + + def request( + self, + method: str, + url: URLTypes, + *, + content: RequestContent = None, + data: RequestData = None, + files: RequestFiles = None, + json: typing.Any = None, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + allow_redirects: bool = True, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Build and send a request. + + Equivalent to: + + ```python + request = client.build_request(...) + response = client.send(request, ...) + ``` + + See `Client.build_request()`, `Client.send()` and + [Merging of configuration][0] for how the various parameters + are merged with client-level configuration. + + [0]: /advanced/#merging-of-configuration + """ + if cookies is not None: + message = ( + "Setting per-request cookies=<...> is being deprecated, because " + "the expected behaviour on cookie persistence is ambiguous. Set " + "cookies directly on the client instance instead." 
+ ) + warnings.warn(message, DeprecationWarning) + + request = self.build_request( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + ) + return self.send( + request, auth=auth, allow_redirects=allow_redirects, timeout=timeout + ) + + @contextmanager + def stream( + self, + method: str, + url: URLTypes, + *, + content: RequestContent = None, + data: RequestData = None, + files: RequestFiles = None, + json: typing.Any = None, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + allow_redirects: bool = True, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> typing.Iterator[Response]: + """ + Alternative to `httpx.request()` that streams the response body + instead of loading it into memory at once. + + **Parameters**: See `httpx.request`. + + See also: [Streaming Responses][0] + + [0]: /quickstart#streaming-responses + """ + request = self.build_request( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + ) + response = self.send( + request=request, + auth=auth, + allow_redirects=allow_redirects, + timeout=timeout, + stream=True, + ) + try: + yield response + finally: + response.close() + + def send( + self, + request: Request, + *, + stream: bool = False, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + allow_redirects: bool = True, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Send a request. + + The request is sent as-is, unmodified. + + Typically you'll want to build one with `Client.build_request()` + so that any client-level configuration is merged into the request, + but passing an explicit `httpx.Request()` is supported as well. 
+ + See also: [Request instances][0] + + [0]: /advanced/#request-instances + """ + if self._state == ClientState.CLOSED: + raise RuntimeError("Cannot send a request, as the client has been closed.") + + self._state = ClientState.OPENED + timeout = ( + self.timeout if isinstance(timeout, UseClientDefault) else Timeout(timeout) + ) + + auth = self._build_request_auth(request, auth) + + response = self._send_handling_auth( + request, + auth=auth, + timeout=timeout, + allow_redirects=allow_redirects, + history=[], + ) + try: + if not stream: + response.read() + + for hook in self._event_hooks["response"]: + hook(response) + + return response + + except Exception as exc: + response.close() + raise exc + + def _send_handling_auth( + self, + request: Request, + auth: Auth, + timeout: Timeout, + allow_redirects: bool, + history: typing.List[Response], + ) -> Response: + auth_flow = auth.sync_auth_flow(request) + try: + request = next(auth_flow) + + for hook in self._event_hooks["request"]: + hook(request) + + while True: + response = self._send_handling_redirects( + request, + timeout=timeout, + allow_redirects=allow_redirects, + history=history, + ) + try: + try: + next_request = auth_flow.send(response) + except StopIteration: + return response + + response.history = list(history) + response.read() + request = next_request + history.append(response) + + except Exception as exc: + response.close() + raise exc + finally: + auth_flow.close() + + def _send_handling_redirects( + self, + request: Request, + timeout: Timeout, + allow_redirects: bool, + history: typing.List[Response], + ) -> Response: + while True: + if len(history) > self.max_redirects: + raise TooManyRedirects( + "Exceeded maximum allowed redirects.", request=request + ) + + response = self._send_single_request(request, timeout) + try: + response.history = list(history) + + if not response.is_redirect: + return response + + request = self._build_redirect_request(request, response) + history = history + 
[response] + + if allow_redirects: + response.read() + else: + response.next_request = request + return response + + except Exception as exc: + response.close() + raise exc + + def _send_single_request(self, request: Request, timeout: Timeout) -> Response: + """ + Sends a single request, without handling any redirections. + """ + transport = self._transport_for_url(request.url) + timer = Timer() + timer.sync_start() + + if not isinstance(request.stream, SyncByteStream): + raise RuntimeError( + "Attempted to send an async request with a sync Client instance." + ) + + with request_context(request=request): + (status_code, headers, stream, extensions) = transport.handle_request( + request.method.encode(), + request.url.raw, + headers=request.headers.raw, + stream=request.stream, + extensions={"timeout": timeout.as_dict()}, + ) + + response = Response( + status_code, + headers=headers, + stream=stream, + extensions=extensions, + request=request, + ) + + response.stream = BoundSyncStream(stream, response=response, timer=timer) + self.cookies.extract_cookies(response) + + status = f"{response.status_code} {response.reason_phrase}" + response_line = f"{response.http_version} {status}" + logger.debug(f'HTTP Request: {request.method} {request.url} "{response_line}"') + + return response + + def get( + self, + url: URLTypes, + *, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + allow_redirects: bool = True, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Send a `GET` request. + + **Parameters**: See `httpx.request`. 
+ """ + return self.request( + "GET", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + allow_redirects=allow_redirects, + timeout=timeout, + ) + + def options( + self, + url: URLTypes, + *, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + allow_redirects: bool = True, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Send an `OPTIONS` request. + + **Parameters**: See `httpx.request`. + """ + return self.request( + "OPTIONS", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + allow_redirects=allow_redirects, + timeout=timeout, + ) + + def head( + self, + url: URLTypes, + *, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + allow_redirects: bool = True, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Send a `HEAD` request. + + **Parameters**: See `httpx.request`. + """ + return self.request( + "HEAD", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + allow_redirects=allow_redirects, + timeout=timeout, + ) + + def post( + self, + url: URLTypes, + *, + content: RequestContent = None, + data: RequestData = None, + files: RequestFiles = None, + json: typing.Any = None, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + allow_redirects: bool = True, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Send a `POST` request. + + **Parameters**: See `httpx.request`. 
+ """ + return self.request( + "POST", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + allow_redirects=allow_redirects, + timeout=timeout, + ) + + def put( + self, + url: URLTypes, + *, + content: RequestContent = None, + data: RequestData = None, + files: RequestFiles = None, + json: typing.Any = None, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + allow_redirects: bool = True, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Send a `PUT` request. + + **Parameters**: See `httpx.request`. + """ + return self.request( + "PUT", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + allow_redirects=allow_redirects, + timeout=timeout, + ) + + def patch( + self, + url: URLTypes, + *, + content: RequestContent = None, + data: RequestData = None, + files: RequestFiles = None, + json: typing.Any = None, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + allow_redirects: bool = True, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Send a `PATCH` request. + + **Parameters**: See `httpx.request`. 
+ """ + return self.request( + "PATCH", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + allow_redirects=allow_redirects, + timeout=timeout, + ) + + def delete( + self, + url: URLTypes, + *, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + allow_redirects: bool = True, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Send a `DELETE` request. + + **Parameters**: See `httpx.request`. + """ + return self.request( + "DELETE", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + allow_redirects=allow_redirects, + timeout=timeout, + ) + + def close(self) -> None: + """ + Close transport and proxies. + """ + if self._state != ClientState.CLOSED: + self._state = ClientState.CLOSED + + self._transport.close() + for transport in self._mounts.values(): + if transport is not None: + transport.close() + + def __enter__(self: T) -> T: + self._state = ClientState.OPENED + + self._transport.__enter__() + for transport in self._mounts.values(): + if transport is not None: + transport.__enter__() + return self + + def __exit__( + self, + exc_type: typing.Type[BaseException] = None, + exc_value: BaseException = None, + traceback: TracebackType = None, + ) -> None: + self._state = ClientState.CLOSED + + self._transport.__exit__(exc_type, exc_value, traceback) + for transport in self._mounts.values(): + if transport is not None: + transport.__exit__(exc_type, exc_value, traceback) + + def __del__(self) -> None: + # We use 'getattr' here, to manage the case where '__del__()' is called + # on a partically initiallized instance that raised an exception during + # the call to '__init__()'. 
+ if getattr(self, "_state", None) == ClientState.OPENED: # noqa: B009 + self.close() + + +class AsyncClient(BaseClient): + """ + An asynchronous HTTP client, with connection pooling, HTTP/2, redirects, + cookie persistence, etc. + + Usage: + + ```python + >>> async with httpx.AsyncClient() as client: + >>> response = await client.get('https://example.org') + ``` + + **Parameters:** + + * **auth** - *(optional)* An authentication class to use when sending + requests. + * **params** - *(optional)* Query parameters to include in request URLs, as + a string, dictionary, or sequence of two-tuples. + * **headers** - *(optional)* Dictionary of HTTP headers to include when + sending requests. + * **cookies** - *(optional)* Dictionary of Cookie items to include when + sending requests. + * **verify** - *(optional)* SSL certificates (a.k.a CA bundle) used to + verify the identity of requested hosts. Either `True` (default CA bundle), + a path to an SSL certificate file, or `False` (disable verification). + * **cert** - *(optional)* An SSL certificate used by the requested host + to authenticate the client. Either a path to an SSL certificate file, or + two-tuple of (certificate file, key file), or a three-tuple of (certificate + file, key file, password). + * **http2** - *(optional)* A boolean indicating if HTTP/2 support should be + enabled. Defaults to `False`. + * **proxies** - *(optional)* A dictionary mapping HTTP protocols to proxy + URLs. + * **timeout** - *(optional)* The timeout configuration to use when sending + requests. + * **limits** - *(optional)* The limits configuration to use. + * **max_redirects** - *(optional)* The maximum number of redirect responses + that should be followed. + * **base_url** - *(optional)* A URL to use as the base when building + request URLs. + * **transport** - *(optional)* A transport class to use for sending requests + over the network. 
+ * **app** - *(optional)* An ASGI application to send requests to, + rather than sending actual network requests. + * **trust_env** - *(optional)* Enables or disables usage of environment + variables for configuration. + """ + + def __init__( + self, + *, + auth: AuthTypes = None, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + verify: VerifyTypes = True, + cert: CertTypes = None, + http1: bool = True, + http2: bool = False, + proxies: ProxiesTypes = None, + mounts: typing.Mapping[str, AsyncBaseTransport] = None, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + limits: Limits = DEFAULT_LIMITS, + max_redirects: int = DEFAULT_MAX_REDIRECTS, + event_hooks: typing.Mapping[str, typing.List[typing.Callable]] = None, + base_url: URLTypes = "", + transport: AsyncBaseTransport = None, + app: typing.Callable = None, + trust_env: bool = True, + ): + super().__init__( + auth=auth, + params=params, + headers=headers, + cookies=cookies, + timeout=timeout, + max_redirects=max_redirects, + event_hooks=event_hooks, + base_url=base_url, + trust_env=trust_env, + ) + + if http2: + try: + import h2 # noqa + except ImportError: # pragma: nocover + raise ImportError( + "Using http2=True, but the 'h2' package is not installed. " + "Make sure to install httpx using `pip install httpx[http2]`." 
+ ) from None + + allow_env_proxies = trust_env and app is None and transport is None + proxy_map = self._get_proxy_map(proxies, allow_env_proxies) + + self._transport = self._init_transport( + verify=verify, + cert=cert, + http1=http1, + http2=http2, + limits=limits, + transport=transport, + app=app, + trust_env=trust_env, + ) + + self._mounts: typing.Dict[URLPattern, typing.Optional[AsyncBaseTransport]] = { + URLPattern(key): None + if proxy is None + else self._init_proxy_transport( + proxy, + verify=verify, + cert=cert, + http1=http1, + http2=http2, + limits=limits, + trust_env=trust_env, + ) + for key, proxy in proxy_map.items() + } + if mounts is not None: + self._mounts.update( + {URLPattern(key): transport for key, transport in mounts.items()} + ) + self._mounts = dict(sorted(self._mounts.items())) + + def _init_transport( + self, + verify: VerifyTypes = True, + cert: CertTypes = None, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + transport: AsyncBaseTransport = None, + app: typing.Callable = None, + trust_env: bool = True, + ) -> AsyncBaseTransport: + if transport is not None: + return transport + + if app is not None: + return ASGITransport(app=app) + + return AsyncHTTPTransport( + verify=verify, + cert=cert, + http1=http1, + http2=http2, + limits=limits, + trust_env=trust_env, + ) + + def _init_proxy_transport( + self, + proxy: Proxy, + verify: VerifyTypes = True, + cert: CertTypes = None, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + trust_env: bool = True, + ) -> AsyncBaseTransport: + return AsyncHTTPTransport( + verify=verify, + cert=cert, + http2=http2, + limits=limits, + trust_env=trust_env, + proxy=proxy, + ) + + def _transport_for_url(self, url: URL) -> AsyncBaseTransport: + """ + Returns the transport instance that should be used for a given URL. + This will either be the standard connection pool, or a proxy. 
+ """ + for pattern, transport in self._mounts.items(): + if pattern.matches(url): + return self._transport if transport is None else transport + + return self._transport + + async def request( + self, + method: str, + url: URLTypes, + *, + content: RequestContent = None, + data: RequestData = None, + files: RequestFiles = None, + json: typing.Any = None, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + allow_redirects: bool = True, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Build and send a request. + + Equivalent to: + + ```python + request = client.build_request(...) + response = await client.send(request, ...) + ``` + + See `AsyncClient.build_request()`, `AsyncClient.send()` + and [Merging of configuration][0] for how the various parameters + are merged with client-level configuration. + + [0]: /advanced/#merging-of-configuration + """ + request = self.build_request( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + ) + response = await self.send( + request, auth=auth, allow_redirects=allow_redirects, timeout=timeout + ) + return response + + @asynccontextmanager + async def stream( + self, + method: str, + url: URLTypes, + *, + content: RequestContent = None, + data: RequestData = None, + files: RequestFiles = None, + json: typing.Any = None, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + allow_redirects: bool = True, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> typing.AsyncIterator[Response]: + """ + Alternative to `httpx.request()` that streams the response body + instead of loading it into memory at once. 
+ + **Parameters**: See `httpx.request`. + + See also: [Streaming Responses][0] + + [0]: /quickstart#streaming-responses + """ + request = self.build_request( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + ) + response = await self.send( + request=request, + auth=auth, + allow_redirects=allow_redirects, + timeout=timeout, + stream=True, + ) + try: + yield response + finally: + await response.aclose() + + async def send( + self, + request: Request, + *, + stream: bool = False, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + allow_redirects: bool = True, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Send a request. + + The request is sent as-is, unmodified. + + Typically you'll want to build one with `AsyncClient.build_request()` + so that any client-level configuration is merged into the request, + but passing an explicit `httpx.Request()` is supported as well. 
+ + See also: [Request instances][0] + + [0]: /advanced/#request-instances + """ + if self._state == ClientState.CLOSED: + raise RuntimeError("Cannot send a request, as the client has been closed.") + + self._state = ClientState.OPENED + timeout = ( + self.timeout if isinstance(timeout, UseClientDefault) else Timeout(timeout) + ) + + auth = self._build_request_auth(request, auth) + + response = await self._send_handling_auth( + request, + auth=auth, + timeout=timeout, + allow_redirects=allow_redirects, + history=[], + ) + try: + if not stream: + await response.aread() + + for hook in self._event_hooks["response"]: + await hook(response) + + return response + + except Exception as exc: + await response.aclose() + raise exc + + async def _send_handling_auth( + self, + request: Request, + auth: Auth, + timeout: Timeout, + allow_redirects: bool, + history: typing.List[Response], + ) -> Response: + auth_flow = auth.async_auth_flow(request) + try: + request = await auth_flow.__anext__() + + for hook in self._event_hooks["request"]: + await hook(request) + + while True: + response = await self._send_handling_redirects( + request, + timeout=timeout, + allow_redirects=allow_redirects, + history=history, + ) + try: + try: + next_request = await auth_flow.asend(response) + except StopAsyncIteration: + return response + + response.history = list(history) + await response.aread() + request = next_request + history.append(response) + + except Exception as exc: + await response.aclose() + raise exc + finally: + await auth_flow.aclose() + + async def _send_handling_redirects( + self, + request: Request, + timeout: Timeout, + allow_redirects: bool, + history: typing.List[Response], + ) -> Response: + while True: + if len(history) > self.max_redirects: + raise TooManyRedirects( + "Exceeded maximum allowed redirects.", request=request + ) + + response = await self._send_single_request(request, timeout) + try: + response.history = list(history) + + if not response.is_redirect: + 
return response + + request = self._build_redirect_request(request, response) + history = history + [response] + + if allow_redirects: + await response.aread() + else: + response.next_request = request + return response + + except Exception as exc: + await response.aclose() + raise exc + + async def _send_single_request( + self, request: Request, timeout: Timeout + ) -> Response: + """ + Sends a single request, without handling any redirections. + """ + transport = self._transport_for_url(request.url) + timer = Timer() + await timer.async_start() + + if not isinstance(request.stream, AsyncByteStream): + raise RuntimeError( + "Attempted to send an sync request with an AsyncClient instance." + ) + + with request_context(request=request): + ( + status_code, + headers, + stream, + extensions, + ) = await transport.handle_async_request( + request.method.encode(), + request.url.raw, + headers=request.headers.raw, + stream=request.stream, + extensions={"timeout": timeout.as_dict()}, + ) + + response = Response( + status_code, + headers=headers, + stream=stream, + extensions=extensions, + request=request, + ) + + response.stream = BoundAsyncStream(stream, response=response, timer=timer) + self.cookies.extract_cookies(response) + + status = f"{response.status_code} {response.reason_phrase}" + response_line = f"{response.http_version} {status}" + logger.debug(f'HTTP Request: {request.method} {request.url} "{response_line}"') + + return response + + async def get( + self, + url: URLTypes, + *, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + allow_redirects: bool = True, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Send a `GET` request. + + **Parameters**: See `httpx.request`. 
+ """ + return await self.request( + "GET", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + allow_redirects=allow_redirects, + timeout=timeout, + ) + + async def options( + self, + url: URLTypes, + *, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + allow_redirects: bool = True, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Send an `OPTIONS` request. + + **Parameters**: See `httpx.request`. + """ + return await self.request( + "OPTIONS", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + allow_redirects=allow_redirects, + timeout=timeout, + ) + + async def head( + self, + url: URLTypes, + *, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + allow_redirects: bool = True, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Send a `HEAD` request. + + **Parameters**: See `httpx.request`. + """ + return await self.request( + "HEAD", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + allow_redirects=allow_redirects, + timeout=timeout, + ) + + async def post( + self, + url: URLTypes, + *, + content: RequestContent = None, + data: RequestData = None, + files: RequestFiles = None, + json: typing.Any = None, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + allow_redirects: bool = True, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Send a `POST` request. + + **Parameters**: See `httpx.request`. 
+ """ + return await self.request( + "POST", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + allow_redirects=allow_redirects, + timeout=timeout, + ) + + async def put( + self, + url: URLTypes, + *, + content: RequestContent = None, + data: RequestData = None, + files: RequestFiles = None, + json: typing.Any = None, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + allow_redirects: bool = True, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Send a `PUT` request. + + **Parameters**: See `httpx.request`. + """ + return await self.request( + "PUT", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + allow_redirects=allow_redirects, + timeout=timeout, + ) + + async def patch( + self, + url: URLTypes, + *, + content: RequestContent = None, + data: RequestData = None, + files: RequestFiles = None, + json: typing.Any = None, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + allow_redirects: bool = True, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Send a `PATCH` request. + + **Parameters**: See `httpx.request`. 
+ """ + return await self.request( + "PATCH", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + allow_redirects=allow_redirects, + timeout=timeout, + ) + + async def delete( + self, + url: URLTypes, + *, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + allow_redirects: bool = True, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Send a `DELETE` request. + + **Parameters**: See `httpx.request`. + """ + return await self.request( + "DELETE", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + allow_redirects=allow_redirects, + timeout=timeout, + ) + + async def aclose(self) -> None: + """ + Close transport and proxies. + """ + if self._state != ClientState.CLOSED: + self._state = ClientState.CLOSED + + await self._transport.aclose() + for proxy in self._mounts.values(): + if proxy is not None: + await proxy.aclose() + + async def __aenter__(self: U) -> U: + self._state = ClientState.OPENED + + await self._transport.__aenter__() + for proxy in self._mounts.values(): + if proxy is not None: + await proxy.__aenter__() + return self + + async def __aexit__( + self, + exc_type: typing.Type[BaseException] = None, + exc_value: BaseException = None, + traceback: TracebackType = None, + ) -> None: + self._state = ClientState.CLOSED + + await self._transport.__aexit__(exc_type, exc_value, traceback) + for proxy in self._mounts.values(): + if proxy is not None: + await proxy.__aexit__(exc_type, exc_value, traceback) + + def __del__(self) -> None: + # We use 'getattr' here, to manage the case where '__del__()' is called + # on a partically initiallized instance that raised an exception during + # the call to '__init__()'. 
+ if getattr(self, "_state", None) == ClientState.OPENED: # noqa: B009 + # Unlike the sync case, we cannot silently close the client when + # it is garbage collected, because `.aclose()` is an async operation, + # but `__del__` is not. + # + # For this reason we require explicit close management for + # `AsyncClient`, and issue a warning on unclosed clients. + # + # The context managed style is usually preferable, because it neatly + # ensures proper resource cleanup: + # + # async with httpx.AsyncClient() as client: + # ... + # + # However, an explicit call to `aclose()` is also sufficient: + # + # client = httpx.AsyncClient() + # try: + # ... + # finally: + # await client.aclose() + warnings.warn( + f"Unclosed {self!r}. " + "See https://www.python-httpx.org/async/#opening-and-closing-clients " + "for details." + ) diff --git a/.venv/lib/python3.9/site-packages/httpx/_compat.py b/.venv/lib/python3.9/site-packages/httpx/_compat.py new file mode 100644 index 0000000..98a3e37 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpx/_compat.py @@ -0,0 +1,25 @@ +""" +The _compat module is used for code which requires branching between different +Python environments. It is excluded from the code coverage checks. +""" +import ssl +import sys + +# `contextlib.asynccontextmanager` exists from Python 3.7 onwards. +# For 3.6 we require the `async_generator` package for a backported version. +try: + from contextlib import asynccontextmanager # type: ignore +except ImportError: + from async_generator import asynccontextmanager # type: ignore # noqa + + +def set_minimum_tls_version_1_2(context: ssl.SSLContext) -> None: + if sys.version_info >= (3, 10): + context.minimum_version = ssl.TLSVersion.TLSv1_2 + else: + # These become deprecated in favor of 'context.minimum_version' + # from Python 3.10 onwards. 
+ context.options |= ssl.OP_NO_SSLv2 + context.options |= ssl.OP_NO_SSLv3 + context.options |= ssl.OP_NO_TLSv1 + context.options |= ssl.OP_NO_TLSv1_1 diff --git a/.venv/lib/python3.9/site-packages/httpx/_config.py b/.venv/lib/python3.9/site-packages/httpx/_config.py new file mode 100644 index 0000000..9d29f9f --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpx/_config.py @@ -0,0 +1,358 @@ +import os +import ssl +import typing +from base64 import b64encode +from pathlib import Path + +import certifi + +from ._compat import set_minimum_tls_version_1_2 +from ._models import URL, Headers +from ._types import CertTypes, HeaderTypes, TimeoutTypes, URLTypes, VerifyTypes +from ._utils import get_ca_bundle_from_env, get_logger + +DEFAULT_CIPHERS = ":".join( + [ + "ECDHE+AESGCM", + "ECDHE+CHACHA20", + "DHE+AESGCM", + "DHE+CHACHA20", + "ECDH+AESGCM", + "DH+AESGCM", + "ECDH+AES", + "DH+AES", + "RSA+AESGCM", + "RSA+AES", + "!aNULL", + "!eNULL", + "!MD5", + "!DSS", + ] +) + + +logger = get_logger(__name__) + + +class UnsetType: + pass # pragma: nocover + + +UNSET = UnsetType() + + +def create_ssl_context( + cert: CertTypes = None, + verify: VerifyTypes = True, + trust_env: bool = True, + http2: bool = False, +) -> ssl.SSLContext: + return SSLConfig( + cert=cert, verify=verify, trust_env=trust_env, http2=http2 + ).ssl_context + + +class SSLConfig: + """ + SSL Configuration. 
+ """ + + DEFAULT_CA_BUNDLE_PATH = Path(certifi.where()) + + def __init__( + self, + *, + cert: CertTypes = None, + verify: VerifyTypes = True, + trust_env: bool = True, + http2: bool = False, + ): + self.cert = cert + self.verify = verify + self.trust_env = trust_env + self.http2 = http2 + self.ssl_context = self.load_ssl_context() + + def load_ssl_context(self) -> ssl.SSLContext: + logger.trace( + f"load_ssl_context " + f"verify={self.verify!r} " + f"cert={self.cert!r} " + f"trust_env={self.trust_env!r} " + f"http2={self.http2!r}" + ) + + if self.verify: + return self.load_ssl_context_verify() + return self.load_ssl_context_no_verify() + + def load_ssl_context_no_verify(self) -> ssl.SSLContext: + """ + Return an SSL context for unverified connections. + """ + context = self._create_default_ssl_context() + context.check_hostname = False + context.verify_mode = ssl.CERT_NONE + self._load_client_certs(context) + return context + + def load_ssl_context_verify(self) -> ssl.SSLContext: + """ + Return an SSL context for verified connections. + """ + if self.trust_env and self.verify is True: + ca_bundle = get_ca_bundle_from_env() + if ca_bundle is not None: + self.verify = ca_bundle + + if isinstance(self.verify, ssl.SSLContext): + # Allow passing in our own SSLContext object that's pre-configured. + context = self.verify + self._load_client_certs(context) + return context + elif isinstance(self.verify, bool): + ca_bundle_path = self.DEFAULT_CA_BUNDLE_PATH + elif Path(self.verify).exists(): + ca_bundle_path = Path(self.verify) + else: + raise IOError( + "Could not find a suitable TLS CA certificate bundle, " + "invalid path: {}".format(self.verify) + ) + + context = self._create_default_ssl_context() + context.verify_mode = ssl.CERT_REQUIRED + context.check_hostname = True + + # Signal to server support for PHA in TLS 1.3. Raises an + # AttributeError if only read-only access is implemented. 
+ try: + context.post_handshake_auth = True # type: ignore + except AttributeError: # pragma: nocover + pass + + # Disable using 'commonName' for SSLContext.check_hostname + # when the 'subjectAltName' extension isn't available. + try: + context.hostname_checks_common_name = False # type: ignore + except AttributeError: # pragma: nocover + pass + + if ca_bundle_path.is_file(): + logger.trace(f"load_verify_locations cafile={ca_bundle_path!s}") + context.load_verify_locations(cafile=str(ca_bundle_path)) + elif ca_bundle_path.is_dir(): + logger.trace(f"load_verify_locations capath={ca_bundle_path!s}") + context.load_verify_locations(capath=str(ca_bundle_path)) + + self._load_client_certs(context) + + return context + + def _create_default_ssl_context(self) -> ssl.SSLContext: + """ + Creates the default SSLContext object that's used for both verified + and unverified connections. + """ + context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) + set_minimum_tls_version_1_2(context) + context.options |= ssl.OP_NO_COMPRESSION + context.set_ciphers(DEFAULT_CIPHERS) + + if ssl.HAS_ALPN: + alpn_idents = ["http/1.1", "h2"] if self.http2 else ["http/1.1"] + context.set_alpn_protocols(alpn_idents) + + if hasattr(context, "keylog_filename"): # pragma: nocover (Available in 3.8+) + keylogfile = os.environ.get("SSLKEYLOGFILE") + if keylogfile and self.trust_env: + context.keylog_filename = keylogfile # type: ignore + + return context + + def _load_client_certs(self, ssl_context: ssl.SSLContext) -> None: + """ + Loads client certificates into our SSLContext object + """ + if self.cert is not None: + if isinstance(self.cert, str): + ssl_context.load_cert_chain(certfile=self.cert) + elif isinstance(self.cert, tuple) and len(self.cert) == 2: + ssl_context.load_cert_chain(certfile=self.cert[0], keyfile=self.cert[1]) + elif isinstance(self.cert, tuple) and len(self.cert) == 3: + ssl_context.load_cert_chain( + certfile=self.cert[0], + keyfile=self.cert[1], + password=self.cert[2], # type: 
ignore + ) + + +class Timeout: + """ + Timeout configuration. + + **Usage**: + + Timeout(None) # No timeouts. + Timeout(5.0) # 5s timeout on all operations. + Timeout(None, connect=5.0) # 5s timeout on connect, no other timeouts. + Timeout(5.0, connect=10.0) # 10s timeout on connect. 5s timeout elsewhere. + Timeout(5.0, pool=None) # No timeout on acquiring connection from pool. + # 5s timeout elsewhere. + """ + + def __init__( + self, + timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET, + *, + connect: typing.Union[None, float, UnsetType] = UNSET, + read: typing.Union[None, float, UnsetType] = UNSET, + write: typing.Union[None, float, UnsetType] = UNSET, + pool: typing.Union[None, float, UnsetType] = UNSET, + ): + if isinstance(timeout, Timeout): + # Passed as a single explicit Timeout. + assert connect is UNSET + assert read is UNSET + assert write is UNSET + assert pool is UNSET + self.connect = timeout.connect # type: typing.Optional[float] + self.read = timeout.read # type: typing.Optional[float] + self.write = timeout.write # type: typing.Optional[float] + self.pool = timeout.pool # type: typing.Optional[float] + elif isinstance(timeout, tuple): + # Passed as a tuple. + self.connect = timeout[0] + self.read = timeout[1] + self.write = None if len(timeout) < 3 else timeout[2] + self.pool = None if len(timeout) < 4 else timeout[3] + elif not ( + isinstance(connect, UnsetType) + or isinstance(read, UnsetType) + or isinstance(write, UnsetType) + or isinstance(pool, UnsetType) + ): + self.connect = connect + self.read = read + self.write = write + self.pool = pool + else: + if isinstance(timeout, UnsetType): + raise ValueError( + "httpx.Timeout must either include a default, or set all " + "four parameters explicitly." 
+ ) + self.connect = timeout if isinstance(connect, UnsetType) else connect + self.read = timeout if isinstance(read, UnsetType) else read + self.write = timeout if isinstance(write, UnsetType) else write + self.pool = timeout if isinstance(pool, UnsetType) else pool + + def as_dict(self) -> typing.Dict[str, typing.Optional[float]]: + return { + "connect": self.connect, + "read": self.read, + "write": self.write, + "pool": self.pool, + } + + def __eq__(self, other: typing.Any) -> bool: + return ( + isinstance(other, self.__class__) + and self.connect == other.connect + and self.read == other.read + and self.write == other.write + and self.pool == other.pool + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + if len({self.connect, self.read, self.write, self.pool}) == 1: + return f"{class_name}(timeout={self.connect})" + return ( + f"{class_name}(connect={self.connect}, " + f"read={self.read}, write={self.write}, pool={self.pool})" + ) + + +class Limits: + """ + Configuration for limits to various client behaviors. + + **Parameters:** + + * **max_connections** - The maximum number of concurrent connections that may be + established. + * **max_keepalive_connections** - Allow the connection pool to maintain + keep-alive connections below this point. Should be less than or equal + to `max_connections`. 
+ """ + + def __init__( + self, + *, + max_connections: int = None, + max_keepalive_connections: int = None, + keepalive_expiry: typing.Optional[float] = 5.0, + ): + self.max_connections = max_connections + self.max_keepalive_connections = max_keepalive_connections + self.keepalive_expiry = keepalive_expiry + + def __eq__(self, other: typing.Any) -> bool: + return ( + isinstance(other, self.__class__) + and self.max_connections == other.max_connections + and self.max_keepalive_connections == other.max_keepalive_connections + and self.keepalive_expiry == other.keepalive_expiry + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + return ( + f"{class_name}(max_connections={self.max_connections}, " + f"max_keepalive_connections={self.max_keepalive_connections}, " + f"keepalive_expiry={self.keepalive_expiry})" + ) + + +class Proxy: + def __init__( + self, url: URLTypes, *, headers: HeaderTypes = None, mode: str = "DEFAULT" + ): + url = URL(url) + headers = Headers(headers) + + if url.scheme not in ("http", "https"): + raise ValueError(f"Unknown scheme for proxy URL {url!r}") + if mode not in ("DEFAULT", "FORWARD_ONLY", "TUNNEL_ONLY"): + raise ValueError(f"Unknown proxy mode {mode!r}") + + if url.username or url.password: + headers.setdefault( + "Proxy-Authorization", + self._build_auth_header(url.username, url.password), + ) + # Remove userinfo from the URL authority, e.g.: + # 'username:password@proxy_host:proxy_port' -> 'proxy_host:proxy_port' + url = url.copy_with(username=None, password=None) + + self.url = url + self.headers = headers + self.mode = mode + + def _build_auth_header(self, username: str, password: str) -> str: + userpass = (username.encode("utf-8"), password.encode("utf-8")) + token = b64encode(b":".join(userpass)).decode() + return f"Basic {token}" + + def __repr__(self) -> str: + return ( + f"Proxy(url={str(self.url)!r}, " + f"headers={dict(self.headers)!r}, " + f"mode={self.mode!r})" + ) + + +DEFAULT_TIMEOUT_CONFIG = 
Timeout(timeout=5.0) +DEFAULT_LIMITS = Limits(max_connections=100, max_keepalive_connections=20) +DEFAULT_MAX_REDIRECTS = 20 diff --git a/.venv/lib/python3.9/site-packages/httpx/_content.py b/.venv/lib/python3.9/site-packages/httpx/_content.py new file mode 100644 index 0000000..86f3c7c --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpx/_content.py @@ -0,0 +1,207 @@ +import inspect +import warnings +from json import dumps as json_dumps +from typing import ( + Any, + AsyncIterable, + AsyncIterator, + Dict, + Iterable, + Iterator, + Tuple, + Union, +) +from urllib.parse import urlencode + +from ._exceptions import StreamClosed, StreamConsumed +from ._multipart import MultipartStream +from ._transports.base import AsyncByteStream, SyncByteStream +from ._types import RequestContent, RequestData, RequestFiles, ResponseContent +from ._utils import peek_filelike_length, primitive_value_to_str + + +class ByteStream(AsyncByteStream, SyncByteStream): + def __init__(self, stream: bytes) -> None: + self._stream = stream + + def __iter__(self) -> Iterator[bytes]: + yield self._stream + + async def __aiter__(self) -> AsyncIterator[bytes]: + yield self._stream + + +class IteratorByteStream(SyncByteStream): + def __init__(self, stream: Iterable[bytes]): + self._stream = stream + self._is_stream_consumed = False + self._is_generator = inspect.isgenerator(stream) + + def __iter__(self) -> Iterator[bytes]: + if self._is_stream_consumed and self._is_generator: + raise StreamConsumed() + + self._is_stream_consumed = True + for part in self._stream: + yield part + + +class AsyncIteratorByteStream(AsyncByteStream): + def __init__(self, stream: AsyncIterable[bytes]): + self._stream = stream + self._is_stream_consumed = False + self._is_generator = inspect.isasyncgen(stream) + + async def __aiter__(self) -> AsyncIterator[bytes]: + if self._is_stream_consumed and self._is_generator: + raise StreamConsumed() + + self._is_stream_consumed = True + async for part in self._stream: + 
yield part + + +class UnattachedStream(AsyncByteStream, SyncByteStream): + """ + If a request or response is serialized using pickle, then it is no longer + attached to a stream for I/O purposes. Any stream operations should result + in `httpx.StreamClosed`. + """ + + def __iter__(self) -> Iterator[bytes]: + raise StreamClosed() + + async def __aiter__(self) -> AsyncIterator[bytes]: + raise StreamClosed() + yield b"" # pragma: nocover + + +def encode_content( + content: Union[str, bytes, Iterable[bytes], AsyncIterable[bytes]] +) -> Tuple[Dict[str, str], Union[SyncByteStream, AsyncByteStream]]: + + if isinstance(content, (bytes, str)): + body = content.encode("utf-8") if isinstance(content, str) else content + content_length = len(body) + headers = {"Content-Length": str(content_length)} if body else {} + return headers, ByteStream(body) + + elif isinstance(content, Iterable): + content_length_or_none = peek_filelike_length(content) + + if content_length_or_none is None: + headers = {"Transfer-Encoding": "chunked"} + else: + headers = {"Content-Length": str(content_length_or_none)} + return headers, IteratorByteStream(content) # type: ignore + + elif isinstance(content, AsyncIterable): + headers = {"Transfer-Encoding": "chunked"} + return headers, AsyncIteratorByteStream(content) + + raise TypeError(f"Unexpected type for 'content', {type(content)!r}") + + +def encode_urlencoded_data( + data: dict, +) -> Tuple[Dict[str, str], ByteStream]: + plain_data = [] + for key, value in data.items(): + if isinstance(value, (list, tuple)): + plain_data.extend([(key, primitive_value_to_str(item)) for item in value]) + else: + plain_data.append((key, primitive_value_to_str(value))) + body = urlencode(plain_data, doseq=True).encode("utf-8") + content_length = str(len(body)) + content_type = "application/x-www-form-urlencoded" + headers = {"Content-Length": content_length, "Content-Type": content_type} + return headers, ByteStream(body) + + +def encode_multipart_data( + data: dict, 
files: RequestFiles, boundary: bytes = None +) -> Tuple[Dict[str, str], MultipartStream]: + multipart = MultipartStream(data=data, files=files, boundary=boundary) + headers = multipart.get_headers() + return headers, multipart + + +def encode_text(text: str) -> Tuple[Dict[str, str], ByteStream]: + body = text.encode("utf-8") + content_length = str(len(body)) + content_type = "text/plain; charset=utf-8" + headers = {"Content-Length": content_length, "Content-Type": content_type} + return headers, ByteStream(body) + + +def encode_html(html: str) -> Tuple[Dict[str, str], ByteStream]: + body = html.encode("utf-8") + content_length = str(len(body)) + content_type = "text/html; charset=utf-8" + headers = {"Content-Length": content_length, "Content-Type": content_type} + return headers, ByteStream(body) + + +def encode_json(json: Any) -> Tuple[Dict[str, str], ByteStream]: + body = json_dumps(json).encode("utf-8") + content_length = str(len(body)) + content_type = "application/json" + headers = {"Content-Length": content_length, "Content-Type": content_type} + return headers, ByteStream(body) + + +def encode_request( + content: RequestContent = None, + data: RequestData = None, + files: RequestFiles = None, + json: Any = None, + boundary: bytes = None, +) -> Tuple[Dict[str, str], Union[SyncByteStream, AsyncByteStream]]: + """ + Handles encoding the given `content`, `data`, `files`, and `json`, + returning a two-tuple of (, ). + """ + if data is not None and not isinstance(data, dict): + # We prefer to seperate `content=` + # for raw request content, and `data=
` for url encoded or + # multipart form content. + # + # However for compat with requests, we *do* still support + # `data=` usages. We deal with that case here, treating it + # as if `content=<...>` had been supplied instead. + message = "Use 'content=<...>' to upload raw bytes/text content." + warnings.warn(message, DeprecationWarning) + return encode_content(data) + + if content is not None: + return encode_content(content) + elif files: + return encode_multipart_data(data or {}, files, boundary) + elif data: + return encode_urlencoded_data(data) + elif json is not None: + return encode_json(json) + + return {}, ByteStream(b"") + + +def encode_response( + content: ResponseContent = None, + text: str = None, + html: str = None, + json: Any = None, +) -> Tuple[Dict[str, str], Union[SyncByteStream, AsyncByteStream]]: + """ + Handles encoding the given `content`, returning a two-tuple of + (, ). + """ + if content is not None: + return encode_content(content) + elif text is not None: + return encode_text(text) + elif html is not None: + return encode_html(html) + elif json is not None: + return encode_json(json) + + return {}, ByteStream(b"") diff --git a/.venv/lib/python3.9/site-packages/httpx/_decoders.py b/.venv/lib/python3.9/site-packages/httpx/_decoders.py new file mode 100644 index 0000000..2230b77 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpx/_decoders.py @@ -0,0 +1,369 @@ +""" +Handlers for Content-Encoding. + +See: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Encoding +""" +import codecs +import io +import typing +import zlib + +from ._exceptions import DecodingError + +try: + import brotlicffi +except ImportError: # pragma: nocover + brotlicffi = None + + +class ContentDecoder: + def decode(self, data: bytes) -> bytes: + raise NotImplementedError() # pragma: nocover + + def flush(self) -> bytes: + raise NotImplementedError() # pragma: nocover + + +class IdentityDecoder(ContentDecoder): + """ + Handle unencoded data. 
+ """ + + def decode(self, data: bytes) -> bytes: + return data + + def flush(self) -> bytes: + return b"" + + +class DeflateDecoder(ContentDecoder): + """ + Handle 'deflate' decoding. + + See: https://stackoverflow.com/questions/1838699 + """ + + def __init__(self) -> None: + self.first_attempt = True + self.decompressor = zlib.decompressobj() + + def decode(self, data: bytes) -> bytes: + was_first_attempt = self.first_attempt + self.first_attempt = False + try: + return self.decompressor.decompress(data) + except zlib.error as exc: + if was_first_attempt: + self.decompressor = zlib.decompressobj(-zlib.MAX_WBITS) + return self.decode(data) + raise DecodingError(str(exc)) from exc + + def flush(self) -> bytes: + try: + return self.decompressor.flush() + except zlib.error as exc: # pragma: nocover + raise DecodingError(str(exc)) from exc + + +class GZipDecoder(ContentDecoder): + """ + Handle 'gzip' decoding. + + See: https://stackoverflow.com/questions/1838699 + """ + + def __init__(self) -> None: + self.decompressor = zlib.decompressobj(zlib.MAX_WBITS | 16) + + def decode(self, data: bytes) -> bytes: + try: + return self.decompressor.decompress(data) + except zlib.error as exc: + raise DecodingError(str(exc)) from exc + + def flush(self) -> bytes: + try: + return self.decompressor.flush() + except zlib.error as exc: # pragma: nocover + raise DecodingError(str(exc)) from exc + + +class BrotliDecoder(ContentDecoder): + """ + Handle 'brotli' decoding. + + Requires `pip install brotlipy`. See: https://brotlipy.readthedocs.io/ + or `pip install brotli`. See https://github.com/google/brotli + Supports both 'brotlipy' and 'Brotli' packages since they share an import + name. The top branches are for 'brotlipy' and bottom branches for 'Brotli' + """ + + def __init__(self) -> None: + if brotlicffi is None: # pragma: nocover + raise ImportError( + "Using 'BrotliDecoder', but the 'brotlicffi' library " + "is not installed." 
+ "Make sure to install httpx using `pip install httpx[brotli]`." + ) from None + + self.decompressor = brotlicffi.Decompressor() + self.seen_data = False + if hasattr(self.decompressor, "decompress"): + self._decompress = self.decompressor.decompress + else: + self._decompress = self.decompressor.process # pragma: nocover + + def decode(self, data: bytes) -> bytes: + if not data: + return b"" + self.seen_data = True + try: + return self.decompressor.decompress(data) + except brotlicffi.Error as exc: + raise DecodingError(str(exc)) from exc + + def flush(self) -> bytes: + if not self.seen_data: + return b"" + try: + if hasattr(self.decompressor, "finish"): + self.decompressor.finish() + return b"" + except brotlicffi.Error as exc: # pragma: nocover + raise DecodingError(str(exc)) from exc + + +class MultiDecoder(ContentDecoder): + """ + Handle the case where multiple encodings have been applied. + """ + + def __init__(self, children: typing.Sequence[ContentDecoder]) -> None: + """ + 'children' should be a sequence of decoders in the order in which + each was applied. + """ + # Note that we reverse the order for decoding. + self.children = list(reversed(children)) + + def decode(self, data: bytes) -> bytes: + for child in self.children: + data = child.decode(data) + return data + + def flush(self) -> bytes: + data = b"" + for child in self.children: + data = child.decode(data) + child.flush() + return data + + +class ByteChunker: + """ + Handles returning byte content in fixed-size chunks. 
+ """ + + def __init__(self, chunk_size: int = None) -> None: + self._buffer = io.BytesIO() + self._chunk_size = chunk_size + + def decode(self, content: bytes) -> typing.List[bytes]: + if self._chunk_size is None: + return [content] + + self._buffer.write(content) + if self._buffer.tell() >= self._chunk_size: + value = self._buffer.getvalue() + chunks = [ + value[i : i + self._chunk_size] + for i in range(0, len(value), self._chunk_size) + ] + if len(chunks[-1]) == self._chunk_size: + self._buffer.seek(0) + self._buffer.truncate() + return chunks + else: + self._buffer.seek(0) + self._buffer.write(chunks[-1]) + self._buffer.truncate() + return chunks[:-1] + else: + return [] + + def flush(self) -> typing.List[bytes]: + value = self._buffer.getvalue() + self._buffer.seek(0) + self._buffer.truncate() + return [value] if value else [] + + +class TextChunker: + """ + Handles returning text content in fixed-size chunks. + """ + + def __init__(self, chunk_size: int = None) -> None: + self._buffer = io.StringIO() + self._chunk_size = chunk_size + + def decode(self, content: str) -> typing.List[str]: + if self._chunk_size is None: + return [content] + + self._buffer.write(content) + if self._buffer.tell() >= self._chunk_size: + value = self._buffer.getvalue() + chunks = [ + value[i : i + self._chunk_size] + for i in range(0, len(value), self._chunk_size) + ] + if len(chunks[-1]) == self._chunk_size: + self._buffer.seek(0) + self._buffer.truncate() + return chunks + else: + self._buffer.seek(0) + self._buffer.write(chunks[-1]) + self._buffer.truncate() + return chunks[:-1] + else: + return [] + + def flush(self) -> typing.List[str]: + value = self._buffer.getvalue() + self._buffer.seek(0) + self._buffer.truncate() + return [value] if value else [] + + +class TextDecoder: + """ + Handles incrementally decoding bytes into text + """ + + def __init__(self, encoding: typing.Optional[str] = None): + self.decoder: typing.Optional[codecs.IncrementalDecoder] = None + if encoding 
is not None: + self.decoder = codecs.getincrementaldecoder(encoding)(errors="strict") + + def decode(self, data: bytes) -> str: + """ + If an encoding is explicitly specified, then we use that. + Otherwise our strategy is to attempt UTF-8, and fallback to Windows 1252. + + Note that UTF-8 is a strict superset of ascii, and Windows 1252 is a + superset of the non-control characters in iso-8859-1, so we essentially + end up supporting any of ascii, utf-8, iso-8859-1, cp1252. + + Given that UTF-8 is now by *far* the most widely used encoding, this + should be a pretty robust strategy for cases where a charset has + not been explicitly included. + + Useful stats on the prevalence of different charsets in the wild... + + * https://w3techs.com/technologies/overview/character_encoding + * https://w3techs.com/technologies/history_overview/character_encoding + + The HTML5 spec also has some useful guidelines, suggesting defaults of + either UTF-8 or Windows 1252 in most cases... + + * https://dev.w3.org/html5/spec-LC/Overview.html + """ + if self.decoder is None: + # If this is the first decode pass then we need to determine which + # encoding to use by attempting UTF-8 and raising any decode errors. + attempt_utf_8 = codecs.getincrementaldecoder("utf-8")(errors="strict") + try: + attempt_utf_8.decode(data) + except UnicodeDecodeError: + # Could not decode as UTF-8. Use Windows 1252. + self.decoder = codecs.getincrementaldecoder("cp1252")(errors="replace") + else: + # Can decode as UTF-8. Use UTF-8 with lenient error settings. + self.decoder = codecs.getincrementaldecoder("utf-8")(errors="replace") + + return self.decoder.decode(data) + + def flush(self) -> str: + if self.decoder is None: + return "" + return self.decoder.decode(b"", True) + + +class LineDecoder: + """ + Handles incrementally reading lines from text. + + Uses universal line decoding, supporting any of `\n`, `\r`, or `\r\n` + as line endings, normalizing to `\n`. 
+ """ + + def __init__(self) -> None: + self.buffer = "" + + def decode(self, text: str) -> typing.List[str]: + lines = [] + + if text and self.buffer and self.buffer[-1] == "\r": + if text.startswith("\n"): + # Handle the case where we have an "\r\n" split across + # our previous input, and our new chunk. + lines.append(self.buffer[:-1] + "\n") + self.buffer = "" + text = text[1:] + else: + # Handle the case where we have "\r" at the end of our + # previous input. + lines.append(self.buffer[:-1] + "\n") + self.buffer = "" + + while text: + num_chars = len(text) + for idx in range(num_chars): + char = text[idx] + next_char = None if idx + 1 == num_chars else text[idx + 1] + if char == "\n": + lines.append(self.buffer + text[: idx + 1]) + self.buffer = "" + text = text[idx + 1 :] + break + elif char == "\r" and next_char == "\n": + lines.append(self.buffer + text[:idx] + "\n") + self.buffer = "" + text = text[idx + 2 :] + break + elif char == "\r" and next_char is not None: + lines.append(self.buffer + text[:idx] + "\n") + self.buffer = "" + text = text[idx + 1 :] + break + elif next_char is None: + self.buffer += text + text = "" + break + + return lines + + def flush(self) -> typing.List[str]: + if self.buffer.endswith("\r"): + # Handle the case where we had a trailing '\r', which could have + # been a '\r\n' pair. 
+ lines = [self.buffer[:-1] + "\n"] + elif self.buffer: + lines = [self.buffer] + else: + lines = [] + self.buffer = "" + return lines + + +SUPPORTED_DECODERS = { + "identity": IdentityDecoder, + "gzip": GZipDecoder, + "deflate": DeflateDecoder, + "br": BrotliDecoder, +} + + +if brotlicffi is None: + SUPPORTED_DECODERS.pop("br") # pragma: nocover diff --git a/.venv/lib/python3.9/site-packages/httpx/_exceptions.py b/.venv/lib/python3.9/site-packages/httpx/_exceptions.py new file mode 100644 index 0000000..b6e59aa --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpx/_exceptions.py @@ -0,0 +1,339 @@ +""" +Our exception hierarchy: + +* HTTPError + x RequestError + + TransportError + - TimeoutException + · ConnectTimeout + · ReadTimeout + · WriteTimeout + · PoolTimeout + - NetworkError + · ConnectError + · ReadError + · WriteError + · CloseError + - ProtocolError + · LocalProtocolError + · RemoteProtocolError + - ProxyError + - UnsupportedProtocol + + DecodingError + + TooManyRedirects + + RequestBodyUnavailable + x HTTPStatusError +* InvalidURL +* CookieConflict +* StreamError + x StreamConsumed + x StreamClosed + x ResponseNotRead + x RequestNotRead +""" +import contextlib +import typing + +if typing.TYPE_CHECKING: + from ._models import Request, Response # pragma: nocover + + +class HTTPError(Exception): + """ + Base class for `RequestError` and `HTTPStatusError`. + + Useful for `try...except` blocks when issuing a request, + and then calling `.raise_for_status()`. + + For example: + + ``` + try: + response = httpx.get("https://www.example.com") + response.raise_for_status() + except httpx.HTTPError as exc: + print(f"HTTP Exception for {exc.request.url} - {exc}") + ``` + """ + + def __init__(self, message: str) -> None: + super().__init__(message) + + +class RequestError(HTTPError): + """ + Base class for all exceptions that may occur when issuing a `.request()`. 
+ """ + + def __init__(self, message: str, *, request: "Request" = None) -> None: + super().__init__(message) + # At the point an exception is raised we won't typically have a request + # instance to associate it with. + # + # The 'request_context' context manager is used within the Client and + # Response methods in order to ensure that any raised exceptions + # have a `.request` property set on them. + self._request = request + + @property + def request(self) -> "Request": + if self._request is None: + raise RuntimeError("The .request property has not been set.") + return self._request + + @request.setter + def request(self, request: "Request") -> None: + self._request = request + + +class TransportError(RequestError): + """ + Base class for all exceptions that occur at the level of the Transport API. + """ + + +# Timeout exceptions... + + +class TimeoutException(TransportError): + """ + The base class for timeout errors. + + An operation has timed out. + """ + + +class ConnectTimeout(TimeoutException): + """ + Timed out while connecting to the host. + """ + + +class ReadTimeout(TimeoutException): + """ + Timed out while receiving data from the host. + """ + + +class WriteTimeout(TimeoutException): + """ + Timed out while sending data to the host. + """ + + +class PoolTimeout(TimeoutException): + """ + Timed out waiting to acquire a connection from the pool. + """ + + +# Core networking exceptions... + + +class NetworkError(TransportError): + """ + The base class for network-related errors. + + An error occurred while interacting with the network. + """ + + +class ReadError(NetworkError): + """ + Failed to receive data from the network. + """ + + +class WriteError(NetworkError): + """ + Failed to send data through the network. + """ + + +class ConnectError(NetworkError): + """ + Failed to establish a connection. + """ + + +class CloseError(NetworkError): + """ + Failed to close a connection. + """ + + +# Other transport exceptions... 
+ + +class ProxyError(TransportError): + """ + An error occurred while establishing a proxy connection. + """ + + +class UnsupportedProtocol(TransportError): + """ + Attempted to make a request to an unsupported protocol. + + For example issuing a request to `ftp://www.example.com`. + """ + + +class ProtocolError(TransportError): + """ + The protocol was violated. + """ + + +class LocalProtocolError(ProtocolError): + """ + A protocol was violated by the client. + + For example if the user instantiated a `Request` instance explicitly, + failed to include the mandatory `Host:` header, and then issued it directly + using `client.send()`. + """ + + +class RemoteProtocolError(ProtocolError): + """ + The protocol was violated by the server. + + For exaample, returning malformed HTTP. + """ + + +# Other request exceptions... + + +class DecodingError(RequestError): + """ + Decoding of the response failed, due to a malformed encoding. + """ + + +class TooManyRedirects(RequestError): + """ + Too many redirects. + """ + + +# Client errors + + +class HTTPStatusError(HTTPError): + """ + The response had an error HTTP status of 4xx or 5xx. + + May be raised when calling `response.raise_for_status()` + """ + + def __init__( + self, message: str, *, request: "Request", response: "Response" + ) -> None: + super().__init__(message) + self.request = request + self.response = response + + +class InvalidURL(Exception): + """ + URL is improperly formed or cannot be parsed. + """ + + def __init__(self, message: str) -> None: + super().__init__(message) + + +class CookieConflict(Exception): + """ + Attempted to lookup a cookie by name, but multiple cookies existed. + + Can occur when calling `response.cookies.get(...)`. + """ + + def __init__(self, message: str) -> None: + super().__init__(message) + + +# Stream exceptions... + +# These may occur as the result of a programming error, by accessing +# the request/response stream in an invalid manner. 
+ + +class StreamError(RuntimeError): + """ + The base class for stream exceptions. + + The developer made an error in accessing the request stream in + an invalid way. + """ + + def __init__(self, message: str) -> None: + super().__init__(message) + + +class StreamConsumed(StreamError): + """ + Attempted to read or stream content, but the content has already + been streamed. + """ + + def __init__(self) -> None: + message = ( + "Attempted to read or stream some content, but the content has " + "already been streamed. For requests, this could be due to passing " + "a generator as request content, and then receiving a redirect " + "response or a secondary request as part of an authentication flow." + "For responses, this could be due to attempting to stream the response " + "content more than once." + ) + super().__init__(message) + + +class StreamClosed(StreamError): + """ + Attempted to read or stream response content, but the request has been + closed. + """ + + def __init__(self) -> None: + message = ( + "Attempted to read or stream content, but the stream has " "been closed." + ) + super().__init__(message) + + +class ResponseNotRead(StreamError): + """ + Attempted to access streaming response content, without having called `read()`. + """ + + def __init__(self) -> None: + message = "Attempted to access streaming response content, without having called `read()`." + super().__init__(message) + + +class RequestNotRead(StreamError): + """ + Attempted to access streaming request content, without having called `read()`. + """ + + def __init__(self) -> None: + message = "Attempted to access streaming request content, without having called `read()`." + super().__init__(message) + + +@contextlib.contextmanager +def request_context(request: "Request" = None) -> typing.Iterator[None]: + """ + A context manager that can be used to attach the given request context + to any `RequestError` exceptions that are raised within the block. 
+ """ + try: + yield + except RequestError as exc: + if request is not None: + exc.request = request + raise exc diff --git a/.venv/lib/python3.9/site-packages/httpx/_models.py b/.venv/lib/python3.9/site-packages/httpx/_models.py new file mode 100644 index 0000000..06ebb92 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpx/_models.py @@ -0,0 +1,1843 @@ +import cgi +import datetime +import email.message +import json as jsonlib +import typing +import urllib.request +from collections.abc import MutableMapping +from http.cookiejar import Cookie, CookieJar +from urllib.parse import parse_qs, quote, unquote, urlencode + +import idna +import rfc3986 +import rfc3986.exceptions + +from ._content import ByteStream, UnattachedStream, encode_request, encode_response +from ._decoders import ( + SUPPORTED_DECODERS, + ByteChunker, + ContentDecoder, + IdentityDecoder, + LineDecoder, + MultiDecoder, + TextChunker, + TextDecoder, +) +from ._exceptions import ( + CookieConflict, + HTTPStatusError, + InvalidURL, + RequestNotRead, + ResponseNotRead, + StreamClosed, + StreamConsumed, + request_context, +) +from ._status_codes import codes +from ._transports.base import AsyncByteStream, SyncByteStream +from ._types import ( + CookieTypes, + HeaderTypes, + PrimitiveData, + QueryParamTypes, + RawURL, + RequestContent, + RequestData, + RequestFiles, + ResponseContent, + URLTypes, +) +from ._utils import ( + guess_json_utf, + is_known_encoding, + normalize_header_key, + normalize_header_value, + obfuscate_sensitive_headers, + parse_header_links, + primitive_value_to_str, +) + + +class URL: + """ + url = httpx.URL("HTTPS://jo%40email.com:a%20secret@müller.de:1234/pa%20th?search=ab#anchorlink") + + assert url.scheme == "https" + assert url.username == "jo@email.com" + assert url.password == "a secret" + assert url.userinfo == b"jo%40email.com:a%20secret" + assert url.host == "müller.de" + assert url.raw_host == b"xn--mller-kva.de" + assert url.port == 1234 + assert url.netloc == 
b"xn--mller-kva.de:1234" + assert url.path == "/pa th" + assert url.query == b"?search=ab" + assert url.raw_path == b"/pa%20th?search=ab" + assert url.fragment == "anchorlink" + + The components of a URL are broken down like this: + + https://jo%40email.com:a%20secret@müller.de:1234/pa%20th?search=ab#anchorlink + [scheme] [ username ] [password] [ host ][port][ path ] [ query ] [fragment] + [ userinfo ] [ netloc ][ raw_path ] + + Note that: + + * `url.scheme` is normalized to always be lowercased. + + * `url.host` is normalized to always be lowercased. Internationalized domain + names are represented in unicode, without IDNA encoding applied. For instance: + + url = httpx.URL("http://中国.icom.museum") + assert url.host == "中国.icom.museum" + url = httpx.URL("http://xn--fiqs8s.icom.museum") + assert url.host == "中国.icom.museum" + + * `url.raw_host` is normalized to always be lowercased, and is IDNA encoded. + + url = httpx.URL("http://中国.icom.museum") + assert url.raw_host == b"xn--fiqs8s.icom.museum" + url = httpx.URL("http://xn--fiqs8s.icom.museum") + assert url.raw_host == b"xn--fiqs8s.icom.museum" + + * `url.port` is either None or an integer. URLs that include the default port for + "http", "https", "ws", "wss", and "ftp" schemes have their port normalized to `None`. + + assert httpx.URL("http://example.com") == httpx.URL("http://example.com:80") + assert httpx.URL("http://example.com").port is None + assert httpx.URL("http://example.com:80").port is None + + * `url.userinfo` is raw bytes, without URL escaping. Usually you'll want to work with + `url.username` and `url.password` instead, which handle the URL escaping. + + * `url.raw_path` is raw bytes of both the path and query, without URL escaping. + This portion is used as the target when constructing HTTP requests. Usually you'll + want to work with `url.path` instead. + + * `url.query` is raw bytes, without URL escaping. 
A URL query string portion can only + be properly URL escaped when decoding the parameter names and values themselves. + """ + + def __init__( + self, url: typing.Union["URL", str, RawURL] = "", **kwargs: typing.Any + ) -> None: + if isinstance(url, (str, tuple)): + if isinstance(url, tuple): + raw_scheme, raw_host, port, raw_path = url + scheme = raw_scheme.decode("ascii") + host = raw_host.decode("ascii") + if host and ":" in host and host[0] != "[": + # it's an IPv6 address, so it should be enclosed in "[" and "]" + # ref: https://tools.ietf.org/html/rfc2732#section-2 + # ref: https://tools.ietf.org/html/rfc3986#section-3.2.2 + host = f"[{host}]" + port_str = "" if port is None else f":{port}" + path = raw_path.decode("ascii") + url = f"{scheme}://{host}{port_str}{path}" + + try: + self._uri_reference = rfc3986.iri_reference(url).encode() + except rfc3986.exceptions.InvalidAuthority as exc: + raise InvalidURL(message=str(exc)) from None + + if self.is_absolute_url: + # We don't want to normalize relative URLs, since doing so + # removes any leading `../` portion. + self._uri_reference = self._uri_reference.normalize() + elif isinstance(url, URL): + self._uri_reference = url._uri_reference + else: + raise TypeError( + f"Invalid type for url. Expected str or httpx.URL, got {type(url)}: {url!r}" + ) + + # Perform port normalization, following the WHATWG spec for default ports. 
+ # + # See: + # * https://tools.ietf.org/html/rfc3986#section-3.2.3 + # * https://url.spec.whatwg.org/#url-miscellaneous + # * https://url.spec.whatwg.org/#scheme-state + default_port = { + "ftp": ":21", + "http": ":80", + "https": ":443", + "ws": ":80", + "wss": ":443", + }.get(self._uri_reference.scheme, "") + authority = self._uri_reference.authority or "" + if default_port and authority.endswith(default_port): + authority = authority[: -len(default_port)] + self._uri_reference = self._uri_reference.copy_with(authority=authority) + + if kwargs: + self._uri_reference = self.copy_with(**kwargs)._uri_reference + + @property + def scheme(self) -> str: + """ + The URL scheme, such as "http", "https". + Always normalised to lowercase. + """ + return self._uri_reference.scheme or "" + + @property + def raw_scheme(self) -> bytes: + """ + The raw bytes representation of the URL scheme, such as b"http", b"https". + Always normalised to lowercase. + """ + return self.scheme.encode("ascii") + + @property + def userinfo(self) -> bytes: + """ + The URL userinfo as a raw bytestring. + For example: b"jo%40email.com:a%20secret". + """ + userinfo = self._uri_reference.userinfo or "" + return userinfo.encode("ascii") + + @property + def username(self) -> str: + """ + The URL username as a string, with URL decoding applied. + For example: "jo@email.com" + """ + userinfo = self._uri_reference.userinfo or "" + return unquote(userinfo.partition(":")[0]) + + @property + def password(self) -> str: + """ + The URL password as a string, with URL decoding applied. + For example: "a secret" + """ + userinfo = self._uri_reference.userinfo or "" + return unquote(userinfo.partition(":")[2]) + + @property + def host(self) -> str: + """ + The URL host as a string. + Always normalized to lowercase, with IDNA hosts decoded into unicode. 
+ + Examples: + + url = httpx.URL("http://www.EXAMPLE.org") + assert url.host == "www.example.org" + + url = httpx.URL("http://中国.icom.museum") + assert url.host == "中国.icom.museum" + + url = httpx.URL("http://xn--fiqs8s.icom.museum") + assert url.host == "中国.icom.museum" + + url = httpx.URL("https://[::ffff:192.168.0.1]") + assert url.host == "::ffff:192.168.0.1" + """ + host: str = self._uri_reference.host or "" + + if host and ":" in host and host[0] == "[": + # it's an IPv6 address + host = host.lstrip("[").rstrip("]") + + if host.startswith("xn--"): + host = idna.decode(host) + + return host + + @property + def raw_host(self) -> bytes: + """ + The raw bytes representation of the URL host. + Always normalized to lowercase, and IDNA encoded. + + Examples: + + url = httpx.URL("http://www.EXAMPLE.org") + assert url.raw_host == b"www.example.org" + + url = httpx.URL("http://中国.icom.museum") + assert url.raw_host == b"xn--fiqs8s.icom.museum" + + url = httpx.URL("http://xn--fiqs8s.icom.museum") + assert url.raw_host == b"xn--fiqs8s.icom.museum" + + url = httpx.URL("https://[::ffff:192.168.0.1]") + assert url.raw_host == b"::ffff:192.168.0.1" + """ + host: str = self._uri_reference.host or "" + + if host and ":" in host and host[0] == "[": + # it's an IPv6 address + host = host.lstrip("[").rstrip("]") + + return host.encode("ascii") + + @property + def port(self) -> typing.Optional[int]: + """ + The URL port as an integer. + + Note that the URL class performs port normalization as per the WHATWG spec. + Default ports for "http", "https", "ws", "wss", and "ftp" schemes are always + treated as `None`. + + For example: + + assert httpx.URL("http://www.example.com") == httpx.URL("http://www.example.com:80") + assert httpx.URL("http://www.example.com:80").port is None + """ + port = self._uri_reference.port + return int(port) if port else None + + @property + def netloc(self) -> bytes: + """ + Either `` or `:` as bytes. + Always normalized to lowercase, and IDNA encoded. 
+ + This property may be used for generating the value of a request + "Host" header. + """ + host = self._uri_reference.host or "" + port = self._uri_reference.port + netloc = host.encode("ascii") + if port: + netloc = netloc + b":" + port.encode("ascii") + return netloc + + @property + def path(self) -> str: + """ + The URL path as a string. Excluding the query string, and URL decoded. + + For example: + + url = httpx.URL("https://example.com/pa%20th") + assert url.path == "/pa th" + """ + path = self._uri_reference.path or "/" + return unquote(path) + + @property + def query(self) -> bytes: + """ + The URL query string, as raw bytes, excluding the leading b"?". + + This is neccessarily a bytewise interface, because we cannot + perform URL decoding of this representation until we've parsed + the keys and values into a QueryParams instance. + + For example: + + url = httpx.URL("https://example.com/?filter=some%20search%20terms") + assert url.query == b"filter=some%20search%20terms" + """ + query = self._uri_reference.query or "" + return query.encode("ascii") + + @property + def params(self) -> "QueryParams": + """ + The URL query parameters, neatly parsed and packaged into an immutable + multidict representation. + """ + return QueryParams(self._uri_reference.query) + + @property + def raw_path(self) -> bytes: + """ + The complete URL path and query string as raw bytes. + Used as the target when constructing HTTP requests. + + For example: + + GET /users?search=some%20text HTTP/1.1 + Host: www.example.org + Connection: close + """ + path = self._uri_reference.path or "/" + if self._uri_reference.query is not None: + path += "?" + self._uri_reference.query + return path.encode("ascii") + + @property + def fragment(self) -> str: + """ + The URL fragments, as used in HTML anchors. + As a string, without the leading '#'. 
+ """ + return unquote(self._uri_reference.fragment or "") + + @property + def raw(self) -> RawURL: + """ + The URL in the raw representation used by the low level + transport API. See `BaseTransport.handle_request`. + + Provides the (scheme, host, port, target) for the outgoing request. + """ + return ( + self.raw_scheme, + self.raw_host, + self.port, + self.raw_path, + ) + + @property + def is_absolute_url(self) -> bool: + """ + Return `True` for absolute URLs such as 'http://example.com/path', + and `False` for relative URLs such as '/path'. + """ + # We don't use `.is_absolute` from `rfc3986` because it treats + # URLs with a fragment portion as not absolute. + # What we actually care about is if the URL provides + # a scheme and hostname to which connections should be made. + return bool(self._uri_reference.scheme and self._uri_reference.host) + + @property + def is_relative_url(self) -> bool: + """ + Return `False` for absolute URLs such as 'http://example.com/path', + and `True` for relative URLs such as '/path'. + """ + return not self.is_absolute_url + + def copy_with(self, **kwargs: typing.Any) -> "URL": + """ + Copy this URL, returning a new URL with some components altered. + Accepts the same set of parameters as the components that are made + available via properties on the `URL` class. + + For example: + + url = httpx.URL("https://www.example.com").copy_with(username="jo@gmail.com", password="a secret") + assert url == "https://jo%40email.com:a%20secret@www.example.com" + """ + allowed = { + "scheme": str, + "username": str, + "password": str, + "userinfo": bytes, + "host": str, + "port": int, + "netloc": bytes, + "path": str, + "query": bytes, + "raw_path": bytes, + "fragment": str, + "params": object, + } + + # Step 1 + # ====== + # + # Perform type checking for all supported keyword arguments. 
+ for key, value in kwargs.items(): + if key not in allowed: + message = f"{key!r} is an invalid keyword argument for copy_with()" + raise TypeError(message) + if value is not None and not isinstance(value, allowed[key]): + expected = allowed[key].__name__ + seen = type(value).__name__ + message = f"Argument {key!r} must be {expected} but got {seen}" + raise TypeError(message) + + # Step 2 + # ====== + # + # Consolidate "username", "password", "userinfo", "host", "port" and "netloc" + # into a single "authority" keyword, for `rfc3986`. + if "username" in kwargs or "password" in kwargs: + # Consolidate "username" and "password" into "userinfo". + username = quote(kwargs.pop("username", self.username) or "") + password = quote(kwargs.pop("password", self.password) or "") + userinfo = f"{username}:{password}" if password else username + kwargs["userinfo"] = userinfo.encode("ascii") + + if "host" in kwargs or "port" in kwargs: + # Consolidate "host" and "port" into "netloc". + host = kwargs.pop("host", self.host) or "" + port = kwargs.pop("port", self.port) + + if host and ":" in host and host[0] != "[": + # IPv6 addresses need to be escaped within sqaure brackets. + host = f"[{host}]" + + kwargs["netloc"] = ( + f"{host}:{port}".encode("ascii") + if port is not None + else host.encode("ascii") + ) + + if "userinfo" in kwargs or "netloc" in kwargs: + # Consolidate "userinfo" and "netloc" into authority. + userinfo = (kwargs.pop("userinfo", self.userinfo) or b"").decode("ascii") + netloc = (kwargs.pop("netloc", self.netloc) or b"").decode("ascii") + authority = f"{userinfo}@{netloc}" if userinfo else netloc + kwargs["authority"] = authority + + # Step 3 + # ====== + # + # Wrangle any "path", "query", "raw_path" and "params" keywords into + # "query" and "path" keywords for `rfc3986`. + if "raw_path" in kwargs: + # If "raw_path" is included, then split it into "path" and "query" components. 
+ raw_path = kwargs.pop("raw_path") or b"" + path, has_query, query = raw_path.decode("ascii").partition("?") + kwargs["path"] = path + kwargs["query"] = query if has_query else None + + else: + if kwargs.get("path") is not None: + # Ensure `kwargs["path"] = ` for `rfc3986`. + kwargs["path"] = quote(kwargs["path"]) + + if kwargs.get("query") is not None: + # Ensure `kwargs["query"] = ` for `rfc3986`. + # + # Note that `.copy_with(query=None)` and `.copy_with(query=b"")` + # are subtly different. The `None` style will not include an empty + # trailing "?" character. + kwargs["query"] = kwargs["query"].decode("ascii") + + if "params" in kwargs: + # Replace any "params" keyword with the raw "query" instead. + # + # Ensure that empty params use `kwargs["query"] = None` rather + # than `kwargs["query"] = ""`, so that generated URLs do not + # include an empty trailing "?". + params = kwargs.pop("params") + kwargs["query"] = None if not params else str(QueryParams(params)) + + # Step 4 + # ====== + # + # Ensure any fragment component is quoted. + if kwargs.get("fragment") is not None: + kwargs["fragment"] = quote(kwargs["fragment"]) + + # Step 5 + # ====== + # + # At this point kwargs may include keys for "scheme", "authority", "path", + # "query" and "fragment". Together these constitute the entire URL. 
+ # + # See https://tools.ietf.org/html/rfc3986#section-3 + # + # foo://example.com:8042/over/there?name=ferret#nose + # \_/ \______________/\_________/ \_________/ \__/ + # | | | | | + # scheme authority path query fragment + return URL(self._uri_reference.copy_with(**kwargs).unsplit()) + + def copy_set_param(self, key: str, value: typing.Any = None) -> "URL": + return self.copy_with(params=self.params.set(key, value)) + + def copy_add_param(self, key: str, value: typing.Any = None) -> "URL": + return self.copy_with(params=self.params.add(key, value)) + + def copy_remove_param(self, key: str) -> "URL": + return self.copy_with(params=self.params.remove(key)) + + def copy_merge_params(self, params: QueryParamTypes) -> "URL": + return self.copy_with(params=self.params.merge(params)) + + def join(self, url: URLTypes) -> "URL": + """ + Return an absolute URL, using this URL as the base. + + Eg. + + url = httpx.URL("https://www.example.com/test") + url = url.join("/new/path") + assert url == "https://www.example.com/new/path" + """ + if self.is_relative_url: + # Workaround to handle relative URLs, which otherwise raise + # rfc3986.exceptions.ResolutionError when used as an argument + # in `.resolve_with`. + return ( + self.copy_with(scheme="http", host="example.com") + .join(url) + .copy_with(scheme=None, host=None) + ) + + # We drop any fragment portion, because RFC 3986 strictly + # treats URLs with a fragment portion as not being absolute URLs. 
+ base_uri = self._uri_reference.copy_with(fragment=None) + relative_url = URL(url) + return URL(relative_url._uri_reference.resolve_with(base_uri).unsplit()) + + def __hash__(self) -> int: + return hash(str(self)) + + def __eq__(self, other: typing.Any) -> bool: + return isinstance(other, (URL, str)) and str(self) == str(URL(other)) + + def __str__(self) -> str: + return self._uri_reference.unsplit() + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + url_str = str(self) + if self._uri_reference.userinfo: + # Mask any password component in the URL representation, to lower the + # risk of unintended leakage, such as in debug information and logging. + username = quote(self.username) + url_str = ( + rfc3986.urlparse(url_str) + .copy_with(userinfo=f"{username}:[secure]") + .unsplit() + ) + return f"{class_name}({url_str!r})" + + +class QueryParams(typing.Mapping[str, str]): + """ + URL query parameters, as a multi-dict. + """ + + def __init__(self, *args: QueryParamTypes, **kwargs: typing.Any) -> None: + assert len(args) < 2, "Too many arguments." + assert not (args and kwargs), "Cannot mix named and unnamed arguments." 
+ + value = args[0] if args else kwargs + + items: typing.Sequence[typing.Tuple[str, PrimitiveData]] + if value is None or isinstance(value, (str, bytes)): + value = value.decode("ascii") if isinstance(value, bytes) else value + self._dict = parse_qs(value) + elif isinstance(value, QueryParams): + self._dict = {k: list(v) for k, v in value._dict.items()} + else: + dict_value: typing.Dict[typing.Any, typing.List[typing.Any]] = {} + if isinstance(value, (list, tuple)): + # Convert list inputs like: + # [("a", "123"), ("a", "456"), ("b", "789")] + # To a dict representation, like: + # {"a": ["123", "456"], "b": ["789"]} + for item in value: + dict_value.setdefault(item[0], []).append(item[1]) + else: + # Convert dict inputs like: + # {"a": "123", "b": ["456", "789"]} + # To dict inputs where values are always lists, like: + # {"a": ["123"], "b": ["456", "789"]} + dict_value = { + k: list(v) if isinstance(v, (list, tuple)) else [v] + for k, v in value.items() + } + + # Ensure that keys and values are neatly coerced to strings. + # We coerce values `True` and `False` to JSON-like "true" and "false" + # representations, and coerce `None` values to the empty string. + self._dict = { + str(k): [primitive_value_to_str(item) for item in v] + for k, v in dict_value.items() + } + + def keys(self) -> typing.KeysView: + """ + Return all the keys in the query params. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert list(q.keys()) == ["a", "b"] + """ + return self._dict.keys() + + def values(self) -> typing.ValuesView: + """ + Return all the values in the query params. If a key occurs more than once + only the first item for that key is returned. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert list(q.values()) == ["123", "789"] + """ + return {k: v[0] for k, v in self._dict.items()}.values() + + def items(self) -> typing.ItemsView: + """ + Return all items in the query params. 
If a key occurs more than once + only the first item for that key is returned. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert list(q.items()) == [("a", "123"), ("b", "789")] + """ + return {k: v[0] for k, v in self._dict.items()}.items() + + def multi_items(self) -> typing.List[typing.Tuple[str, str]]: + """ + Return all items in the query params. Allow duplicate keys to occur. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert list(q.multi_items()) == [("a", "123"), ("a", "456"), ("b", "789")] + """ + multi_items: typing.List[typing.Tuple[str, str]] = [] + for k, v in self._dict.items(): + multi_items.extend([(k, i) for i in v]) + return multi_items + + def get(self, key: typing.Any, default: typing.Any = None) -> typing.Any: + """ + Get a value from the query param for a given key. If the key occurs + more than once, then only the first value is returned. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert q.get("a") == "123" + """ + if key in self._dict: + return self._dict[str(key)][0] + return default + + def get_list(self, key: str) -> typing.List[str]: + """ + Get all values from the query param for a given key. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert q.get_list("a") == ["123", "456"] + """ + return list(self._dict.get(str(key), [])) + + def set(self, key: str, value: typing.Any = None) -> "QueryParams": + """ + Return a new QueryParams instance, setting the value of a key. + + Usage: + + q = httpx.QueryParams("a=123") + q = q.set("a", "456") + assert q == httpx.QueryParams("a=456") + """ + q = QueryParams() + q._dict = dict(self._dict) + q._dict[str(key)] = [primitive_value_to_str(value)] + return q + + def add(self, key: str, value: typing.Any = None) -> "QueryParams": + """ + Return a new QueryParams instance, setting or appending the value of a key. 
+ + Usage: + + q = httpx.QueryParams("a=123") + q = q.add("a", "456") + assert q == httpx.QueryParams("a=123&a=456") + """ + q = QueryParams() + q._dict = dict(self._dict) + q._dict[str(key)] = q.get_list(key) + [primitive_value_to_str(value)] + return q + + def remove(self, key: str) -> "QueryParams": + """ + Return a new QueryParams instance, removing the value of a key. + + Usage: + + q = httpx.QueryParams("a=123") + q = q.remove("a") + assert q == httpx.QueryParams("") + """ + q = QueryParams() + q._dict = dict(self._dict) + q._dict.pop(str(key), None) + return q + + def merge(self, params: QueryParamTypes = None) -> "QueryParams": + """ + Return a new QueryParams instance, updated with. + + Usage: + + q = httpx.QueryParams("a=123") + q = q.merge({"b": "456"}) + assert q == httpx.QueryParams("a=123&b=456") + + q = httpx.QueryParams("a=123") + q = q.merge({"a": "456", "b": "789"}) + assert q == httpx.QueryParams("a=456&b=789") + """ + q = QueryParams(params) + q._dict = {**self._dict, **q._dict} + return q + + def __getitem__(self, key: typing.Any) -> str: + return self._dict[key][0] + + def __contains__(self, key: typing.Any) -> bool: + return key in self._dict + + def __iter__(self) -> typing.Iterator[typing.Any]: + return iter(self.keys()) + + def __len__(self) -> int: + return len(self._dict) + + def __bool__(self) -> bool: + return bool(self._dict) + + def __hash__(self) -> int: + return hash(str(self)) + + def __eq__(self, other: typing.Any) -> bool: + if not isinstance(other, self.__class__): + return False + return sorted(self.multi_items()) == sorted(other.multi_items()) + + def __str__(self) -> str: + return urlencode(self.multi_items()) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + query_string = str(self) + return f"{class_name}({query_string!r})" + + def update(self, params: QueryParamTypes = None) -> None: + raise RuntimeError( + "QueryParams are immutable since 0.18.0. 
" + "Use `q = q.merge(...)` to create an updated copy." + ) + + def __setitem__(self, key: str, value: str) -> None: + raise RuntimeError( + "QueryParams are immutable since 0.18.0. " + "Use `q = q.set(key, value)` to create an updated copy." + ) + + +class Headers(typing.MutableMapping[str, str]): + """ + HTTP headers, as a case-insensitive multi-dict. + """ + + def __init__(self, headers: HeaderTypes = None, encoding: str = None) -> None: + if headers is None: + self._list = [] # type: typing.List[typing.Tuple[bytes, bytes, bytes]] + elif isinstance(headers, Headers): + self._list = list(headers._list) + elif isinstance(headers, dict): + self._list = [ + ( + normalize_header_key(k, lower=False, encoding=encoding), + normalize_header_key(k, lower=True, encoding=encoding), + normalize_header_value(v, encoding), + ) + for k, v in headers.items() + ] + else: + self._list = [ + ( + normalize_header_key(k, lower=False, encoding=encoding), + normalize_header_key(k, lower=True, encoding=encoding), + normalize_header_value(v, encoding), + ) + for k, v in headers + ] + + self._encoding = encoding + + @property + def encoding(self) -> str: + """ + Header encoding is mandated as ascii, but we allow fallbacks to utf-8 + or iso-8859-1. + """ + if self._encoding is None: + for encoding in ["ascii", "utf-8"]: + for key, value in self.raw: + try: + key.decode(encoding) + value.decode(encoding) + except UnicodeDecodeError: + break + else: + # The else block runs if 'break' did not occur, meaning + # all values fitted the encoding. + self._encoding = encoding + break + else: + # The ISO-8859-1 encoding covers all 256 code points in a byte, + # so will never raise decode errors. + self._encoding = "iso-8859-1" + return self._encoding + + @encoding.setter + def encoding(self, value: str) -> None: + self._encoding = value + + @property + def raw(self) -> typing.List[typing.Tuple[bytes, bytes]]: + """ + Returns a list of the raw header items, as byte pairs. 
+ """ + return [(raw_key, value) for raw_key, _, value in self._list] + + def keys(self) -> typing.KeysView[str]: + return {key.decode(self.encoding): None for _, key, value in self._list}.keys() + + def values(self) -> typing.ValuesView[str]: + values_dict: typing.Dict[str, str] = {} + for _, key, value in self._list: + str_key = key.decode(self.encoding) + str_value = value.decode(self.encoding) + if str_key in values_dict: + values_dict[str_key] += f", {str_value}" + else: + values_dict[str_key] = str_value + return values_dict.values() + + def items(self) -> typing.ItemsView[str, str]: + """ + Return `(key, value)` items of headers. Concatenate headers + into a single comma seperated value when a key occurs multiple times. + """ + values_dict: typing.Dict[str, str] = {} + for _, key, value in self._list: + str_key = key.decode(self.encoding) + str_value = value.decode(self.encoding) + if str_key in values_dict: + values_dict[str_key] += f", {str_value}" + else: + values_dict[str_key] = str_value + return values_dict.items() + + def multi_items(self) -> typing.List[typing.Tuple[str, str]]: + """ + Return a list of `(key, value)` pairs of headers. Allow multiple + occurences of the same key without concatenating into a single + comma seperated value. + """ + return [ + (key.decode(self.encoding), value.decode(self.encoding)) + for _, key, value in self._list + ] + + def get(self, key: str, default: typing.Any = None) -> typing.Any: + """ + Return a header value. If multiple occurences of the header occur + then concatenate them together with commas. + """ + try: + return self[key] + except KeyError: + return default + + def get_list(self, key: str, split_commas: bool = False) -> typing.List[str]: + """ + Return a list of all header values for a given key. + If `split_commas=True` is passed, then any comma seperated header + values are split into multiple return strings. 
+ """ + get_header_key = key.lower().encode(self.encoding) + + values = [ + item_value.decode(self.encoding) + for _, item_key, item_value in self._list + if item_key.lower() == get_header_key + ] + + if not split_commas: + return values + + split_values = [] + for value in values: + split_values.extend([item.strip() for item in value.split(",")]) + return split_values + + def update(self, headers: HeaderTypes = None) -> None: # type: ignore + headers = Headers(headers) + for key, value in headers.raw: + self[key.decode(headers.encoding)] = value.decode(headers.encoding) + + def copy(self) -> "Headers": + return Headers(self, encoding=self.encoding) + + def __getitem__(self, key: str) -> str: + """ + Return a single header value. + + If there are multiple headers with the same key, then we concatenate + them with commas. See: https://tools.ietf.org/html/rfc7230#section-3.2.2 + """ + normalized_key = key.lower().encode(self.encoding) + + items = [] + for _, header_key, header_value in self._list: + if header_key == normalized_key: + items.append(header_value.decode(self.encoding)) + + if items: + return ", ".join(items) + + raise KeyError(key) + + def __setitem__(self, key: str, value: str) -> None: + """ + Set the header `key` to `value`, removing any duplicate entries. + Retains insertion order. + """ + set_key = key.encode(self._encoding or "utf-8") + set_value = value.encode(self._encoding or "utf-8") + lookup_key = set_key.lower() + + found_indexes = [] + for idx, (_, item_key, _) in enumerate(self._list): + if item_key == lookup_key: + found_indexes.append(idx) + + for idx in reversed(found_indexes[1:]): + del self._list[idx] + + if found_indexes: + idx = found_indexes[0] + self._list[idx] = (set_key, lookup_key, set_value) + else: + self._list.append((set_key, lookup_key, set_value)) + + def __delitem__(self, key: str) -> None: + """ + Remove the header `key`. 
+ """ + del_key = key.lower().encode(self.encoding) + + pop_indexes = [] + for idx, (_, item_key, _) in enumerate(self._list): + if item_key.lower() == del_key: + pop_indexes.append(idx) + + if not pop_indexes: + raise KeyError(key) + + for idx in reversed(pop_indexes): + del self._list[idx] + + def __contains__(self, key: typing.Any) -> bool: + header_key = key.lower().encode(self.encoding) + return header_key in [key for _, key, _ in self._list] + + def __iter__(self) -> typing.Iterator[typing.Any]: + return iter(self.keys()) + + def __len__(self) -> int: + return len(self._list) + + def __eq__(self, other: typing.Any) -> bool: + try: + other_headers = Headers(other) + except ValueError: + return False + + self_list = [(key, value) for _, key, value in self._list] + other_list = [(key, value) for _, key, value in other_headers._list] + return sorted(self_list) == sorted(other_list) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + + encoding_str = "" + if self.encoding != "ascii": + encoding_str = f", encoding={self.encoding!r}" + + as_list = list(obfuscate_sensitive_headers(self.multi_items())) + as_dict = dict(as_list) + + no_duplicate_keys = len(as_dict) == len(as_list) + if no_duplicate_keys: + return f"{class_name}({as_dict!r}{encoding_str})" + return f"{class_name}({as_list!r}{encoding_str})" + + +class Request: + def __init__( + self, + method: typing.Union[str, bytes], + url: typing.Union["URL", str, RawURL], + *, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + content: RequestContent = None, + data: RequestData = None, + files: RequestFiles = None, + json: typing.Any = None, + stream: typing.Union[SyncByteStream, AsyncByteStream] = None, + ): + if isinstance(method, bytes): + self.method = method.decode("ascii").upper() + else: + self.method = method.upper() + self.url = URL(url) + if params is not None: + self.url = self.url.copy_merge_params(params=params) + self.headers = 
Headers(headers) + if cookies: + Cookies(cookies).set_cookie_header(self) + + if stream is None: + headers, stream = encode_request(content, data, files, json) + self._prepare(headers) + self.stream = stream + # Load the request body, except for streaming content. + if isinstance(stream, ByteStream): + self.read() + else: + # There's an important distinction between `Request(content=...)`, + # and `Request(stream=...)`. + # + # Using `content=...` implies automatically populated `Host` and content + # headers, of either `Content-Length: ...` or `Transfer-Encoding: chunked`. + # + # Using `stream=...` will not automatically include *any* auto-populated headers. + # + # As an end-user you don't really need `stream=...`. It's only + # useful when: + # + # * Preserving the request stream when copying requests, eg for redirects. + # * Creating request instances on the *server-side* of the transport API. + self.stream = stream + + def _prepare(self, default_headers: typing.Dict[str, str]) -> None: + for key, value in default_headers.items(): + # Ignore Transfer-Encoding if the Content-Length has been set explicitly. + if key.lower() == "transfer-encoding" and "Content-Length" in self.headers: + continue + self.headers.setdefault(key, value) + + auto_headers: typing.List[typing.Tuple[bytes, bytes]] = [] + + has_host = "Host" in self.headers + has_content_length = ( + "Content-Length" in self.headers or "Transfer-Encoding" in self.headers + ) + + if not has_host and self.url.host: + auto_headers.append((b"Host", self.url.netloc)) + if not has_content_length and self.method in ("POST", "PUT", "PATCH"): + auto_headers.append((b"Content-Length", b"0")) + + self.headers = Headers(auto_headers + self.headers.raw) + + @property + def content(self) -> bytes: + if not hasattr(self, "_content"): + raise RequestNotRead() + return self._content + + def read(self) -> bytes: + """ + Read and return the request content. 
+ """ + if not hasattr(self, "_content"): + assert isinstance(self.stream, typing.Iterable) + self._content = b"".join(self.stream) + if not isinstance(self.stream, ByteStream): + # If a streaming request has been read entirely into memory, then + # we can replace the stream with a raw bytes implementation, + # to ensure that any non-replayable streams can still be used. + self.stream = ByteStream(self._content) + return self._content + + async def aread(self) -> bytes: + """ + Read and return the request content. + """ + if not hasattr(self, "_content"): + assert isinstance(self.stream, typing.AsyncIterable) + self._content = b"".join([part async for part in self.stream]) + if not isinstance(self.stream, ByteStream): + # If a streaming request has been read entirely into memory, then + # we can replace the stream with a raw bytes implementation, + # to ensure that any non-replayable streams can still be used. + self.stream = ByteStream(self._content) + return self._content + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + url = str(self.url) + return f"<{class_name}({self.method!r}, {url!r})>" + + def __getstate__(self) -> typing.Dict[str, typing.Any]: + return { + name: value + for name, value in self.__dict__.items() + if name not in ["stream"] + } + + def __setstate__(self, state: typing.Dict[str, typing.Any]) -> None: + for name, value in state.items(): + setattr(self, name, value) + self.stream = UnattachedStream() + + +class Response: + def __init__( + self, + status_code: int, + *, + headers: HeaderTypes = None, + content: ResponseContent = None, + text: str = None, + html: str = None, + json: typing.Any = None, + stream: typing.Union[SyncByteStream, AsyncByteStream] = None, + request: Request = None, + extensions: dict = None, + history: typing.List["Response"] = None, + ): + self.status_code = status_code + self.headers = Headers(headers) + + self._request: typing.Optional[Request] = request + + # When allow_redirects=False and a 
redirect is received, + # the client will set `response.next_request`. + self.next_request: typing.Optional[Request] = None + + self.extensions = {} if extensions is None else extensions + self.history = [] if history is None else list(history) + + self.is_closed = False + self.is_stream_consumed = False + + if stream is None: + headers, stream = encode_response(content, text, html, json) + self._prepare(headers) + self.stream = stream + if isinstance(stream, ByteStream): + # Load the response body, except for streaming content. + self.read() + else: + # There's an important distinction between `Response(content=...)`, + # and `Response(stream=...)`. + # + # Using `content=...` implies automatically populated content headers, + # of either `Content-Length: ...` or `Transfer-Encoding: chunked`. + # + # Using `stream=...` will not automatically include any content headers. + # + # As an end-user you don't really need `stream=...`. It's only + # useful when creating response instances having received a stream + # from the transport API. + self.stream = stream + + self._num_bytes_downloaded = 0 + + def _prepare(self, default_headers: typing.Dict[str, str]) -> None: + for key, value in default_headers.items(): + # Ignore Transfer-Encoding if the Content-Length has been set explicitly. + if key.lower() == "transfer-encoding" and "content-length" in self.headers: + continue + self.headers.setdefault(key, value) + + @property + def elapsed(self) -> datetime.timedelta: + """ + Returns the time taken for the complete request/response + cycle to complete. + """ + if not hasattr(self, "_elapsed"): + raise RuntimeError( + "'.elapsed' may only be accessed after the response " + "has been read or closed." + ) + return self._elapsed + + @elapsed.setter + def elapsed(self, elapsed: datetime.timedelta) -> None: + self._elapsed = elapsed + + @property + def request(self) -> Request: + """ + Returns the request instance associated to the current response. 
+ """ + if self._request is None: + raise RuntimeError( + "The request instance has not been set on this response." + ) + return self._request + + @request.setter + def request(self, value: Request) -> None: + self._request = value + + @property + def http_version(self) -> str: + try: + return self.extensions["http_version"].decode("ascii", errors="ignore") + except KeyError: + return "HTTP/1.1" + + @property + def reason_phrase(self) -> str: + try: + return self.extensions["reason_phrase"].decode("ascii", errors="ignore") + except KeyError: + return codes.get_reason_phrase(self.status_code) + + @property + def url(self) -> typing.Optional[URL]: + """ + Returns the URL for which the request was made. + """ + return self.request.url + + @property + def content(self) -> bytes: + if not hasattr(self, "_content"): + raise ResponseNotRead() + return self._content + + @property + def text(self) -> str: + if not hasattr(self, "_text"): + content = self.content + if not content: + self._text = "" + else: + decoder = TextDecoder(encoding=self.encoding) + self._text = "".join([decoder.decode(self.content), decoder.flush()]) + return self._text + + @property + def encoding(self) -> typing.Optional[str]: + """ + Return the encoding, which may have been set explicitly, or may have + been specified by the Content-Type header. + """ + if not hasattr(self, "_encoding"): + encoding = self.charset_encoding + if encoding is None or not is_known_encoding(encoding): + self._encoding = None + else: + self._encoding = encoding + return self._encoding + + @encoding.setter + def encoding(self, value: str) -> None: + self._encoding = value + + @property + def charset_encoding(self) -> typing.Optional[str]: + """ + Return the encoding, as specified by the Content-Type header. 
+ """ + content_type = self.headers.get("Content-Type") + if content_type is None: + return None + + _, params = cgi.parse_header(content_type) + if "charset" not in params: + return None + + return params["charset"].strip("'\"") + + def _get_content_decoder(self) -> ContentDecoder: + """ + Returns a decoder instance which can be used to decode the raw byte + content, depending on the Content-Encoding used in the response. + """ + if not hasattr(self, "_decoder"): + decoders: typing.List[ContentDecoder] = [] + values = self.headers.get_list("content-encoding", split_commas=True) + for value in values: + value = value.strip().lower() + try: + decoder_cls = SUPPORTED_DECODERS[value] + decoders.append(decoder_cls()) + except KeyError: + continue + + if len(decoders) == 1: + self._decoder = decoders[0] + elif len(decoders) > 1: + self._decoder = MultiDecoder(children=decoders) + else: + self._decoder = IdentityDecoder() + + return self._decoder + + @property + def is_error(self) -> bool: + return codes.is_error(self.status_code) + + @property + def is_redirect(self) -> bool: + return codes.is_redirect(self.status_code) and "location" in self.headers + + def raise_for_status(self) -> None: + """ + Raise the `HTTPStatusError` if one occurred. + """ + message = ( + "{0.status_code} {error_type}: {0.reason_phrase} for url: {0.url}\n" + "For more information check: https://httpstatuses.com/{0.status_code}" + ) + + request = self._request + if request is None: + raise RuntimeError( + "Cannot call `raise_for_status` as the request " + "instance has not been set on this response." 
+            )
+
+        if codes.is_client_error(self.status_code):
+            message = message.format(self, error_type="Client Error")
+            raise HTTPStatusError(message, request=request, response=self)
+        elif codes.is_server_error(self.status_code):
+            message = message.format(self, error_type="Server Error")
+            raise HTTPStatusError(message, request=request, response=self)
+
+    def json(self, **kwargs: typing.Any) -> typing.Any:
+        if self.charset_encoding is None and self.content and len(self.content) > 3:
+            encoding = guess_json_utf(self.content)
+            if encoding is not None:
+                try:
+                    return jsonlib.loads(self.content.decode(encoding), **kwargs)
+                except UnicodeDecodeError:
+                    pass
+        return jsonlib.loads(self.text, **kwargs)
+
+    @property
+    def cookies(self) -> "Cookies":
+        if not hasattr(self, "_cookies"):
+            self._cookies = Cookies()
+            self._cookies.extract_cookies(self)
+        return self._cookies
+
+    @property
+    def links(self) -> typing.Dict[typing.Optional[str], typing.Dict[str, str]]:
+        """
+        Returns the parsed header links of the response, if any
+        """
+        header = self.headers.get("link")
+        ldict = {}
+        if header:
+            links = parse_header_links(header)
+            for link in links:
+                key = link.get("rel") or link.get("url")
+                ldict[key] = link
+        return ldict
+
+    @property
+    def num_bytes_downloaded(self) -> int:
+        return self._num_bytes_downloaded
+
+    def __repr__(self) -> str:
+        return f"<Response [{self.status_code} {self.reason_phrase}]>"
+
+    def __getstate__(self) -> typing.Dict[str, typing.Any]:
+        return {
+            name: value
+            for name, value in self.__dict__.items()
+            if name not in ["stream", "is_closed", "_decoder"]
+        }
+
+    def __setstate__(self, state: typing.Dict[str, typing.Any]) -> None:
+        for name, value in state.items():
+            setattr(self, name, value)
+        self.is_closed = True
+        self.stream = UnattachedStream()
+
+    def read(self) -> bytes:
+        """
+        Read and return the response content.
+ """ + if not hasattr(self, "_content"): + self._content = b"".join(self.iter_bytes()) + return self._content + + def iter_bytes(self, chunk_size: int = None) -> typing.Iterator[bytes]: + """ + A byte-iterator over the decoded response content. + This allows us to handle gzip, deflate, and brotli encoded responses. + """ + if hasattr(self, "_content"): + chunk_size = len(self._content) if chunk_size is None else chunk_size + for i in range(0, len(self._content), chunk_size): + yield self._content[i : i + chunk_size] + else: + decoder = self._get_content_decoder() + chunker = ByteChunker(chunk_size=chunk_size) + with request_context(request=self._request): + for raw_bytes in self.iter_raw(): + decoded = decoder.decode(raw_bytes) + for chunk in chunker.decode(decoded): + yield chunk + decoded = decoder.flush() + for chunk in chunker.decode(decoded): + yield chunk + for chunk in chunker.flush(): + yield chunk + + def iter_text(self, chunk_size: int = None) -> typing.Iterator[str]: + """ + A str-iterator over the decoded response content + that handles both gzip, deflate, etc but also detects the content's + string encoding. + """ + decoder = TextDecoder(encoding=self.encoding) + chunker = TextChunker(chunk_size=chunk_size) + with request_context(request=self._request): + for byte_content in self.iter_bytes(): + text_content = decoder.decode(byte_content) + for chunk in chunker.decode(text_content): + yield chunk + text_content = decoder.flush() + for chunk in chunker.decode(text_content): + yield chunk + for chunk in chunker.flush(): + yield chunk + + def iter_lines(self) -> typing.Iterator[str]: + decoder = LineDecoder() + with request_context(request=self._request): + for text in self.iter_text(): + for line in decoder.decode(text): + yield line + for line in decoder.flush(): + yield line + + def iter_raw(self, chunk_size: int = None) -> typing.Iterator[bytes]: + """ + A byte-iterator over the raw response content. 
+ """ + if self.is_stream_consumed: + raise StreamConsumed() + if self.is_closed: + raise StreamClosed() + if not isinstance(self.stream, SyncByteStream): + raise RuntimeError("Attempted to call a sync iterator on an async stream.") + + self.is_stream_consumed = True + self._num_bytes_downloaded = 0 + chunker = ByteChunker(chunk_size=chunk_size) + + with request_context(request=self._request): + for raw_stream_bytes in self.stream: + self._num_bytes_downloaded += len(raw_stream_bytes) + for chunk in chunker.decode(raw_stream_bytes): + yield chunk + + for chunk in chunker.flush(): + yield chunk + + self.close() + + def close(self) -> None: + """ + Close the response and release the connection. + Automatically called if the response body is read to completion. + """ + if not isinstance(self.stream, SyncByteStream): + raise RuntimeError("Attempted to call an sync close on an async stream.") + + if not self.is_closed: + self.is_closed = True + with request_context(request=self._request): + self.stream.close() + + async def aread(self) -> bytes: + """ + Read and return the response content. + """ + if not hasattr(self, "_content"): + self._content = b"".join([part async for part in self.aiter_bytes()]) + return self._content + + async def aiter_bytes(self, chunk_size: int = None) -> typing.AsyncIterator[bytes]: + """ + A byte-iterator over the decoded response content. + This allows us to handle gzip, deflate, and brotli encoded responses. 
+ """ + if hasattr(self, "_content"): + chunk_size = len(self._content) if chunk_size is None else chunk_size + for i in range(0, len(self._content), chunk_size): + yield self._content[i : i + chunk_size] + else: + decoder = self._get_content_decoder() + chunker = ByteChunker(chunk_size=chunk_size) + with request_context(request=self._request): + async for raw_bytes in self.aiter_raw(): + decoded = decoder.decode(raw_bytes) + for chunk in chunker.decode(decoded): + yield chunk + decoded = decoder.flush() + for chunk in chunker.decode(decoded): + yield chunk + for chunk in chunker.flush(): + yield chunk + + async def aiter_text(self, chunk_size: int = None) -> typing.AsyncIterator[str]: + """ + A str-iterator over the decoded response content + that handles both gzip, deflate, etc but also detects the content's + string encoding. + """ + decoder = TextDecoder(encoding=self.encoding) + chunker = TextChunker(chunk_size=chunk_size) + with request_context(request=self._request): + async for byte_content in self.aiter_bytes(): + text_content = decoder.decode(byte_content) + for chunk in chunker.decode(text_content): + yield chunk + text_content = decoder.flush() + for chunk in chunker.decode(text_content): + yield chunk + for chunk in chunker.flush(): + yield chunk + + async def aiter_lines(self) -> typing.AsyncIterator[str]: + decoder = LineDecoder() + with request_context(request=self._request): + async for text in self.aiter_text(): + for line in decoder.decode(text): + yield line + for line in decoder.flush(): + yield line + + async def aiter_raw(self, chunk_size: int = None) -> typing.AsyncIterator[bytes]: + """ + A byte-iterator over the raw response content. 
+ """ + if self.is_stream_consumed: + raise StreamConsumed() + if self.is_closed: + raise StreamClosed() + if not isinstance(self.stream, AsyncByteStream): + raise RuntimeError("Attempted to call an async iterator on an sync stream.") + + self.is_stream_consumed = True + self._num_bytes_downloaded = 0 + chunker = ByteChunker(chunk_size=chunk_size) + + with request_context(request=self._request): + async for raw_stream_bytes in self.stream: + self._num_bytes_downloaded += len(raw_stream_bytes) + for chunk in chunker.decode(raw_stream_bytes): + yield chunk + + for chunk in chunker.flush(): + yield chunk + + await self.aclose() + + async def aclose(self) -> None: + """ + Close the response and release the connection. + Automatically called if the response body is read to completion. + """ + if not isinstance(self.stream, AsyncByteStream): + raise RuntimeError("Attempted to call an async close on an sync stream.") + + if not self.is_closed: + self.is_closed = True + with request_context(request=self._request): + await self.stream.aclose() + + +class Cookies(MutableMapping): + """ + HTTP Cookies, as a mutable mapping. + """ + + def __init__(self, cookies: CookieTypes = None) -> None: + if cookies is None or isinstance(cookies, dict): + self.jar = CookieJar() + if isinstance(cookies, dict): + for key, value in cookies.items(): + self.set(key, value) + elif isinstance(cookies, list): + self.jar = CookieJar() + for key, value in cookies: + self.set(key, value) + elif isinstance(cookies, Cookies): + self.jar = CookieJar() + for cookie in cookies.jar: + self.jar.set_cookie(cookie) + else: + self.jar = cookies + + def extract_cookies(self, response: Response) -> None: + """ + Loads any cookies based on the response `Set-Cookie` headers. 
+ """ + urllib_response = self._CookieCompatResponse(response) + urllib_request = self._CookieCompatRequest(response.request) + + self.jar.extract_cookies(urllib_response, urllib_request) # type: ignore + + def set_cookie_header(self, request: Request) -> None: + """ + Sets an appropriate 'Cookie:' HTTP header on the `Request`. + """ + urllib_request = self._CookieCompatRequest(request) + self.jar.add_cookie_header(urllib_request) + + def set(self, name: str, value: str, domain: str = "", path: str = "/") -> None: + """ + Set a cookie value by name. May optionally include domain and path. + """ + kwargs = { + "version": 0, + "name": name, + "value": value, + "port": None, + "port_specified": False, + "domain": domain, + "domain_specified": bool(domain), + "domain_initial_dot": domain.startswith("."), + "path": path, + "path_specified": bool(path), + "secure": False, + "expires": None, + "discard": True, + "comment": None, + "comment_url": None, + "rest": {"HttpOnly": None}, + "rfc2109": False, + } + cookie = Cookie(**kwargs) # type: ignore + self.jar.set_cookie(cookie) + + def get( # type: ignore + self, name: str, default: str = None, domain: str = None, path: str = None + ) -> typing.Optional[str]: + """ + Get a cookie by name. May optionally include domain and path + in order to specify exactly which cookie to retrieve. + """ + value = None + for cookie in self.jar: + if cookie.name == name: + if domain is None or cookie.domain == domain: + if path is None or cookie.path == path: + if value is not None: + message = f"Multiple cookies exist with name={name}" + raise CookieConflict(message) + value = cookie.value + + if value is None: + return default + return value + + def delete(self, name: str, domain: str = None, path: str = None) -> None: + """ + Delete a cookie by name. May optionally include domain and path + in order to specify exactly which cookie to delete. 
+ """ + if domain is not None and path is not None: + return self.jar.clear(domain, path, name) + + remove = [] + for cookie in self.jar: + if cookie.name == name: + if domain is None or cookie.domain == domain: + if path is None or cookie.path == path: + remove.append(cookie) + + for cookie in remove: + self.jar.clear(cookie.domain, cookie.path, cookie.name) + + def clear(self, domain: str = None, path: str = None) -> None: + """ + Delete all cookies. Optionally include a domain and path in + order to only delete a subset of all the cookies. + """ + args = [] + if domain is not None: + args.append(domain) + if path is not None: + assert domain is not None + args.append(path) + self.jar.clear(*args) + + def update(self, cookies: CookieTypes = None) -> None: # type: ignore + cookies = Cookies(cookies) + for cookie in cookies.jar: + self.jar.set_cookie(cookie) + + def __setitem__(self, name: str, value: str) -> None: + return self.set(name, value) + + def __getitem__(self, name: str) -> str: + value = self.get(name) + if value is None: + raise KeyError(name) + return value + + def __delitem__(self, name: str) -> None: + return self.delete(name) + + def __len__(self) -> int: + return len(self.jar) + + def __iter__(self) -> typing.Iterator[str]: + return (cookie.name for cookie in self.jar) + + def __bool__(self) -> bool: + for _ in self.jar: + return True + return False + + def __repr__(self) -> str: + cookies_repr = ", ".join( + [ + f"" + for cookie in self.jar + ] + ) + + return f"" + + class _CookieCompatRequest(urllib.request.Request): + """ + Wraps a `Request` instance up in a compatibility interface suitable + for use with `CookieJar` operations. 
+ """ + + def __init__(self, request: Request) -> None: + super().__init__( + url=str(request.url), + headers=dict(request.headers), + method=request.method, + ) + self.request = request + + def add_unredirected_header(self, key: str, value: str) -> None: + super().add_unredirected_header(key, value) + self.request.headers[key] = value + + class _CookieCompatResponse: + """ + Wraps a `Request` instance up in a compatibility interface suitable + for use with `CookieJar` operations. + """ + + def __init__(self, response: Response): + self.response = response + + def info(self) -> email.message.Message: + info = email.message.Message() + for key, value in self.response.headers.multi_items(): + # Note that setting `info[key]` here is an "append" operation, + # not a "replace" operation. + # https://docs.python.org/3/library/email.compat32-message.html#email.message.Message.__setitem__ + info[key] = value + return info diff --git a/.venv/lib/python3.9/site-packages/httpx/_multipart.py b/.venv/lib/python3.9/site-packages/httpx/_multipart.py new file mode 100644 index 0000000..36bae66 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpx/_multipart.py @@ -0,0 +1,205 @@ +import binascii +import os +import typing +from pathlib import Path + +from ._transports.base import AsyncByteStream, SyncByteStream +from ._types import FileContent, FileTypes, RequestFiles +from ._utils import ( + format_form_param, + guess_content_type, + peek_filelike_length, + primitive_value_to_str, + to_bytes, +) + + +class DataField: + """ + A single form field item, within a multipart form field. + """ + + def __init__( + self, name: str, value: typing.Union[str, bytes, int, float, None] + ) -> None: + if not isinstance(name, str): + raise TypeError( + f"Invalid type for name. Expected str, got {type(name)}: {name!r}" + ) + if value is not None and not isinstance(value, (str, bytes, int, float)): + raise TypeError( + f"Invalid type for value. 
Expected primitive type, got {type(value)}: {value!r}" + ) + self.name = name + self.value: typing.Union[str, bytes] = ( + value if isinstance(value, bytes) else primitive_value_to_str(value) + ) + + def render_headers(self) -> bytes: + if not hasattr(self, "_headers"): + name = format_form_param("name", self.name) + self._headers = b"".join( + [b"Content-Disposition: form-data; ", name, b"\r\n\r\n"] + ) + + return self._headers + + def render_data(self) -> bytes: + if not hasattr(self, "_data"): + self._data = to_bytes(self.value) + + return self._data + + def get_length(self) -> int: + headers = self.render_headers() + data = self.render_data() + return len(headers) + len(data) + + def render(self) -> typing.Iterator[bytes]: + yield self.render_headers() + yield self.render_data() + + +class FileField: + """ + A single file field item, within a multipart form field. + """ + + def __init__(self, name: str, value: FileTypes) -> None: + self.name = name + + fileobj: FileContent + + if isinstance(value, tuple): + try: + filename, fileobj, content_type = value # type: ignore + except ValueError: + filename, fileobj = value # type: ignore + content_type = guess_content_type(filename) + else: + filename = Path(str(getattr(value, "name", "upload"))).name + fileobj = value + content_type = guess_content_type(filename) + + self.filename = filename + self.file = fileobj + self.content_type = content_type + self._consumed = False + + def get_length(self) -> int: + headers = self.render_headers() + + if isinstance(self.file, (str, bytes)): + return len(headers) + len(to_bytes(self.file)) + + # Let's do our best not to read `file` into memory. + file_length = peek_filelike_length(self.file) + if file_length is None: + # As a last resort, read file and cache contents for later. 
+ assert not hasattr(self, "_data") + self._data = to_bytes(self.file.read()) + file_length = len(self._data) + + return len(headers) + file_length + + def render_headers(self) -> bytes: + if not hasattr(self, "_headers"): + parts = [ + b"Content-Disposition: form-data; ", + format_form_param("name", self.name), + ] + if self.filename: + filename = format_form_param("filename", self.filename) + parts.extend([b"; ", filename]) + if self.content_type is not None: + content_type = self.content_type.encode() + parts.extend([b"\r\nContent-Type: ", content_type]) + parts.append(b"\r\n\r\n") + self._headers = b"".join(parts) + + return self._headers + + def render_data(self) -> typing.Iterator[bytes]: + if isinstance(self.file, (str, bytes)): + yield to_bytes(self.file) + return + + if hasattr(self, "_data"): + # Already rendered. + yield self._data + return + + if self._consumed: # pragma: nocover + self.file.seek(0) + self._consumed = True + + for chunk in self.file: + yield to_bytes(chunk) + + def render(self) -> typing.Iterator[bytes]: + yield self.render_headers() + yield from self.render_data() + + +class MultipartStream(SyncByteStream, AsyncByteStream): + """ + Request content as streaming multipart encoded form data. 
+ """ + + def __init__(self, data: dict, files: RequestFiles, boundary: bytes = None) -> None: + if boundary is None: + boundary = binascii.hexlify(os.urandom(16)) + + self.boundary = boundary + self.content_type = "multipart/form-data; boundary=%s" % boundary.decode( + "ascii" + ) + self.fields = list(self._iter_fields(data, files)) + + def _iter_fields( + self, data: dict, files: RequestFiles + ) -> typing.Iterator[typing.Union[FileField, DataField]]: + for name, value in data.items(): + if isinstance(value, list): + for item in value: + yield DataField(name=name, value=item) + else: + yield DataField(name=name, value=value) + + file_items = files.items() if isinstance(files, typing.Mapping) else files + for name, value in file_items: + yield FileField(name=name, value=value) + + def iter_chunks(self) -> typing.Iterator[bytes]: + for field in self.fields: + yield b"--%s\r\n" % self.boundary + yield from field.render() + yield b"\r\n" + yield b"--%s--\r\n" % self.boundary + + def iter_chunks_lengths(self) -> typing.Iterator[int]: + boundary_length = len(self.boundary) + # Follow closely what `.iter_chunks()` does. + for field in self.fields: + yield 2 + boundary_length + 2 + yield field.get_length() + yield 2 + yield 2 + boundary_length + 4 + + def get_content_length(self) -> int: + return sum(self.iter_chunks_lengths()) + + # Content stream interface. 
+ + def get_headers(self) -> typing.Dict[str, str]: + content_length = str(self.get_content_length()) + content_type = self.content_type + return {"Content-Length": content_length, "Content-Type": content_type} + + def __iter__(self) -> typing.Iterator[bytes]: + for chunk in self.iter_chunks(): + yield chunk + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + for chunk in self.iter_chunks(): + yield chunk diff --git a/.venv/lib/python3.9/site-packages/httpx/_status_codes.py b/.venv/lib/python3.9/site-packages/httpx/_status_codes.py new file mode 100644 index 0000000..100aec6 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpx/_status_codes.py @@ -0,0 +1,143 @@ +from enum import IntEnum + + +class codes(IntEnum): + """HTTP status codes and reason phrases + + Status codes from the following RFCs are all observed: + + * RFC 7231: Hypertext Transfer Protocol (HTTP/1.1), obsoletes 2616 + * RFC 6585: Additional HTTP Status Codes + * RFC 3229: Delta encoding in HTTP + * RFC 4918: HTTP Extensions for WebDAV, obsoletes 2518 + * RFC 5842: Binding Extensions to WebDAV + * RFC 7238: Permanent Redirect + * RFC 2295: Transparent Content Negotiation in HTTP + * RFC 2774: An HTTP Extension Framework + * RFC 7540: Hypertext Transfer Protocol Version 2 (HTTP/2) + * RFC 2324: Hyper Text Coffee Pot Control Protocol (HTCPCP/1.0) + * RFC 7725: An HTTP Status Code to Report Legal Obstacles + * RFC 8297: An HTTP Status Code for Indicating Hints + * RFC 8470: Using Early Data in HTTP + """ + + def __new__(cls, value: int, phrase: str = "") -> "codes": + obj = int.__new__(cls, value) # type: ignore + obj._value_ = value + + obj.phrase = phrase # type: ignore + return obj + + def __str__(self) -> str: + return str(self.value) + + @classmethod + def get_reason_phrase(cls, value: int) -> str: + try: + return codes(value).phrase # type: ignore + except ValueError: + return "" + + @classmethod + def is_redirect(cls, value: int) -> bool: + return value in ( + # 301 (Cacheable 
redirect. Method may change to GET.) + codes.MOVED_PERMANENTLY, + # 302 (Uncacheable redirect. Method may change to GET.) + codes.FOUND, + # 303 (Client should make a GET or HEAD request.) + codes.SEE_OTHER, + # 307 (Equiv. 302, but retain method) + codes.TEMPORARY_REDIRECT, + # 308 (Equiv. 301, but retain method) + codes.PERMANENT_REDIRECT, + ) + + @classmethod + def is_error(cls, value: int) -> bool: + return 400 <= value <= 599 + + @classmethod + def is_client_error(cls, value: int) -> bool: + return 400 <= value <= 499 + + @classmethod + def is_server_error(cls, value: int) -> bool: + return 500 <= value <= 599 + + # informational + CONTINUE = 100, "Continue" + SWITCHING_PROTOCOLS = 101, "Switching Protocols" + PROCESSING = 102, "Processing" + EARLY_HINTS = 103, "Early Hints" + + # success + OK = 200, "OK" + CREATED = 201, "Created" + ACCEPTED = 202, "Accepted" + NON_AUTHORITATIVE_INFORMATION = 203, "Non-Authoritative Information" + NO_CONTENT = 204, "No Content" + RESET_CONTENT = 205, "Reset Content" + PARTIAL_CONTENT = 206, "Partial Content" + MULTI_STATUS = 207, "Multi-Status" + ALREADY_REPORTED = 208, "Already Reported" + IM_USED = 226, "IM Used" + + # redirection + MULTIPLE_CHOICES = 300, "Multiple Choices" + MOVED_PERMANENTLY = 301, "Moved Permanently" + FOUND = 302, "Found" + SEE_OTHER = 303, "See Other" + NOT_MODIFIED = 304, "Not Modified" + USE_PROXY = 305, "Use Proxy" + TEMPORARY_REDIRECT = 307, "Temporary Redirect" + PERMANENT_REDIRECT = 308, "Permanent Redirect" + + # client error + BAD_REQUEST = 400, "Bad Request" + UNAUTHORIZED = 401, "Unauthorized" + PAYMENT_REQUIRED = 402, "Payment Required" + FORBIDDEN = 403, "Forbidden" + NOT_FOUND = 404, "Not Found" + METHOD_NOT_ALLOWED = 405, "Method Not Allowed" + NOT_ACCEPTABLE = 406, "Not Acceptable" + PROXY_AUTHENTICATION_REQUIRED = 407, "Proxy Authentication Required" + REQUEST_TIMEOUT = 408, "Request Timeout" + CONFLICT = 409, "Conflict" + GONE = 410, "Gone" + LENGTH_REQUIRED = 411, "Length Required" + 
PRECONDITION_FAILED = 412, "Precondition Failed" + REQUEST_ENTITY_TOO_LARGE = 413, "Request Entity Too Large" + REQUEST_URI_TOO_LONG = 414, "Request-URI Too Long" + UNSUPPORTED_MEDIA_TYPE = 415, "Unsupported Media Type" + REQUESTED_RANGE_NOT_SATISFIABLE = 416, "Requested Range Not Satisfiable" + EXPECTATION_FAILED = 417, "Expectation Failed" + IM_A_TEAPOT = 418, "I'm a teapot" + MISDIRECTED_REQUEST = 421, "Misdirected Request" + UNPROCESSABLE_ENTITY = 422, "Unprocessable Entity" + LOCKED = 423, "Locked" + FAILED_DEPENDENCY = 424, "Failed Dependency" + TOO_EARLY = 425, "Too Early" + UPGRADE_REQUIRED = 426, "Upgrade Required" + PRECONDITION_REQUIRED = 428, "Precondition Required" + TOO_MANY_REQUESTS = 429, "Too Many Requests" + REQUEST_HEADER_FIELDS_TOO_LARGE = 431, "Request Header Fields Too Large" + UNAVAILABLE_FOR_LEGAL_REASONS = 451, "Unavailable For Legal Reasons" + + # server errors + INTERNAL_SERVER_ERROR = 500, "Internal Server Error" + NOT_IMPLEMENTED = 501, "Not Implemented" + BAD_GATEWAY = 502, "Bad Gateway" + SERVICE_UNAVAILABLE = 503, "Service Unavailable" + GATEWAY_TIMEOUT = 504, "Gateway Timeout" + HTTP_VERSION_NOT_SUPPORTED = 505, "HTTP Version Not Supported" + VARIANT_ALSO_NEGOTIATES = 506, "Variant Also Negotiates" + INSUFFICIENT_STORAGE = 507, "Insufficient Storage" + LOOP_DETECTED = 508, "Loop Detected" + NOT_EXTENDED = 510, "Not Extended" + NETWORK_AUTHENTICATION_REQUIRED = 511, "Network Authentication Required" + + +# Include lower-case styles for `requests` compatibility. 
+for code in codes: + setattr(codes, code._name_.lower(), int(code)) diff --git a/.venv/lib/python3.9/site-packages/httpx/_transports/__init__.py b/.venv/lib/python3.9/site-packages/httpx/_transports/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.9/site-packages/httpx/_transports/asgi.py b/.venv/lib/python3.9/site-packages/httpx/_transports/asgi.py new file mode 100644 index 0000000..24c5452 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpx/_transports/asgi.py @@ -0,0 +1,169 @@ +import typing +from urllib.parse import unquote + +import sniffio + +from .base import AsyncBaseTransport, AsyncByteStream + +if typing.TYPE_CHECKING: # pragma: no cover + import asyncio + + import trio + + Event = typing.Union[asyncio.Event, trio.Event] + + +def create_event() -> "Event": + if sniffio.current_async_library() == "trio": + import trio + + return trio.Event() + else: + import asyncio + + return asyncio.Event() + + +class ASGIResponseStream(AsyncByteStream): + def __init__(self, body: typing.List[bytes]) -> None: + self._body = body + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + yield b"".join(self._body) + + +class ASGITransport(AsyncBaseTransport): + """ + A custom AsyncTransport that handles sending requests directly to an ASGI app. + The simplest way to use this functionality is to use the `app` argument. + + ``` + client = httpx.AsyncClient(app=app) + ``` + + Alternatively, you can setup the transport instance explicitly. + This allows you to include any additional configuration arguments specific + to the ASGITransport class: + + ``` + transport = httpx.ASGITransport( + app=app, + root_path="/submount", + client=("1.2.3.4", 123) + ) + client = httpx.AsyncClient(transport=transport) + ``` + + Arguments: + + * `app` - The ASGI application. + * `raise_app_exceptions` - Boolean indicating if exceptions in the application + should be raised. Default to `True`. 
Can be set to `False` for use cases + such as testing the content of a client 500 response. + * `root_path` - The root path on which the ASGI application should be mounted. + * `client` - A two-tuple indicating the client IP and port of incoming requests. + ``` + """ + + def __init__( + self, + app: typing.Callable, + raise_app_exceptions: bool = True, + root_path: str = "", + client: typing.Tuple[str, int] = ("127.0.0.1", 123), + ) -> None: + self.app = app + self.raise_app_exceptions = raise_app_exceptions + self.root_path = root_path + self.client = client + + async def handle_async_request( + self, + method: bytes, + url: typing.Tuple[bytes, bytes, typing.Optional[int], bytes], + headers: typing.List[typing.Tuple[bytes, bytes]], + stream: AsyncByteStream, + extensions: dict, + ) -> typing.Tuple[ + int, typing.List[typing.Tuple[bytes, bytes]], AsyncByteStream, dict + ]: + # ASGI scope. + scheme, host, port, full_path = url + path, _, query = full_path.partition(b"?") + scope = { + "type": "http", + "asgi": {"version": "3.0"}, + "http_version": "1.1", + "method": method.decode(), + "headers": [(k.lower(), v) for (k, v) in headers], + "scheme": scheme.decode("ascii"), + "path": unquote(path.decode("ascii")), + "raw_path": path, + "query_string": query, + "server": (host.decode("ascii"), port), + "client": self.client, + "root_path": self.root_path, + } + + # Request. + request_body_chunks = stream.__aiter__() + request_complete = False + + # Response. + status_code = None + response_headers = None + body_parts = [] + response_started = False + response_complete = create_event() + + # ASGI callables. 
+ + async def receive() -> dict: + nonlocal request_complete + + if request_complete: + await response_complete.wait() + return {"type": "http.disconnect"} + + try: + body = await request_body_chunks.__anext__() + except StopAsyncIteration: + request_complete = True + return {"type": "http.request", "body": b"", "more_body": False} + return {"type": "http.request", "body": body, "more_body": True} + + async def send(message: dict) -> None: + nonlocal status_code, response_headers, response_started + + if message["type"] == "http.response.start": + assert not response_started + + status_code = message["status"] + response_headers = message.get("headers", []) + response_started = True + + elif message["type"] == "http.response.body": + assert not response_complete.is_set() + body = message.get("body", b"") + more_body = message.get("more_body", False) + + if body and method != b"HEAD": + body_parts.append(body) + + if not more_body: + response_complete.set() + + try: + await self.app(scope, receive, send) + except Exception: + if self.raise_app_exceptions or not response_complete.is_set(): + raise + + assert response_complete.is_set() + assert status_code is not None + assert response_headers is not None + + stream = ASGIResponseStream(body_parts) + extensions = {} + + return (status_code, response_headers, stream, extensions) diff --git a/.venv/lib/python3.9/site-packages/httpx/_transports/base.py b/.venv/lib/python3.9/site-packages/httpx/_transports/base.py new file mode 100644 index 0000000..eb51926 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpx/_transports/base.py @@ -0,0 +1,183 @@ +import typing +from types import TracebackType + +T = typing.TypeVar("T", bound="BaseTransport") +A = typing.TypeVar("A", bound="AsyncBaseTransport") + + +class SyncByteStream: + def __iter__(self) -> typing.Iterator[bytes]: + raise NotImplementedError( + "The '__iter__' method must be implemented." 
+ ) # pragma: nocover + yield b"" # pragma: nocover + + def close(self) -> None: + """ + Subclasses can override this method to release any network resources + after a request/response cycle is complete. + + Streaming cases should use a `try...finally` block to ensure that + the stream `close()` method is always called. + + Example: + + status_code, headers, stream, extensions = transport.handle_request(...) + try: + ... + finally: + stream.close() + """ + + def read(self) -> bytes: + """ + Simple cases can use `.read()` as a convience method for consuming + the entire stream and then closing it. + + Example: + + status_code, headers, stream, extensions = transport.handle_request(...) + body = stream.read() + """ + try: + return b"".join([part for part in self]) + finally: + self.close() + + +class AsyncByteStream: + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + raise NotImplementedError( + "The '__aiter__' method must be implemented." + ) # pragma: nocover + yield b"" # pragma: nocover + + async def aclose(self) -> None: + pass + + async def aread(self) -> bytes: + try: + return b"".join([part async for part in self]) + finally: + await self.aclose() + + +class BaseTransport: + def __enter__(self: T) -> T: + return self + + def __exit__( + self, + exc_type: typing.Type[BaseException] = None, + exc_value: BaseException = None, + traceback: TracebackType = None, + ) -> None: + self.close() + + def handle_request( + self, + method: bytes, + url: typing.Tuple[bytes, bytes, typing.Optional[int], bytes], + headers: typing.List[typing.Tuple[bytes, bytes]], + stream: SyncByteStream, + extensions: dict, + ) -> typing.Tuple[ + int, typing.List[typing.Tuple[bytes, bytes]], SyncByteStream, dict + ]: + """ + Send a single HTTP request and return a response. + + At this layer of API we're simply using plain primitives. No `Request` or + `Response` models, no fancy `URL` or `Header` handling. 
This strict point + of cut-off provides a clear design seperation between the HTTPX API, + and the low-level network handling. + + Developers shouldn't typically ever need to call into this API directly, + since the Client class provides all the higher level user-facing API + niceties. + + In order to properly release any network resources, the response stream + should *either* be consumed immediately, with a call to `stream.read()`, + or else the `handle_request` call should be followed with a try/finally + block to ensuring the stream is always closed. + + Example usage: + + with httpx.HTTPTransport() as transport: + status_code, headers, stream, extensions = transport.handle_request( + method=b'GET', + url=(b'https', b'www.example.com', 443, b'/'), + headers=[(b'Host', b'www.example.com')], + stream=[], + extensions={} + ) + body = stream.read() + print(status_code, headers, body) + + Arguments: + + method: The request method as bytes. Eg. b'GET'. + url: The components of the request URL, as a tuple of `(scheme, host, port, target)`. + The target will usually be the URL path, but also allows for alternative + formulations, such as proxy requests which include the complete URL in + the target portion of the HTTP request, or for "OPTIONS *" requests, which + cannot be expressed in a URL string. + headers: The request headers as a list of byte pairs. + stream: The request body as a bytes iterator. + extensions: An open ended dictionary, including optional extensions to the + core request/response API. Keys may include: + timeout: A dictionary of str:Optional[float] timeout values. + May include values for 'connect', 'read', 'write', or 'pool'. + + Returns a tuple of: + + status_code: The response status code as an integer. Should be in the range 1xx-5xx. + headers: The response headers as a list of byte pairs. + stream: The response body as a bytes iterator. + extensions: An open ended dictionary, including optional extensions to the + core request/response API. 
Keys are plain strings, and may include: + reason_phrase: The reason-phrase of the HTTP response, as bytes. Eg b'OK'. + HTTP/2 onwards does not include a reason phrase on the wire. + When no key is included, a default based on the status code may + be used. An empty-string reason phrase should not be substituted + for a default, as it indicates the server left the portion blank + eg. the leading response bytes were b"HTTP/1.1 200 ". + http_version: The HTTP version, as bytes. Eg. b"HTTP/1.1". + When no http_version key is included, HTTP/1.1 may be assumed. + """ + raise NotImplementedError( + "The 'handle_request' method must be implemented." + ) # pragma: nocover + + def close(self) -> None: + pass + + +class AsyncBaseTransport: + async def __aenter__(self: A) -> A: + return self + + async def __aexit__( + self, + exc_type: typing.Type[BaseException] = None, + exc_value: BaseException = None, + traceback: TracebackType = None, + ) -> None: + await self.aclose() + + async def handle_async_request( + self, + method: bytes, + url: typing.Tuple[bytes, bytes, typing.Optional[int], bytes], + headers: typing.List[typing.Tuple[bytes, bytes]], + stream: AsyncByteStream, + extensions: dict, + ) -> typing.Tuple[ + int, typing.List[typing.Tuple[bytes, bytes]], AsyncByteStream, dict + ]: + raise NotImplementedError( + "The 'handle_async_request' method must be implemented." + ) # pragma: nocover + + async def aclose(self) -> None: + pass diff --git a/.venv/lib/python3.9/site-packages/httpx/_transports/default.py b/.venv/lib/python3.9/site-packages/httpx/_transports/default.py new file mode 100644 index 0000000..ae6c2d1 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpx/_transports/default.py @@ -0,0 +1,296 @@ +""" +Custom transports, with nicely configured defaults. + +The following additional keyword arguments are currently supported by httpcore... 
+ +* uds: str +* local_address: str +* retries: int +* backend: str ("auto", "asyncio", "trio", "curio", "anyio", "sync") + +Example usages... + +# Disable HTTP/2 on a single specfic domain. +mounts = { + "all://": httpx.HTTPTransport(http2=True), + "all://*example.org": httpx.HTTPTransport() +} + +# Using advanced httpcore configuration, with connection retries. +transport = httpx.HTTPTransport(retries=1) +client = httpx.Client(transport=transport) + +# Using advanced httpcore configuration, with unix domain sockets. +transport = httpx.HTTPTransport(uds="socket.uds") +client = httpx.Client(transport=transport) +""" +import contextlib +import typing +from types import TracebackType + +import httpcore + +from .._config import DEFAULT_LIMITS, Limits, Proxy, create_ssl_context +from .._exceptions import ( + CloseError, + ConnectError, + ConnectTimeout, + LocalProtocolError, + NetworkError, + PoolTimeout, + ProtocolError, + ProxyError, + ReadError, + ReadTimeout, + RemoteProtocolError, + TimeoutException, + UnsupportedProtocol, + WriteError, + WriteTimeout, +) +from .._types import CertTypes, VerifyTypes +from .base import AsyncBaseTransport, AsyncByteStream, BaseTransport, SyncByteStream + +T = typing.TypeVar("T", bound="HTTPTransport") +A = typing.TypeVar("A", bound="AsyncHTTPTransport") + + +@contextlib.contextmanager +def map_httpcore_exceptions() -> typing.Iterator[None]: + try: + yield + except Exception as exc: + mapped_exc = None + + for from_exc, to_exc in HTTPCORE_EXC_MAP.items(): + if not isinstance(exc, from_exc): + continue + # We want to map to the most specific exception we can find. + # Eg if `exc` is an `httpcore.ReadTimeout`, we want to map to + # `httpx.ReadTimeout`, not just `httpx.TimeoutException`. 
+ if mapped_exc is None or issubclass(to_exc, mapped_exc): + mapped_exc = to_exc + + if mapped_exc is None: # pragma: nocover + raise + + message = str(exc) + raise mapped_exc(message) from exc + + +HTTPCORE_EXC_MAP = { + httpcore.TimeoutException: TimeoutException, + httpcore.ConnectTimeout: ConnectTimeout, + httpcore.ReadTimeout: ReadTimeout, + httpcore.WriteTimeout: WriteTimeout, + httpcore.PoolTimeout: PoolTimeout, + httpcore.NetworkError: NetworkError, + httpcore.ConnectError: ConnectError, + httpcore.ReadError: ReadError, + httpcore.WriteError: WriteError, + httpcore.CloseError: CloseError, + httpcore.ProxyError: ProxyError, + httpcore.UnsupportedProtocol: UnsupportedProtocol, + httpcore.ProtocolError: ProtocolError, + httpcore.LocalProtocolError: LocalProtocolError, + httpcore.RemoteProtocolError: RemoteProtocolError, +} + + +class ResponseStream(SyncByteStream): + def __init__(self, httpcore_stream: httpcore.SyncByteStream): + self._httpcore_stream = httpcore_stream + + def __iter__(self) -> typing.Iterator[bytes]: + with map_httpcore_exceptions(): + for part in self._httpcore_stream: + yield part + + def close(self) -> None: + with map_httpcore_exceptions(): + self._httpcore_stream.close() + + +class HTTPTransport(BaseTransport): + def __init__( + self, + verify: VerifyTypes = True, + cert: CertTypes = None, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + trust_env: bool = True, + proxy: Proxy = None, + uds: str = None, + local_address: str = None, + retries: int = 0, + backend: str = "sync", + ) -> None: + ssl_context = create_ssl_context(verify=verify, cert=cert, trust_env=trust_env) + + if proxy is None: + self._pool = httpcore.SyncConnectionPool( + ssl_context=ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http1=http1, + http2=http2, + uds=uds, + local_address=local_address, + retries=retries, + 
backend=backend, + ) + else: + self._pool = httpcore.SyncHTTPProxy( + proxy_url=proxy.url.raw, + proxy_headers=proxy.headers.raw, + proxy_mode=proxy.mode, + ssl_context=ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http2=http2, + backend=backend, + ) + + def __enter__(self: T) -> T: # Use generics for subclass support. + self._pool.__enter__() + return self + + def __exit__( + self, + exc_type: typing.Type[BaseException] = None, + exc_value: BaseException = None, + traceback: TracebackType = None, + ) -> None: + with map_httpcore_exceptions(): + self._pool.__exit__(exc_type, exc_value, traceback) + + def handle_request( + self, + method: bytes, + url: typing.Tuple[bytes, bytes, typing.Optional[int], bytes], + headers: typing.List[typing.Tuple[bytes, bytes]], + stream: SyncByteStream, + extensions: dict, + ) -> typing.Tuple[ + int, typing.List[typing.Tuple[bytes, bytes]], SyncByteStream, dict + ]: + with map_httpcore_exceptions(): + status_code, headers, byte_stream, extensions = self._pool.handle_request( + method=method, + url=url, + headers=headers, + stream=httpcore.IteratorByteStream(iter(stream)), + extensions=extensions, + ) + + stream = ResponseStream(byte_stream) + + return status_code, headers, stream, extensions + + def close(self) -> None: + self._pool.close() + + +class AsyncResponseStream(AsyncByteStream): + def __init__(self, httpcore_stream: httpcore.AsyncByteStream): + self._httpcore_stream = httpcore_stream + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + with map_httpcore_exceptions(): + async for part in self._httpcore_stream: + yield part + + async def aclose(self) -> None: + with map_httpcore_exceptions(): + await self._httpcore_stream.aclose() + + +class AsyncHTTPTransport(AsyncBaseTransport): + def __init__( + self, + verify: VerifyTypes = True, + cert: CertTypes = None, + http1: bool = True, + http2: bool = 
False, + limits: Limits = DEFAULT_LIMITS, + trust_env: bool = True, + proxy: Proxy = None, + uds: str = None, + local_address: str = None, + retries: int = 0, + backend: str = "auto", + ) -> None: + ssl_context = create_ssl_context(verify=verify, cert=cert, trust_env=trust_env) + + if proxy is None: + self._pool = httpcore.AsyncConnectionPool( + ssl_context=ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http1=http1, + http2=http2, + uds=uds, + local_address=local_address, + retries=retries, + backend=backend, + ) + else: + self._pool = httpcore.AsyncHTTPProxy( + proxy_url=proxy.url.raw, + proxy_headers=proxy.headers.raw, + proxy_mode=proxy.mode, + ssl_context=ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http2=http2, + backend=backend, + ) + + async def __aenter__(self: A) -> A: # Use generics for subclass support. 
+ await self._pool.__aenter__() + return self + + async def __aexit__( + self, + exc_type: typing.Type[BaseException] = None, + exc_value: BaseException = None, + traceback: TracebackType = None, + ) -> None: + with map_httpcore_exceptions(): + await self._pool.__aexit__(exc_type, exc_value, traceback) + + async def handle_async_request( + self, + method: bytes, + url: typing.Tuple[bytes, bytes, typing.Optional[int], bytes], + headers: typing.List[typing.Tuple[bytes, bytes]], + stream: AsyncByteStream, + extensions: dict, + ) -> typing.Tuple[ + int, typing.List[typing.Tuple[bytes, bytes]], AsyncByteStream, dict + ]: + with map_httpcore_exceptions(): + ( + status_code, + headers, + byte_stream, + extensions, + ) = await self._pool.handle_async_request( + method=method, + url=url, + headers=headers, + stream=httpcore.AsyncIteratorByteStream(stream.__aiter__()), + extensions=extensions, + ) + + stream = AsyncResponseStream(byte_stream) + + return status_code, headers, stream, extensions + + async def aclose(self) -> None: + await self._pool.aclose() diff --git a/.venv/lib/python3.9/site-packages/httpx/_transports/mock.py b/.venv/lib/python3.9/site-packages/httpx/_transports/mock.py new file mode 100644 index 0000000..8d59b73 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpx/_transports/mock.py @@ -0,0 +1,70 @@ +import asyncio +import typing + +from .._models import Request +from .base import AsyncBaseTransport, AsyncByteStream, BaseTransport, SyncByteStream + + +class MockTransport(AsyncBaseTransport, BaseTransport): + def __init__(self, handler: typing.Callable) -> None: + self.handler = handler + + def handle_request( + self, + method: bytes, + url: typing.Tuple[bytes, bytes, typing.Optional[int], bytes], + headers: typing.List[typing.Tuple[bytes, bytes]], + stream: SyncByteStream, + extensions: dict, + ) -> typing.Tuple[ + int, typing.List[typing.Tuple[bytes, bytes]], SyncByteStream, dict + ]: + request = Request( + method=method, + url=url, + 
headers=headers, + stream=stream, + ) + request.read() + response = self.handler(request) + return ( + response.status_code, + response.headers.raw, + response.stream, + response.extensions, + ) + + async def handle_async_request( + self, + method: bytes, + url: typing.Tuple[bytes, bytes, typing.Optional[int], bytes], + headers: typing.List[typing.Tuple[bytes, bytes]], + stream: AsyncByteStream, + extensions: dict, + ) -> typing.Tuple[ + int, typing.List[typing.Tuple[bytes, bytes]], AsyncByteStream, dict + ]: + request = Request( + method=method, + url=url, + headers=headers, + stream=stream, + ) + await request.aread() + + response = self.handler(request) + + # Allow handler to *optionally* be an `async` function. + # If it is, then the `response` variable need to be awaited to actually + # return the result. + + # https://simonwillison.net/2020/Sep/2/await-me-maybe/ + if asyncio.iscoroutine(response): + response = await response + + return ( + response.status_code, + response.headers.raw, + response.stream, + response.extensions, + ) diff --git a/.venv/lib/python3.9/site-packages/httpx/_transports/wsgi.py b/.venv/lib/python3.9/site-packages/httpx/_transports/wsgi.py new file mode 100644 index 0000000..c8266c7 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpx/_transports/wsgi.py @@ -0,0 +1,138 @@ +import io +import itertools +import typing +from urllib.parse import unquote + +from .base import BaseTransport, SyncByteStream + + +def _skip_leading_empty_chunks(body: typing.Iterable) -> typing.Iterable: + body = iter(body) + for chunk in body: + if chunk: + return itertools.chain([chunk], body) + return [] + + +class WSGIByteStream(SyncByteStream): + def __init__(self, result: typing.Iterable[bytes]) -> None: + self._result = _skip_leading_empty_chunks(result) + + def __iter__(self) -> typing.Iterator[bytes]: + for part in self._result: + yield part + + +class WSGITransport(BaseTransport): + """ + A custom transport that handles sending requests directly to 
an WSGI app. + The simplest way to use this functionality is to use the `app` argument. + + ``` + client = httpx.Client(app=app) + ``` + + Alternatively, you can setup the transport instance explicitly. + This allows you to include any additional configuration arguments specific + to the WSGITransport class: + + ``` + transport = httpx.WSGITransport( + app=app, + script_name="/submount", + remote_addr="1.2.3.4" + ) + client = httpx.Client(transport=transport) + ``` + + Arguments: + + * `app` - The ASGI application. + * `raise_app_exceptions` - Boolean indicating if exceptions in the application + should be raised. Default to `True`. Can be set to `False` for use cases + such as testing the content of a client 500 response. + * `script_name` - The root path on which the ASGI application should be mounted. + * `remote_addr` - A string indicating the client IP of incoming requests. + ``` + """ + + def __init__( + self, + app: typing.Callable, + raise_app_exceptions: bool = True, + script_name: str = "", + remote_addr: str = "127.0.0.1", + ) -> None: + self.app = app + self.raise_app_exceptions = raise_app_exceptions + self.script_name = script_name + self.remote_addr = remote_addr + + def handle_request( + self, + method: bytes, + url: typing.Tuple[bytes, bytes, typing.Optional[int], bytes], + headers: typing.List[typing.Tuple[bytes, bytes]], + stream: SyncByteStream, + extensions: dict, + ) -> typing.Tuple[ + int, typing.List[typing.Tuple[bytes, bytes]], SyncByteStream, dict + ]: + wsgi_input = io.BytesIO(b"".join(stream)) + + scheme, host, port, full_path = url + path, _, query = full_path.partition(b"?") + if port is None: + port = {b"http": 80, b"https": 443}[scheme] + + environ = { + "wsgi.version": (1, 0), + "wsgi.url_scheme": scheme.decode("ascii"), + "wsgi.input": wsgi_input, + "wsgi.errors": io.BytesIO(), + "wsgi.multithread": True, + "wsgi.multiprocess": False, + "wsgi.run_once": False, + "REQUEST_METHOD": method.decode(), + "SCRIPT_NAME": self.script_name, 
+ "PATH_INFO": unquote(path.decode("ascii")), + "QUERY_STRING": query.decode("ascii"), + "SERVER_NAME": host.decode("ascii"), + "SERVER_PORT": str(port), + "REMOTE_ADDR": self.remote_addr, + } + for header_key, header_value in headers: + key = header_key.decode("ascii").upper().replace("-", "_") + if key not in ("CONTENT_TYPE", "CONTENT_LENGTH"): + key = "HTTP_" + key + environ[key] = header_value.decode("ascii") + + seen_status = None + seen_response_headers = None + seen_exc_info = None + + def start_response( + status: str, response_headers: list, exc_info: typing.Any = None + ) -> None: + nonlocal seen_status, seen_response_headers, seen_exc_info + seen_status = status + seen_response_headers = response_headers + seen_exc_info = exc_info + + result = self.app(environ, start_response) + + stream = WSGIByteStream(result) + + assert seen_status is not None + assert seen_response_headers is not None + if seen_exc_info and self.raise_app_exceptions: + raise seen_exc_info[1] + + status_code = int(seen_status.split()[0]) + headers = [ + (key.encode("ascii"), value.encode("ascii")) + for key, value in seen_response_headers + ] + extensions = {} + + return (status_code, headers, stream, extensions) diff --git a/.venv/lib/python3.9/site-packages/httpx/_types.py b/.venv/lib/python3.9/site-packages/httpx/_types.py new file mode 100644 index 0000000..75bb900 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpx/_types.py @@ -0,0 +1,91 @@ +""" +Type definitions for type checking purposes. 
+""" + +import ssl +from http.cookiejar import CookieJar +from typing import ( + IO, + TYPE_CHECKING, + AsyncIterable, + Callable, + Dict, + Iterable, + List, + Mapping, + Optional, + Sequence, + Tuple, + Union, +) + +if TYPE_CHECKING: # pragma: no cover + from ._auth import Auth # noqa: F401 + from ._config import Proxy, Timeout # noqa: F401 + from ._models import URL, Cookies, Headers, QueryParams, Request # noqa: F401 + + +PrimitiveData = Optional[Union[str, int, float, bool]] + +RawURL = Tuple[bytes, bytes, Optional[int], bytes] + +URLTypes = Union["URL", str] + +QueryParamTypes = Union[ + "QueryParams", + Mapping[str, Union[PrimitiveData, Sequence[PrimitiveData]]], + List[Tuple[str, PrimitiveData]], + Tuple[Tuple[str, PrimitiveData], ...], + str, + bytes, + None, +] + +HeaderTypes = Union[ + "Headers", + Dict[str, str], + Dict[bytes, bytes], + Sequence[Tuple[str, str]], + Sequence[Tuple[bytes, bytes]], +] + +CookieTypes = Union["Cookies", CookieJar, Dict[str, str], List[Tuple[str, str]]] + +CertTypes = Union[ + # certfile + str, + # (certfile, keyfile) + Tuple[str, Optional[str]], + # (certfile, keyfile, password) + Tuple[str, Optional[str], Optional[str]], +] +VerifyTypes = Union[str, bool, ssl.SSLContext] +TimeoutTypes = Union[ + Optional[float], + Tuple[Optional[float], Optional[float], Optional[float], Optional[float]], + "Timeout", +] +ProxiesTypes = Union[URLTypes, "Proxy", Dict[URLTypes, Union[None, URLTypes, "Proxy"]]] + +AuthTypes = Union[ + Tuple[Union[str, bytes], Union[str, bytes]], + Callable[["Request"], "Request"], + "Auth", + None, +] + +RequestContent = Union[str, bytes, Iterable[bytes], AsyncIterable[bytes]] +ResponseContent = Union[str, bytes, Iterable[bytes], AsyncIterable[bytes]] + +RequestData = dict + +FileContent = Union[IO[str], IO[bytes], str, bytes] +FileTypes = Union[ + # file (or text) + FileContent, + # (filename, file (or text)) + Tuple[Optional[str], FileContent], + # (filename, file (or text), content_type) + 
Tuple[Optional[str], FileContent, Optional[str]], +] +RequestFiles = Union[Mapping[str, FileTypes], Sequence[Tuple[str, FileTypes]]] diff --git a/.venv/lib/python3.9/site-packages/httpx/_utils.py b/.venv/lib/python3.9/site-packages/httpx/_utils.py new file mode 100644 index 0000000..30ab2ed --- /dev/null +++ b/.venv/lib/python3.9/site-packages/httpx/_utils.py @@ -0,0 +1,508 @@ +import codecs +import logging +import mimetypes +import netrc +import os +import re +import sys +import time +import typing +from pathlib import Path +from urllib.request import getproxies + +import sniffio + +from ._types import PrimitiveData + +if typing.TYPE_CHECKING: # pragma: no cover + from ._models import URL + + +_HTML5_FORM_ENCODING_REPLACEMENTS = {'"': "%22", "\\": "\\\\"} +_HTML5_FORM_ENCODING_REPLACEMENTS.update( + {chr(c): "%{:02X}".format(c) for c in range(0x00, 0x1F + 1) if c != 0x1B} +) +_HTML5_FORM_ENCODING_RE = re.compile( + r"|".join([re.escape(c) for c in _HTML5_FORM_ENCODING_REPLACEMENTS.keys()]) +) + + +def normalize_header_key( + value: typing.Union[str, bytes], + lower: bool, + encoding: str = None, +) -> bytes: + """ + Coerce str/bytes into a strictly byte-wise HTTP header key. + """ + if isinstance(value, bytes): + bytes_value = value + else: + bytes_value = value.encode(encoding or "ascii") + + return bytes_value.lower() if lower else bytes_value + + +def normalize_header_value( + value: typing.Union[str, bytes], encoding: str = None +) -> bytes: + """ + Coerce str/bytes into a strictly byte-wise HTTP header value. + """ + if isinstance(value, bytes): + return value + return value.encode(encoding or "ascii") + + +def primitive_value_to_str(value: "PrimitiveData") -> str: + """ + Coerce a primitive data type into a string value. + + Note that we prefer JSON-style 'true'/'false' for boolean values here. 
+ """ + if value is True: + return "true" + elif value is False: + return "false" + elif value is None: + return "" + return str(value) + + +def is_known_encoding(encoding: str) -> bool: + """ + Return `True` if `encoding` is a known codec. + """ + try: + codecs.lookup(encoding) + except LookupError: + return False + return True + + +def format_form_param(name: str, value: typing.Union[str, bytes]) -> bytes: + """ + Encode a name/value pair within a multipart form. + """ + if isinstance(value, bytes): + value = value.decode() + + def replacer(match: typing.Match[str]) -> str: + return _HTML5_FORM_ENCODING_REPLACEMENTS[match.group(0)] + + value = _HTML5_FORM_ENCODING_RE.sub(replacer, value) + return f'{name}="{value}"'.encode() + + +# Null bytes; no need to recreate these on each call to guess_json_utf +_null = b"\x00" +_null2 = _null * 2 +_null3 = _null * 3 + + +def guess_json_utf(data: bytes) -> typing.Optional[str]: + # JSON always starts with two ASCII characters, so detection is as + # easy as counting the nulls and from their location and count + # determine the encoding. Also detect a BOM, if present. 
+ sample = data[:4] + if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE): + return "utf-32" # BOM included + if sample[:3] == codecs.BOM_UTF8: + return "utf-8-sig" # BOM included, MS style (discouraged) + if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE): + return "utf-16" # BOM included + nullcount = sample.count(_null) + if nullcount == 0: + return "utf-8" + if nullcount == 2: + if sample[::2] == _null2: # 1st and 3rd are null + return "utf-16-be" + if sample[1::2] == _null2: # 2nd and 4th are null + return "utf-16-le" + # Did not detect 2 valid UTF-16 ascii-range characters + if nullcount == 3: + if sample[:3] == _null3: + return "utf-32-be" + if sample[1:] == _null3: + return "utf-32-le" + # Did not detect a valid UTF-32 ascii-range character + return None + + +class NetRCInfo: + def __init__(self, files: typing.Optional[typing.List[str]] = None) -> None: + if files is None: + files = [os.getenv("NETRC", ""), "~/.netrc", "~/_netrc"] + self.netrc_files = files + + @property + def netrc_info(self) -> typing.Optional[netrc.netrc]: + if not hasattr(self, "_netrc_info"): + self._netrc_info = None + for file_path in self.netrc_files: + expanded_path = Path(file_path).expanduser() + try: + if expanded_path.is_file(): + self._netrc_info = netrc.netrc(str(expanded_path)) + break + except (netrc.NetrcParseError, IOError): # pragma: nocover + # Issue while reading the netrc file, ignore... 
+ pass + return self._netrc_info + + def get_credentials(self, host: str) -> typing.Optional[typing.Tuple[str, str]]: + if self.netrc_info is None: + return None + + auth_info = self.netrc_info.authenticators(host) + if auth_info is None or auth_info[2] is None: + return None + return (auth_info[0], auth_info[2]) + + +def get_ca_bundle_from_env() -> typing.Optional[str]: + if "SSL_CERT_FILE" in os.environ: + ssl_file = Path(os.environ["SSL_CERT_FILE"]) + if ssl_file.is_file(): + return str(ssl_file) + if "SSL_CERT_DIR" in os.environ: + ssl_path = Path(os.environ["SSL_CERT_DIR"]) + if ssl_path.is_dir(): + return str(ssl_path) + return None + + +def parse_header_links(value: str) -> typing.List[typing.Dict[str, str]]: + """ + Returns a list of parsed link headers, for more info see: + https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Link + The generic syntax of those is: + Link: < uri-reference >; param1=value1; param2="value2" + So for instance: + Link; '; type="image/jpeg",;' + would return + [ + {"url": "http:/.../front.jpeg", "type": "image/jpeg"}, + {"url": "http://.../back.jpeg"}, + ] + :param value: HTTP Link entity-header field + :return: list of parsed link headers + """ + links: typing.List[typing.Dict[str, str]] = [] + replace_chars = " '\"" + value = value.strip(replace_chars) + if not value: + return links + for val in re.split(", *<", value): + try: + url, params = val.split(";", 1) + except ValueError: + url, params = val, "" + link = {"url": url.strip("<> '\"")} + for param in params.split(";"): + try: + key, value = param.split("=") + except ValueError: + break + link[key.strip(replace_chars)] = value.strip(replace_chars) + links.append(link) + return links + + +SENSITIVE_HEADERS = {"authorization", "proxy-authorization"} + + +def obfuscate_sensitive_headers( + items: typing.Iterable[typing.Tuple[typing.AnyStr, typing.AnyStr]] +) -> typing.Iterator[typing.Tuple[typing.AnyStr, typing.AnyStr]]: + for k, v in items: + if to_str(k.lower()) in 
SENSITIVE_HEADERS: + v = to_bytes_or_str("[secure]", match_type_of=v) + yield k, v + + +_LOGGER_INITIALIZED = False +TRACE_LOG_LEVEL = 5 + + +class Logger(logging.Logger): + # Stub for type checkers. + def trace(self, message: str, *args: typing.Any, **kwargs: typing.Any) -> None: + ... # pragma: nocover + + +def get_logger(name: str) -> Logger: + """ + Get a `logging.Logger` instance, and optionally + set up debug logging based on the HTTPX_LOG_LEVEL environment variable. + """ + global _LOGGER_INITIALIZED + + if not _LOGGER_INITIALIZED: + _LOGGER_INITIALIZED = True + logging.addLevelName(TRACE_LOG_LEVEL, "TRACE") + + log_level = os.environ.get("HTTPX_LOG_LEVEL", "").upper() + if log_level in ("DEBUG", "TRACE"): + logger = logging.getLogger("httpx") + logger.setLevel(logging.DEBUG if log_level == "DEBUG" else TRACE_LOG_LEVEL) + handler = logging.StreamHandler(sys.stderr) + handler.setFormatter( + logging.Formatter( + fmt="%(levelname)s [%(asctime)s] %(name)s - %(message)s", + datefmt="%Y-%m-%d %H:%M:%S", + ) + ) + logger.addHandler(handler) + + logger = logging.getLogger(name) + + def trace(message: str, *args: typing.Any, **kwargs: typing.Any) -> None: + logger.log(TRACE_LOG_LEVEL, message, *args, **kwargs) + + logger.trace = trace # type: ignore + + return typing.cast(Logger, logger) + + +def port_or_default(url: "URL") -> typing.Optional[int]: + if url.port is not None: + return url.port + return {"http": 80, "https": 443}.get(url.scheme) + + +def same_origin(url: "URL", other: "URL") -> bool: + """ + Return 'True' if the given URLs share the same origin. + """ + return ( + url.scheme == other.scheme + and url.host == other.host + and port_or_default(url) == port_or_default(other) + ) + + +def get_environment_proxies() -> typing.Dict[str, typing.Optional[str]]: + """Gets proxy information from the environment""" + + # urllib.request.getproxies() falls back on System + # Registry and Config for proxies on Windows and macOS. 
+ # We don't want to propagate non-HTTP proxies into + # our configuration such as 'TRAVIS_APT_PROXY'. + proxy_info = getproxies() + mounts: typing.Dict[str, typing.Optional[str]] = {} + + for scheme in ("http", "https", "all"): + if proxy_info.get(scheme): + hostname = proxy_info[scheme] + mounts[f"{scheme}://"] = ( + hostname if "://" in hostname else f"http://{hostname}" + ) + + no_proxy_hosts = [host.strip() for host in proxy_info.get("no", "").split(",")] + for hostname in no_proxy_hosts: + # See https://curl.haxx.se/libcurl/c/CURLOPT_NOPROXY.html for details + # on how names in `NO_PROXY` are handled. + if hostname == "*": + # If NO_PROXY=* is used or if "*" occurs as any one of the comma + # seperated hostnames, then we should just bypass any information + # from HTTP_PROXY, HTTPS_PROXY, ALL_PROXY, and always ignore + # proxies. + return {} + elif hostname: + # NO_PROXY=.google.com is marked as "all://*.google.com, + # which disables "www.google.com" but not "google.com" + # NO_PROXY=google.com is marked as "all://*google.com, + # which disables "www.google.com" and "google.com". 
+ # (But not "wwwgoogle.com") + mounts[f"all://*{hostname}"] = None + + return mounts + + +def to_bytes(value: typing.Union[str, bytes], encoding: str = "utf-8") -> bytes: + return value.encode(encoding) if isinstance(value, str) else value + + +def to_str(value: typing.Union[str, bytes], encoding: str = "utf-8") -> str: + return value if isinstance(value, str) else value.decode(encoding) + + +def to_bytes_or_str(value: str, match_type_of: typing.AnyStr) -> typing.AnyStr: + return value if isinstance(match_type_of, str) else value.encode() + + +def unquote(value: str) -> str: + return value[1:-1] if value[0] == value[-1] == '"' else value + + +def guess_content_type(filename: typing.Optional[str]) -> typing.Optional[str]: + if filename: + return mimetypes.guess_type(filename)[0] or "application/octet-stream" + return None + + +def peek_filelike_length(stream: typing.Any) -> typing.Optional[int]: + """ + Given a file-like stream object, return its length in number of bytes + without reading it into memory. + """ + try: + # Is it an actual file? + fd = stream.fileno() + # Yup, seems to be an actual file. + length = os.fstat(fd).st_size + except (AttributeError, OSError): + # No... Maybe it's something that supports random access, like `io.BytesIO`? + try: + # Assuming so, go to end of stream to figure out its length, + # then put it back in place. + offset = stream.tell() + length = stream.seek(0, os.SEEK_END) + stream.seek(offset) + except (AttributeError, OSError): + # Not even that? Sorry, we're doomed... 
+ return None + + return length + + +class Timer: + async def _get_time(self) -> float: + library = sniffio.current_async_library() + if library == "trio": + import trio + + return trio.current_time() + elif library == "curio": # pragma: nocover + import curio + + return await curio.clock() + + import asyncio + + return asyncio.get_event_loop().time() + + def sync_start(self) -> None: + self.started = time.perf_counter() + + async def async_start(self) -> None: + self.started = await self._get_time() + + def sync_elapsed(self) -> float: + now = time.perf_counter() + return now - self.started + + async def async_elapsed(self) -> float: + now = await self._get_time() + return now - self.started + + +class URLPattern: + """ + A utility class currently used for making lookups against proxy keys... + + # Wildcard matching... + >>> pattern = URLPattern("all") + >>> pattern.matches(httpx.URL("http://example.com")) + True + + # Witch scheme matching... + >>> pattern = URLPattern("https") + >>> pattern.matches(httpx.URL("https://example.com")) + True + >>> pattern.matches(httpx.URL("http://example.com")) + False + + # With domain matching... + >>> pattern = URLPattern("https://example.com") + >>> pattern.matches(httpx.URL("https://example.com")) + True + >>> pattern.matches(httpx.URL("http://example.com")) + False + >>> pattern.matches(httpx.URL("https://other.com")) + False + + # Wildcard scheme, with domain matching... + >>> pattern = URLPattern("all://example.com") + >>> pattern.matches(httpx.URL("https://example.com")) + True + >>> pattern.matches(httpx.URL("http://example.com")) + True + >>> pattern.matches(httpx.URL("https://other.com")) + False + + # With port matching... 
+ >>> pattern = URLPattern("https://example.com:1234") + >>> pattern.matches(httpx.URL("https://example.com:1234")) + True + >>> pattern.matches(httpx.URL("https://example.com")) + False + """ + + def __init__(self, pattern: str) -> None: + from ._models import URL + + if pattern and ":" not in pattern: + raise ValueError( + f"Proxy keys should use proper URL forms rather " + f"than plain scheme strings. " + f'Instead of "{pattern}", use "{pattern}://"' + ) + + url = URL(pattern) + self.pattern = pattern + self.scheme = "" if url.scheme == "all" else url.scheme + self.host = "" if url.host == "*" else url.host + self.port = url.port + if not url.host or url.host == "*": + self.host_regex: typing.Optional[typing.Pattern[str]] = None + else: + if url.host.startswith("*."): + # *.example.com should match "www.example.com", but not "example.com" + domain = re.escape(url.host[2:]) + self.host_regex = re.compile(f"^.+\\.{domain}$") + elif url.host.startswith("*"): + # *example.com should match "www.example.com" and "example.com" + domain = re.escape(url.host[1:]) + self.host_regex = re.compile(f"^(.+\\.)?{domain}$") + else: + # example.com should match "example.com" but not "www.example.com" + domain = re.escape(url.host) + self.host_regex = re.compile(f"^{domain}$") + + def matches(self, other: "URL") -> bool: + if self.scheme and self.scheme != other.scheme: + return False + if ( + self.host + and self.host_regex is not None + and not self.host_regex.match(other.host) + ): + return False + if self.port is not None and self.port != other.port: + return False + return True + + @property + def priority(self) -> tuple: + """ + The priority allows URLPattern instances to be sortable, so that + we can match from most specific to least specific. + """ + # URLs with a port should take priority over URLs without a port. + port_priority = 0 if self.port is not None else 1 + # Longer hostnames should match first. 
+ host_priority = -len(self.host) + # Longer schemes should match first. + scheme_priority = -len(self.scheme) + return (port_priority, host_priority, scheme_priority) + + def __hash__(self) -> int: + return hash(self.pattern) + + def __lt__(self, other: "URLPattern") -> bool: + return self.priority < other.priority + + def __eq__(self, other: typing.Any) -> bool: + return isinstance(other, URLPattern) and self.pattern == other.pattern diff --git a/.venv/lib/python3.9/site-packages/httpx/py.typed b/.venv/lib/python3.9/site-packages/httpx/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.9/site-packages/idna-3.3.dist-info/INSTALLER b/.venv/lib/python3.9/site-packages/idna-3.3.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/idna-3.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.9/site-packages/idna-3.3.dist-info/LICENSE.md b/.venv/lib/python3.9/site-packages/idna-3.3.dist-info/LICENSE.md new file mode 100644 index 0000000..b6f8732 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/idna-3.3.dist-info/LICENSE.md @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2013-2021, Kim Davies +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/.venv/lib/python3.9/site-packages/idna-3.3.dist-info/METADATA b/.venv/lib/python3.9/site-packages/idna-3.3.dist-info/METADATA new file mode 100644 index 0000000..6446805 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/idna-3.3.dist-info/METADATA @@ -0,0 +1,236 @@ +Metadata-Version: 2.1 +Name: idna +Version: 3.3 +Summary: Internationalized Domain Names in Applications (IDNA) +Home-page: https://github.com/kjd/idna +Author: Kim Davies +Author-email: kim@cynosure.com.au +License: BSD-3-Clause +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: System Administrators +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language 
:: Python :: 3.10 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Internet :: Name Service (DNS) +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Utilities +Requires-Python: >=3.5 +License-File: LICENSE.md + +Internationalized Domain Names in Applications (IDNA) +===================================================== + +Support for the Internationalised Domain Names in Applications +(IDNA) protocol as specified in `RFC 5891 `_. +This is the latest version of the protocol and is sometimes referred to as +“IDNA 2008”. + +This library also provides support for Unicode Technical Standard 46, +`Unicode IDNA Compatibility Processing `_. + +This acts as a suitable replacement for the “encodings.idna” module that +comes with the Python standard library, but which only supports the +older superseded IDNA specification (`RFC 3490 `_). + +Basic functions are simply executed: + +.. code-block:: pycon + + >>> import idna + >>> idna.encode('ドメイン.テスト') + b'xn--eckwd4c7c.xn--zckzah' + >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah')) + ドメイン.テスト + + +Installation +------------ + +To install this library, you can use pip: + +.. code-block:: bash + + $ pip install idna + +Alternatively, you can install the package using the bundled setup script: + +.. code-block:: bash + + $ python setup.py install + + +Usage +----- + +For typical usage, the ``encode`` and ``decode`` functions will take a domain +name argument and perform a conversion to A-labels or U-labels respectively. + +.. code-block:: pycon + + >>> import idna + >>> idna.encode('ドメイン.テスト') + b'xn--eckwd4c7c.xn--zckzah' + >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah')) + ドメイン.テスト + +You may use the codec encoding and decoding methods using the +``idna.codec`` module: + +.. 
code-block:: pycon + + >>> import idna.codec + >>> print('домен.испытание'.encode('idna')) + b'xn--d1acufc.xn--80akhbyknj4f' + >>> print(b'xn--d1acufc.xn--80akhbyknj4f'.decode('idna')) + домен.испытание + +Conversions can be applied at a per-label basis using the ``ulabel`` or ``alabel`` +functions if necessary: + +.. code-block:: pycon + + >>> idna.alabel('测试') + b'xn--0zwm56d' + +Compatibility Mapping (UTS #46) ++++++++++++++++++++++++++++++++ + +As described in `RFC 5895 `_, the IDNA +specification does not normalize input from different potential ways a user +may input a domain name. This functionality, known as a “mapping”, is +considered by the specification to be a local user-interface issue distinct +from IDNA conversion functionality. + +This library provides one such mapping, that was developed by the Unicode +Consortium. Known as `Unicode IDNA Compatibility Processing `_, +it provides for both a regular mapping for typical applications, as well as +a transitional mapping to help migrate from older IDNA 2003 applications. + +For example, “Königsgäßchen” is not a permissible label as *LATIN CAPITAL +LETTER K* is not allowed (nor are capital letters in general). UTS 46 will +convert this into lower case prior to applying the IDNA conversion. + +.. code-block:: pycon + + >>> import idna + >>> idna.encode('Königsgäßchen') + ... + idna.core.InvalidCodepoint: Codepoint U+004B at position 1 of 'Königsgäßchen' not allowed + >>> idna.encode('Königsgäßchen', uts46=True) + b'xn--knigsgchen-b4a3dun' + >>> print(idna.decode('xn--knigsgchen-b4a3dun')) + königsgäßchen + +Transitional processing provides conversions to help transition from the older +2003 standard to the current standard. For example, in the original IDNA +specification, the *LATIN SMALL LETTER SHARP S* (ß) was converted into two +*LATIN SMALL LETTER S* (ss), whereas in the current IDNA specification this +conversion is not performed. + +.. 
code-block:: pycon + + >>> idna.encode('Königsgäßchen', uts46=True, transitional=True) + 'xn--knigsgsschen-lcb0w' + +Implementors should use transitional processing with caution, only in rare +cases where conversion from legacy labels to current labels must be performed +(i.e. IDNA implementations that pre-date 2008). For typical applications +that just need to convert labels, transitional processing is unlikely to be +beneficial and could produce unexpected incompatible results. + +``encodings.idna`` Compatibility +++++++++++++++++++++++++++++++++ + +Function calls from the Python built-in ``encodings.idna`` module are +mapped to their IDNA 2008 equivalents using the ``idna.compat`` module. +Simply substitute the ``import`` clause in your code to refer to the +new module name. + +Exceptions +---------- + +All errors raised during the conversion following the specification should +raise an exception derived from the ``idna.IDNAError`` base class. + +More specific exceptions that may be generated as ``idna.IDNABidiError`` +when the error reflects an illegal combination of left-to-right and +right-to-left characters in a label; ``idna.InvalidCodepoint`` when +a specific codepoint is an illegal character in an IDN label (i.e. +INVALID); and ``idna.InvalidCodepointContext`` when the codepoint is +illegal based on its positional context (i.e. it is CONTEXTO or CONTEXTJ +but the contextual requirements are not satisfied.) + +Building and Diagnostics +------------------------ + +The IDNA and UTS 46 functionality relies upon pre-calculated lookup +tables for performance. These tables are derived from computing against +eligibility criteria in the respective standards. These tables are +computed using the command-line script ``tools/idna-data``. + +This tool will fetch relevant codepoint data from the Unicode repository +and perform the required calculations to identify eligibility. There are +three main modes: + +* ``idna-data make-libdata``. 
Generates ``idnadata.py`` and ``uts46data.py``, + the pre-calculated lookup tables using for IDNA and UTS 46 conversions. Implementors + who wish to track this library against a different Unicode version may use this tool + to manually generate a different version of the ``idnadata.py`` and ``uts46data.py`` + files. + +* ``idna-data make-table``. Generate a table of the IDNA disposition + (e.g. PVALID, CONTEXTJ, CONTEXTO) in the format found in Appendix B.1 of RFC + 5892 and the pre-computed tables published by `IANA `_. + +* ``idna-data U+0061``. Prints debugging output on the various properties + associated with an individual Unicode codepoint (in this case, U+0061), that are + used to assess the IDNA and UTS 46 status of a codepoint. This is helpful in debugging + or analysis. + +The tool accepts a number of arguments, described using ``idna-data -h``. Most notably, +the ``--version`` argument allows the specification of the version of Unicode to use +in computing the table data. For example, ``idna-data --version 9.0.0 make-libdata`` +will generate library data against Unicode 9.0.0. + + +Additional Notes +---------------- + +* **Packages**. The latest tagged release version is published in the + `Python Package Index `_. + +* **Version support**. This library supports Python 3.5 and higher. As this library + serves as a low-level toolkit for a variety of applications, many of which strive + for broad compatibility with older Python versions, there is no rush to remove + older intepreter support. Removing support for older versions should be well + justified in that the maintenance burden has become too high. + +* **Python 2**. Python 2 is supported by version 2.x of this library. While active + development of the version 2.x series has ended, notable issues being corrected + may be backported to 2.x. Use "idna<3" in your requirements file if you need this + library for a Python 2 application. + +* **Testing**. 
The library has a test suite based on each rule of the IDNA specification, as + well as tests that are provided as part of the Unicode Technical Standard 46, + `Unicode IDNA Compatibility Processing `_. + +* **Emoji**. It is an occasional request to support emoji domains in this library. Encoding + of symbols like emoji is expressly prohibited by the technical standard IDNA 2008 and + emoji domains are broadly phased out across the domain industry due to associated security + risks. For now, applications that wish need to support these non-compliant labels may + wish to consider trying the encode/decode operation in this library first, and then falling + back to using `encodings.idna`. See `the Github project `_ + for more discussion. + diff --git a/.venv/lib/python3.9/site-packages/idna-3.3.dist-info/RECORD b/.venv/lib/python3.9/site-packages/idna-3.3.dist-info/RECORD new file mode 100644 index 0000000..2c9f0dd --- /dev/null +++ b/.venv/lib/python3.9/site-packages/idna-3.3.dist-info/RECORD @@ -0,0 +1,23 @@ +idna-3.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +idna-3.3.dist-info/LICENSE.md,sha256=otbk2UC9JNvnuWRc3hmpeSzFHbeuDVrNMBrIYMqj6DY,1523 +idna-3.3.dist-info/METADATA,sha256=BdqiAf8ou4x1nzIHp2_sDfXWjl7BrSUGpOeVzbYHQuQ,9765 +idna-3.3.dist-info/RECORD,, +idna-3.3.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 +idna-3.3.dist-info/top_level.txt,sha256=jSag9sEDqvSPftxOQy-ABfGV_RSy7oFh4zZJpODV8k0,5 +idna/__init__.py,sha256=KJQN1eQBr8iIK5SKrJ47lXvxG0BJ7Lm38W4zT0v_8lk,849 +idna/__pycache__/__init__.cpython-39.pyc,, +idna/__pycache__/codec.cpython-39.pyc,, +idna/__pycache__/compat.cpython-39.pyc,, +idna/__pycache__/core.cpython-39.pyc,, +idna/__pycache__/idnadata.cpython-39.pyc,, +idna/__pycache__/intranges.cpython-39.pyc,, +idna/__pycache__/package_data.cpython-39.pyc,, +idna/__pycache__/uts46data.cpython-39.pyc,, +idna/codec.py,sha256=6ly5odKfqrytKT9_7UrlGklHnf1DSK2r9C6cSM4sa28,3374 
+idna/compat.py,sha256=0_sOEUMT4CVw9doD3vyRhX80X19PwqFoUBs7gWsFME4,321 +idna/core.py,sha256=RFIkY-HhFZaDoBEFjGwyGd_vWI04uOAQjnzueMWqwOU,12795 +idna/idnadata.py,sha256=fzMzkCea2xieVxcrjngJ-2pLsKQNejPCZFlBajIuQdw,44025 +idna/intranges.py,sha256=YBr4fRYuWH7kTKS2tXlFjM24ZF1Pdvcir-aywniInqg,1881 +idna/package_data.py,sha256=szxQhV0ZD0nKJ84Kuobw3l8q4_KeCyXjFRdpwIpKZmw,21 +idna/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +idna/uts46data.py,sha256=o-D7V-a0fOLZNd7tvxof6MYfUd0TBZzE2bLR5XO67xU,204400 diff --git a/.venv/lib/python3.9/site-packages/idna-3.3.dist-info/WHEEL b/.venv/lib/python3.9/site-packages/idna-3.3.dist-info/WHEEL new file mode 100644 index 0000000..5bad85f --- /dev/null +++ b/.venv/lib/python3.9/site-packages/idna-3.3.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/.venv/lib/python3.9/site-packages/idna-3.3.dist-info/top_level.txt b/.venv/lib/python3.9/site-packages/idna-3.3.dist-info/top_level.txt new file mode 100644 index 0000000..c40472e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/idna-3.3.dist-info/top_level.txt @@ -0,0 +1 @@ +idna diff --git a/.venv/lib/python3.9/site-packages/idna/__init__.py b/.venv/lib/python3.9/site-packages/idna/__init__.py new file mode 100644 index 0000000..a40eeaf --- /dev/null +++ b/.venv/lib/python3.9/site-packages/idna/__init__.py @@ -0,0 +1,44 @@ +from .package_data import __version__ +from .core import ( + IDNABidiError, + IDNAError, + InvalidCodepoint, + InvalidCodepointContext, + alabel, + check_bidi, + check_hyphen_ok, + check_initial_combiner, + check_label, + check_nfc, + decode, + encode, + ulabel, + uts46_remap, + valid_contextj, + valid_contexto, + valid_label_length, + valid_string_length, +) +from .intranges import intranges_contain + +__all__ = [ + "IDNABidiError", + "IDNAError", + "InvalidCodepoint", + "InvalidCodepointContext", + "alabel", + "check_bidi", + "check_hyphen_ok", + 
"check_initial_combiner", + "check_label", + "check_nfc", + "decode", + "encode", + "intranges_contain", + "ulabel", + "uts46_remap", + "valid_contextj", + "valid_contexto", + "valid_label_length", + "valid_string_length", +] diff --git a/.venv/lib/python3.9/site-packages/idna/codec.py b/.venv/lib/python3.9/site-packages/idna/codec.py new file mode 100644 index 0000000..1ca9ba6 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/idna/codec.py @@ -0,0 +1,112 @@ +from .core import encode, decode, alabel, ulabel, IDNAError +import codecs +import re +from typing import Tuple, Optional + +_unicode_dots_re = re.compile('[\u002e\u3002\uff0e\uff61]') + +class Codec(codecs.Codec): + + def encode(self, data: str, errors: str = 'strict') -> Tuple[bytes, int]: + if errors != 'strict': + raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) + + if not data: + return b"", 0 + + return encode(data), len(data) + + def decode(self, data: bytes, errors: str = 'strict') -> Tuple[str, int]: + if errors != 'strict': + raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) + + if not data: + return '', 0 + + return decode(data), len(data) + +class IncrementalEncoder(codecs.BufferedIncrementalEncoder): + def _buffer_encode(self, data: str, errors: str, final: bool) -> Tuple[str, int]: # type: ignore + if errors != 'strict': + raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) + + if not data: + return "", 0 + + labels = _unicode_dots_re.split(data) + trailing_dot = '' + if labels: + if not labels[-1]: + trailing_dot = '.' + del labels[-1] + elif not final: + # Keep potentially unfinished label until the next call + del labels[-1] + if labels: + trailing_dot = '.' 
+ + result = [] + size = 0 + for label in labels: + result.append(alabel(label)) + if size: + size += 1 + size += len(label) + + # Join with U+002E + result_str = '.'.join(result) + trailing_dot # type: ignore + size += len(trailing_dot) + return result_str, size + +class IncrementalDecoder(codecs.BufferedIncrementalDecoder): + def _buffer_decode(self, data: str, errors: str, final: bool) -> Tuple[str, int]: # type: ignore + if errors != 'strict': + raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) + + if not data: + return ('', 0) + + labels = _unicode_dots_re.split(data) + trailing_dot = '' + if labels: + if not labels[-1]: + trailing_dot = '.' + del labels[-1] + elif not final: + # Keep potentially unfinished label until the next call + del labels[-1] + if labels: + trailing_dot = '.' + + result = [] + size = 0 + for label in labels: + result.append(ulabel(label)) + if size: + size += 1 + size += len(label) + + result_str = '.'.join(result) + trailing_dot + size += len(trailing_dot) + return (result_str, size) + + +class StreamWriter(Codec, codecs.StreamWriter): + pass + + +class StreamReader(Codec, codecs.StreamReader): + pass + + +def getregentry() -> codecs.CodecInfo: + # Compatibility as a search_function for codecs.register() + return codecs.CodecInfo( + name='idna', + encode=Codec().encode, # type: ignore + decode=Codec().decode, # type: ignore + incrementalencoder=IncrementalEncoder, + incrementaldecoder=IncrementalDecoder, + streamwriter=StreamWriter, + streamreader=StreamReader, + ) diff --git a/.venv/lib/python3.9/site-packages/idna/compat.py b/.venv/lib/python3.9/site-packages/idna/compat.py new file mode 100644 index 0000000..786e6bd --- /dev/null +++ b/.venv/lib/python3.9/site-packages/idna/compat.py @@ -0,0 +1,13 @@ +from .core import * +from .codec import * +from typing import Any, Union + +def ToASCII(label: str) -> bytes: + return encode(label) + +def ToUnicode(label: Union[bytes, bytearray]) -> str: + return decode(label) + 
+def nameprep(s: Any) -> None: + raise NotImplementedError('IDNA 2008 does not utilise nameprep protocol') + diff --git a/.venv/lib/python3.9/site-packages/idna/core.py b/.venv/lib/python3.9/site-packages/idna/core.py new file mode 100644 index 0000000..55ab967 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/idna/core.py @@ -0,0 +1,397 @@ +from . import idnadata +import bisect +import unicodedata +import re +from typing import Union, Optional +from .intranges import intranges_contain + +_virama_combining_class = 9 +_alabel_prefix = b'xn--' +_unicode_dots_re = re.compile('[\u002e\u3002\uff0e\uff61]') + +class IDNAError(UnicodeError): + """ Base exception for all IDNA-encoding related problems """ + pass + + +class IDNABidiError(IDNAError): + """ Exception when bidirectional requirements are not satisfied """ + pass + + +class InvalidCodepoint(IDNAError): + """ Exception when a disallowed or unallocated codepoint is used """ + pass + + +class InvalidCodepointContext(IDNAError): + """ Exception when the codepoint is not valid in the context it is used """ + pass + + +def _combining_class(cp: int) -> int: + v = unicodedata.combining(chr(cp)) + if v == 0: + if not unicodedata.name(chr(cp)): + raise ValueError('Unknown character in unicodedata') + return v + +def _is_script(cp: str, script: str) -> bool: + return intranges_contain(ord(cp), idnadata.scripts[script]) + +def _punycode(s: str) -> bytes: + return s.encode('punycode') + +def _unot(s: int) -> str: + return 'U+{:04X}'.format(s) + + +def valid_label_length(label: Union[bytes, str]) -> bool: + if len(label) > 63: + return False + return True + + +def valid_string_length(label: Union[bytes, str], trailing_dot: bool) -> bool: + if len(label) > (254 if trailing_dot else 253): + return False + return True + + +def check_bidi(label: str, check_ltr: bool = False) -> bool: + # Bidi rules should only be applied if string contains RTL characters + bidi_label = False + for (idx, cp) in enumerate(label, 1): + direction 
= unicodedata.bidirectional(cp) + if direction == '': + # String likely comes from a newer version of Unicode + raise IDNABidiError('Unknown directionality in label {} at position {}'.format(repr(label), idx)) + if direction in ['R', 'AL', 'AN']: + bidi_label = True + if not bidi_label and not check_ltr: + return True + + # Bidi rule 1 + direction = unicodedata.bidirectional(label[0]) + if direction in ['R', 'AL']: + rtl = True + elif direction == 'L': + rtl = False + else: + raise IDNABidiError('First codepoint in label {} must be directionality L, R or AL'.format(repr(label))) + + valid_ending = False + number_type = None # type: Optional[str] + for (idx, cp) in enumerate(label, 1): + direction = unicodedata.bidirectional(cp) + + if rtl: + # Bidi rule 2 + if not direction in ['R', 'AL', 'AN', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']: + raise IDNABidiError('Invalid direction for codepoint at position {} in a right-to-left label'.format(idx)) + # Bidi rule 3 + if direction in ['R', 'AL', 'EN', 'AN']: + valid_ending = True + elif direction != 'NSM': + valid_ending = False + # Bidi rule 4 + if direction in ['AN', 'EN']: + if not number_type: + number_type = direction + else: + if number_type != direction: + raise IDNABidiError('Can not mix numeral types in a right-to-left label') + else: + # Bidi rule 5 + if not direction in ['L', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']: + raise IDNABidiError('Invalid direction for codepoint at position {} in a left-to-right label'.format(idx)) + # Bidi rule 6 + if direction in ['L', 'EN']: + valid_ending = True + elif direction != 'NSM': + valid_ending = False + + if not valid_ending: + raise IDNABidiError('Label ends with illegal codepoint directionality') + + return True + + +def check_initial_combiner(label: str) -> bool: + if unicodedata.category(label[0])[0] == 'M': + raise IDNAError('Label begins with an illegal combining character') + return True + + +def check_hyphen_ok(label: str) -> bool: + if label[2:4] == '--': + 
raise IDNAError('Label has disallowed hyphens in 3rd and 4th position') + if label[0] == '-' or label[-1] == '-': + raise IDNAError('Label must not start or end with a hyphen') + return True + + +def check_nfc(label: str) -> None: + if unicodedata.normalize('NFC', label) != label: + raise IDNAError('Label must be in Normalization Form C') + + +def valid_contextj(label: str, pos: int) -> bool: + cp_value = ord(label[pos]) + + if cp_value == 0x200c: + + if pos > 0: + if _combining_class(ord(label[pos - 1])) == _virama_combining_class: + return True + + ok = False + for i in range(pos-1, -1, -1): + joining_type = idnadata.joining_types.get(ord(label[i])) + if joining_type == ord('T'): + continue + if joining_type in [ord('L'), ord('D')]: + ok = True + break + + if not ok: + return False + + ok = False + for i in range(pos+1, len(label)): + joining_type = idnadata.joining_types.get(ord(label[i])) + if joining_type == ord('T'): + continue + if joining_type in [ord('R'), ord('D')]: + ok = True + break + return ok + + if cp_value == 0x200d: + + if pos > 0: + if _combining_class(ord(label[pos - 1])) == _virama_combining_class: + return True + return False + + else: + + return False + + +def valid_contexto(label: str, pos: int, exception: bool = False) -> bool: + cp_value = ord(label[pos]) + + if cp_value == 0x00b7: + if 0 < pos < len(label)-1: + if ord(label[pos - 1]) == 0x006c and ord(label[pos + 1]) == 0x006c: + return True + return False + + elif cp_value == 0x0375: + if pos < len(label)-1 and len(label) > 1: + return _is_script(label[pos + 1], 'Greek') + return False + + elif cp_value == 0x05f3 or cp_value == 0x05f4: + if pos > 0: + return _is_script(label[pos - 1], 'Hebrew') + return False + + elif cp_value == 0x30fb: + for cp in label: + if cp == '\u30fb': + continue + if _is_script(cp, 'Hiragana') or _is_script(cp, 'Katakana') or _is_script(cp, 'Han'): + return True + return False + + elif 0x660 <= cp_value <= 0x669: + for cp in label: + if 0x6f0 <= ord(cp) <= 
0x06f9: + return False + return True + + elif 0x6f0 <= cp_value <= 0x6f9: + for cp in label: + if 0x660 <= ord(cp) <= 0x0669: + return False + return True + + return False + + +def check_label(label: Union[str, bytes, bytearray]) -> None: + if isinstance(label, (bytes, bytearray)): + label = label.decode('utf-8') + if len(label) == 0: + raise IDNAError('Empty Label') + + check_nfc(label) + check_hyphen_ok(label) + check_initial_combiner(label) + + for (pos, cp) in enumerate(label): + cp_value = ord(cp) + if intranges_contain(cp_value, idnadata.codepoint_classes['PVALID']): + continue + elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTJ']): + try: + if not valid_contextj(label, pos): + raise InvalidCodepointContext('Joiner {} not allowed at position {} in {}'.format( + _unot(cp_value), pos+1, repr(label))) + except ValueError: + raise IDNAError('Unknown codepoint adjacent to joiner {} at position {} in {}'.format( + _unot(cp_value), pos+1, repr(label))) + elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTO']): + if not valid_contexto(label, pos): + raise InvalidCodepointContext('Codepoint {} not allowed at position {} in {}'.format(_unot(cp_value), pos+1, repr(label))) + else: + raise InvalidCodepoint('Codepoint {} at position {} of {} not allowed'.format(_unot(cp_value), pos+1, repr(label))) + + check_bidi(label) + + +def alabel(label: str) -> bytes: + try: + label_bytes = label.encode('ascii') + ulabel(label_bytes) + if not valid_label_length(label_bytes): + raise IDNAError('Label too long') + return label_bytes + except UnicodeEncodeError: + pass + + if not label: + raise IDNAError('No Input') + + label = str(label) + check_label(label) + label_bytes = _punycode(label) + label_bytes = _alabel_prefix + label_bytes + + if not valid_label_length(label_bytes): + raise IDNAError('Label too long') + + return label_bytes + + +def ulabel(label: Union[str, bytes, bytearray]) -> str: + if not isinstance(label, (bytes, bytearray)): + 
try: + label_bytes = label.encode('ascii') + except UnicodeEncodeError: + check_label(label) + return label + else: + label_bytes = label + + label_bytes = label_bytes.lower() + if label_bytes.startswith(_alabel_prefix): + label_bytes = label_bytes[len(_alabel_prefix):] + if not label_bytes: + raise IDNAError('Malformed A-label, no Punycode eligible content found') + if label_bytes.decode('ascii')[-1] == '-': + raise IDNAError('A-label must not end with a hyphen') + else: + check_label(label_bytes) + return label_bytes.decode('ascii') + + try: + label = label_bytes.decode('punycode') + except UnicodeError: + raise IDNAError('Invalid A-label') + check_label(label) + return label + + +def uts46_remap(domain: str, std3_rules: bool = True, transitional: bool = False) -> str: + """Re-map the characters in the string according to UTS46 processing.""" + from .uts46data import uts46data + output = '' + + for pos, char in enumerate(domain): + code_point = ord(char) + try: + uts46row = uts46data[code_point if code_point < 256 else + bisect.bisect_left(uts46data, (code_point, 'Z')) - 1] + status = uts46row[1] + replacement = None # type: Optional[str] + if len(uts46row) == 3: + replacement = uts46row[2] # type: ignore + if (status == 'V' or + (status == 'D' and not transitional) or + (status == '3' and not std3_rules and replacement is None)): + output += char + elif replacement is not None and (status == 'M' or + (status == '3' and not std3_rules) or + (status == 'D' and transitional)): + output += replacement + elif status != 'I': + raise IndexError() + except IndexError: + raise InvalidCodepoint( + 'Codepoint {} not allowed at position {} in {}'.format( + _unot(code_point), pos + 1, repr(domain))) + + return unicodedata.normalize('NFC', output) + + +def encode(s: Union[str, bytes, bytearray], strict: bool = False, uts46: bool = False, std3_rules: bool = False, transitional: bool = False) -> bytes: + if isinstance(s, (bytes, bytearray)): + s = s.decode('ascii') + if uts46: 
+ s = uts46_remap(s, std3_rules, transitional) + trailing_dot = False + result = [] + if strict: + labels = s.split('.') + else: + labels = _unicode_dots_re.split(s) + if not labels or labels == ['']: + raise IDNAError('Empty domain') + if labels[-1] == '': + del labels[-1] + trailing_dot = True + for label in labels: + s = alabel(label) + if s: + result.append(s) + else: + raise IDNAError('Empty label') + if trailing_dot: + result.append(b'') + s = b'.'.join(result) + if not valid_string_length(s, trailing_dot): + raise IDNAError('Domain too long') + return s + + +def decode(s: Union[str, bytes, bytearray], strict: bool = False, uts46: bool = False, std3_rules: bool = False) -> str: + try: + if isinstance(s, (bytes, bytearray)): + s = s.decode('ascii') + except UnicodeDecodeError: + raise IDNAError('Invalid ASCII in A-label') + if uts46: + s = uts46_remap(s, std3_rules, False) + trailing_dot = False + result = [] + if not strict: + labels = _unicode_dots_re.split(s) + else: + labels = s.split('.') + if not labels or labels == ['']: + raise IDNAError('Empty domain') + if not labels[-1]: + del labels[-1] + trailing_dot = True + for label in labels: + s = ulabel(label) + if s: + result.append(s) + else: + raise IDNAError('Empty label') + if trailing_dot: + result.append('') + return '.'.join(result) diff --git a/.venv/lib/python3.9/site-packages/idna/idnadata.py b/.venv/lib/python3.9/site-packages/idna/idnadata.py new file mode 100644 index 0000000..1b5805d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/idna/idnadata.py @@ -0,0 +1,2137 @@ +# This file is automatically generated by tools/idna-data + +__version__ = '14.0.0' +scripts = { + 'Greek': ( + 0x37000000374, + 0x37500000378, + 0x37a0000037e, + 0x37f00000380, + 0x38400000385, + 0x38600000387, + 0x3880000038b, + 0x38c0000038d, + 0x38e000003a2, + 0x3a3000003e2, + 0x3f000000400, + 0x1d2600001d2b, + 0x1d5d00001d62, + 0x1d6600001d6b, + 0x1dbf00001dc0, + 0x1f0000001f16, + 0x1f1800001f1e, + 0x1f2000001f46, + 
0x1f4800001f4e, + 0x1f5000001f58, + 0x1f5900001f5a, + 0x1f5b00001f5c, + 0x1f5d00001f5e, + 0x1f5f00001f7e, + 0x1f8000001fb5, + 0x1fb600001fc5, + 0x1fc600001fd4, + 0x1fd600001fdc, + 0x1fdd00001ff0, + 0x1ff200001ff5, + 0x1ff600001fff, + 0x212600002127, + 0xab650000ab66, + 0x101400001018f, + 0x101a0000101a1, + 0x1d2000001d246, + ), + 'Han': ( + 0x2e8000002e9a, + 0x2e9b00002ef4, + 0x2f0000002fd6, + 0x300500003006, + 0x300700003008, + 0x30210000302a, + 0x30380000303c, + 0x340000004dc0, + 0x4e000000a000, + 0xf9000000fa6e, + 0xfa700000fada, + 0x16fe200016fe4, + 0x16ff000016ff2, + 0x200000002a6e0, + 0x2a7000002b739, + 0x2b7400002b81e, + 0x2b8200002cea2, + 0x2ceb00002ebe1, + 0x2f8000002fa1e, + 0x300000003134b, + ), + 'Hebrew': ( + 0x591000005c8, + 0x5d0000005eb, + 0x5ef000005f5, + 0xfb1d0000fb37, + 0xfb380000fb3d, + 0xfb3e0000fb3f, + 0xfb400000fb42, + 0xfb430000fb45, + 0xfb460000fb50, + ), + 'Hiragana': ( + 0x304100003097, + 0x309d000030a0, + 0x1b0010001b120, + 0x1b1500001b153, + 0x1f2000001f201, + ), + 'Katakana': ( + 0x30a1000030fb, + 0x30fd00003100, + 0x31f000003200, + 0x32d0000032ff, + 0x330000003358, + 0xff660000ff70, + 0xff710000ff9e, + 0x1aff00001aff4, + 0x1aff50001affc, + 0x1affd0001afff, + 0x1b0000001b001, + 0x1b1200001b123, + 0x1b1640001b168, + ), +} +joining_types = { + 0x600: 85, + 0x601: 85, + 0x602: 85, + 0x603: 85, + 0x604: 85, + 0x605: 85, + 0x608: 85, + 0x60b: 85, + 0x620: 68, + 0x621: 85, + 0x622: 82, + 0x623: 82, + 0x624: 82, + 0x625: 82, + 0x626: 68, + 0x627: 82, + 0x628: 68, + 0x629: 82, + 0x62a: 68, + 0x62b: 68, + 0x62c: 68, + 0x62d: 68, + 0x62e: 68, + 0x62f: 82, + 0x630: 82, + 0x631: 82, + 0x632: 82, + 0x633: 68, + 0x634: 68, + 0x635: 68, + 0x636: 68, + 0x637: 68, + 0x638: 68, + 0x639: 68, + 0x63a: 68, + 0x63b: 68, + 0x63c: 68, + 0x63d: 68, + 0x63e: 68, + 0x63f: 68, + 0x640: 67, + 0x641: 68, + 0x642: 68, + 0x643: 68, + 0x644: 68, + 0x645: 68, + 0x646: 68, + 0x647: 68, + 0x648: 82, + 0x649: 68, + 0x64a: 68, + 0x66e: 68, + 0x66f: 68, + 0x671: 82, + 
0x672: 82, + 0x673: 82, + 0x674: 85, + 0x675: 82, + 0x676: 82, + 0x677: 82, + 0x678: 68, + 0x679: 68, + 0x67a: 68, + 0x67b: 68, + 0x67c: 68, + 0x67d: 68, + 0x67e: 68, + 0x67f: 68, + 0x680: 68, + 0x681: 68, + 0x682: 68, + 0x683: 68, + 0x684: 68, + 0x685: 68, + 0x686: 68, + 0x687: 68, + 0x688: 82, + 0x689: 82, + 0x68a: 82, + 0x68b: 82, + 0x68c: 82, + 0x68d: 82, + 0x68e: 82, + 0x68f: 82, + 0x690: 82, + 0x691: 82, + 0x692: 82, + 0x693: 82, + 0x694: 82, + 0x695: 82, + 0x696: 82, + 0x697: 82, + 0x698: 82, + 0x699: 82, + 0x69a: 68, + 0x69b: 68, + 0x69c: 68, + 0x69d: 68, + 0x69e: 68, + 0x69f: 68, + 0x6a0: 68, + 0x6a1: 68, + 0x6a2: 68, + 0x6a3: 68, + 0x6a4: 68, + 0x6a5: 68, + 0x6a6: 68, + 0x6a7: 68, + 0x6a8: 68, + 0x6a9: 68, + 0x6aa: 68, + 0x6ab: 68, + 0x6ac: 68, + 0x6ad: 68, + 0x6ae: 68, + 0x6af: 68, + 0x6b0: 68, + 0x6b1: 68, + 0x6b2: 68, + 0x6b3: 68, + 0x6b4: 68, + 0x6b5: 68, + 0x6b6: 68, + 0x6b7: 68, + 0x6b8: 68, + 0x6b9: 68, + 0x6ba: 68, + 0x6bb: 68, + 0x6bc: 68, + 0x6bd: 68, + 0x6be: 68, + 0x6bf: 68, + 0x6c0: 82, + 0x6c1: 68, + 0x6c2: 68, + 0x6c3: 82, + 0x6c4: 82, + 0x6c5: 82, + 0x6c6: 82, + 0x6c7: 82, + 0x6c8: 82, + 0x6c9: 82, + 0x6ca: 82, + 0x6cb: 82, + 0x6cc: 68, + 0x6cd: 82, + 0x6ce: 68, + 0x6cf: 82, + 0x6d0: 68, + 0x6d1: 68, + 0x6d2: 82, + 0x6d3: 82, + 0x6d5: 82, + 0x6dd: 85, + 0x6ee: 82, + 0x6ef: 82, + 0x6fa: 68, + 0x6fb: 68, + 0x6fc: 68, + 0x6ff: 68, + 0x70f: 84, + 0x710: 82, + 0x712: 68, + 0x713: 68, + 0x714: 68, + 0x715: 82, + 0x716: 82, + 0x717: 82, + 0x718: 82, + 0x719: 82, + 0x71a: 68, + 0x71b: 68, + 0x71c: 68, + 0x71d: 68, + 0x71e: 82, + 0x71f: 68, + 0x720: 68, + 0x721: 68, + 0x722: 68, + 0x723: 68, + 0x724: 68, + 0x725: 68, + 0x726: 68, + 0x727: 68, + 0x728: 82, + 0x729: 68, + 0x72a: 82, + 0x72b: 68, + 0x72c: 82, + 0x72d: 68, + 0x72e: 68, + 0x72f: 82, + 0x74d: 82, + 0x74e: 68, + 0x74f: 68, + 0x750: 68, + 0x751: 68, + 0x752: 68, + 0x753: 68, + 0x754: 68, + 0x755: 68, + 0x756: 68, + 0x757: 68, + 0x758: 68, + 0x759: 82, + 0x75a: 82, + 0x75b: 82, + 0x75c: 68, 
+ 0x75d: 68, + 0x75e: 68, + 0x75f: 68, + 0x760: 68, + 0x761: 68, + 0x762: 68, + 0x763: 68, + 0x764: 68, + 0x765: 68, + 0x766: 68, + 0x767: 68, + 0x768: 68, + 0x769: 68, + 0x76a: 68, + 0x76b: 82, + 0x76c: 82, + 0x76d: 68, + 0x76e: 68, + 0x76f: 68, + 0x770: 68, + 0x771: 82, + 0x772: 68, + 0x773: 82, + 0x774: 82, + 0x775: 68, + 0x776: 68, + 0x777: 68, + 0x778: 82, + 0x779: 82, + 0x77a: 68, + 0x77b: 68, + 0x77c: 68, + 0x77d: 68, + 0x77e: 68, + 0x77f: 68, + 0x7ca: 68, + 0x7cb: 68, + 0x7cc: 68, + 0x7cd: 68, + 0x7ce: 68, + 0x7cf: 68, + 0x7d0: 68, + 0x7d1: 68, + 0x7d2: 68, + 0x7d3: 68, + 0x7d4: 68, + 0x7d5: 68, + 0x7d6: 68, + 0x7d7: 68, + 0x7d8: 68, + 0x7d9: 68, + 0x7da: 68, + 0x7db: 68, + 0x7dc: 68, + 0x7dd: 68, + 0x7de: 68, + 0x7df: 68, + 0x7e0: 68, + 0x7e1: 68, + 0x7e2: 68, + 0x7e3: 68, + 0x7e4: 68, + 0x7e5: 68, + 0x7e6: 68, + 0x7e7: 68, + 0x7e8: 68, + 0x7e9: 68, + 0x7ea: 68, + 0x7fa: 67, + 0x840: 82, + 0x841: 68, + 0x842: 68, + 0x843: 68, + 0x844: 68, + 0x845: 68, + 0x846: 82, + 0x847: 82, + 0x848: 68, + 0x849: 82, + 0x84a: 68, + 0x84b: 68, + 0x84c: 68, + 0x84d: 68, + 0x84e: 68, + 0x84f: 68, + 0x850: 68, + 0x851: 68, + 0x852: 68, + 0x853: 68, + 0x854: 82, + 0x855: 68, + 0x856: 82, + 0x857: 82, + 0x858: 82, + 0x860: 68, + 0x861: 85, + 0x862: 68, + 0x863: 68, + 0x864: 68, + 0x865: 68, + 0x866: 85, + 0x867: 82, + 0x868: 68, + 0x869: 82, + 0x86a: 82, + 0x870: 82, + 0x871: 82, + 0x872: 82, + 0x873: 82, + 0x874: 82, + 0x875: 82, + 0x876: 82, + 0x877: 82, + 0x878: 82, + 0x879: 82, + 0x87a: 82, + 0x87b: 82, + 0x87c: 82, + 0x87d: 82, + 0x87e: 82, + 0x87f: 82, + 0x880: 82, + 0x881: 82, + 0x882: 82, + 0x883: 67, + 0x884: 67, + 0x885: 67, + 0x886: 68, + 0x887: 85, + 0x888: 85, + 0x889: 68, + 0x88a: 68, + 0x88b: 68, + 0x88c: 68, + 0x88d: 68, + 0x88e: 82, + 0x890: 85, + 0x891: 85, + 0x8a0: 68, + 0x8a1: 68, + 0x8a2: 68, + 0x8a3: 68, + 0x8a4: 68, + 0x8a5: 68, + 0x8a6: 68, + 0x8a7: 68, + 0x8a8: 68, + 0x8a9: 68, + 0x8aa: 82, + 0x8ab: 82, + 0x8ac: 82, + 0x8ad: 85, + 0x8ae: 82, + 0x8af: 
68, + 0x8b0: 68, + 0x8b1: 82, + 0x8b2: 82, + 0x8b3: 68, + 0x8b4: 68, + 0x8b5: 68, + 0x8b6: 68, + 0x8b7: 68, + 0x8b8: 68, + 0x8b9: 82, + 0x8ba: 68, + 0x8bb: 68, + 0x8bc: 68, + 0x8bd: 68, + 0x8be: 68, + 0x8bf: 68, + 0x8c0: 68, + 0x8c1: 68, + 0x8c2: 68, + 0x8c3: 68, + 0x8c4: 68, + 0x8c5: 68, + 0x8c6: 68, + 0x8c7: 68, + 0x8c8: 68, + 0x8e2: 85, + 0x1806: 85, + 0x1807: 68, + 0x180a: 67, + 0x180e: 85, + 0x1820: 68, + 0x1821: 68, + 0x1822: 68, + 0x1823: 68, + 0x1824: 68, + 0x1825: 68, + 0x1826: 68, + 0x1827: 68, + 0x1828: 68, + 0x1829: 68, + 0x182a: 68, + 0x182b: 68, + 0x182c: 68, + 0x182d: 68, + 0x182e: 68, + 0x182f: 68, + 0x1830: 68, + 0x1831: 68, + 0x1832: 68, + 0x1833: 68, + 0x1834: 68, + 0x1835: 68, + 0x1836: 68, + 0x1837: 68, + 0x1838: 68, + 0x1839: 68, + 0x183a: 68, + 0x183b: 68, + 0x183c: 68, + 0x183d: 68, + 0x183e: 68, + 0x183f: 68, + 0x1840: 68, + 0x1841: 68, + 0x1842: 68, + 0x1843: 68, + 0x1844: 68, + 0x1845: 68, + 0x1846: 68, + 0x1847: 68, + 0x1848: 68, + 0x1849: 68, + 0x184a: 68, + 0x184b: 68, + 0x184c: 68, + 0x184d: 68, + 0x184e: 68, + 0x184f: 68, + 0x1850: 68, + 0x1851: 68, + 0x1852: 68, + 0x1853: 68, + 0x1854: 68, + 0x1855: 68, + 0x1856: 68, + 0x1857: 68, + 0x1858: 68, + 0x1859: 68, + 0x185a: 68, + 0x185b: 68, + 0x185c: 68, + 0x185d: 68, + 0x185e: 68, + 0x185f: 68, + 0x1860: 68, + 0x1861: 68, + 0x1862: 68, + 0x1863: 68, + 0x1864: 68, + 0x1865: 68, + 0x1866: 68, + 0x1867: 68, + 0x1868: 68, + 0x1869: 68, + 0x186a: 68, + 0x186b: 68, + 0x186c: 68, + 0x186d: 68, + 0x186e: 68, + 0x186f: 68, + 0x1870: 68, + 0x1871: 68, + 0x1872: 68, + 0x1873: 68, + 0x1874: 68, + 0x1875: 68, + 0x1876: 68, + 0x1877: 68, + 0x1878: 68, + 0x1880: 85, + 0x1881: 85, + 0x1882: 85, + 0x1883: 85, + 0x1884: 85, + 0x1885: 84, + 0x1886: 84, + 0x1887: 68, + 0x1888: 68, + 0x1889: 68, + 0x188a: 68, + 0x188b: 68, + 0x188c: 68, + 0x188d: 68, + 0x188e: 68, + 0x188f: 68, + 0x1890: 68, + 0x1891: 68, + 0x1892: 68, + 0x1893: 68, + 0x1894: 68, + 0x1895: 68, + 0x1896: 68, + 0x1897: 68, + 0x1898: 68, + 
0x1899: 68, + 0x189a: 68, + 0x189b: 68, + 0x189c: 68, + 0x189d: 68, + 0x189e: 68, + 0x189f: 68, + 0x18a0: 68, + 0x18a1: 68, + 0x18a2: 68, + 0x18a3: 68, + 0x18a4: 68, + 0x18a5: 68, + 0x18a6: 68, + 0x18a7: 68, + 0x18a8: 68, + 0x18aa: 68, + 0x200c: 85, + 0x200d: 67, + 0x202f: 85, + 0x2066: 85, + 0x2067: 85, + 0x2068: 85, + 0x2069: 85, + 0xa840: 68, + 0xa841: 68, + 0xa842: 68, + 0xa843: 68, + 0xa844: 68, + 0xa845: 68, + 0xa846: 68, + 0xa847: 68, + 0xa848: 68, + 0xa849: 68, + 0xa84a: 68, + 0xa84b: 68, + 0xa84c: 68, + 0xa84d: 68, + 0xa84e: 68, + 0xa84f: 68, + 0xa850: 68, + 0xa851: 68, + 0xa852: 68, + 0xa853: 68, + 0xa854: 68, + 0xa855: 68, + 0xa856: 68, + 0xa857: 68, + 0xa858: 68, + 0xa859: 68, + 0xa85a: 68, + 0xa85b: 68, + 0xa85c: 68, + 0xa85d: 68, + 0xa85e: 68, + 0xa85f: 68, + 0xa860: 68, + 0xa861: 68, + 0xa862: 68, + 0xa863: 68, + 0xa864: 68, + 0xa865: 68, + 0xa866: 68, + 0xa867: 68, + 0xa868: 68, + 0xa869: 68, + 0xa86a: 68, + 0xa86b: 68, + 0xa86c: 68, + 0xa86d: 68, + 0xa86e: 68, + 0xa86f: 68, + 0xa870: 68, + 0xa871: 68, + 0xa872: 76, + 0xa873: 85, + 0x10ac0: 68, + 0x10ac1: 68, + 0x10ac2: 68, + 0x10ac3: 68, + 0x10ac4: 68, + 0x10ac5: 82, + 0x10ac6: 85, + 0x10ac7: 82, + 0x10ac8: 85, + 0x10ac9: 82, + 0x10aca: 82, + 0x10acb: 85, + 0x10acc: 85, + 0x10acd: 76, + 0x10ace: 82, + 0x10acf: 82, + 0x10ad0: 82, + 0x10ad1: 82, + 0x10ad2: 82, + 0x10ad3: 68, + 0x10ad4: 68, + 0x10ad5: 68, + 0x10ad6: 68, + 0x10ad7: 76, + 0x10ad8: 68, + 0x10ad9: 68, + 0x10ada: 68, + 0x10adb: 68, + 0x10adc: 68, + 0x10add: 82, + 0x10ade: 68, + 0x10adf: 68, + 0x10ae0: 68, + 0x10ae1: 82, + 0x10ae2: 85, + 0x10ae3: 85, + 0x10ae4: 82, + 0x10aeb: 68, + 0x10aec: 68, + 0x10aed: 68, + 0x10aee: 68, + 0x10aef: 82, + 0x10b80: 68, + 0x10b81: 82, + 0x10b82: 68, + 0x10b83: 82, + 0x10b84: 82, + 0x10b85: 82, + 0x10b86: 68, + 0x10b87: 68, + 0x10b88: 68, + 0x10b89: 82, + 0x10b8a: 68, + 0x10b8b: 68, + 0x10b8c: 82, + 0x10b8d: 68, + 0x10b8e: 82, + 0x10b8f: 82, + 0x10b90: 68, + 0x10b91: 82, + 0x10ba9: 82, + 0x10baa: 82, + 
0x10bab: 82, + 0x10bac: 82, + 0x10bad: 68, + 0x10bae: 68, + 0x10baf: 85, + 0x10d00: 76, + 0x10d01: 68, + 0x10d02: 68, + 0x10d03: 68, + 0x10d04: 68, + 0x10d05: 68, + 0x10d06: 68, + 0x10d07: 68, + 0x10d08: 68, + 0x10d09: 68, + 0x10d0a: 68, + 0x10d0b: 68, + 0x10d0c: 68, + 0x10d0d: 68, + 0x10d0e: 68, + 0x10d0f: 68, + 0x10d10: 68, + 0x10d11: 68, + 0x10d12: 68, + 0x10d13: 68, + 0x10d14: 68, + 0x10d15: 68, + 0x10d16: 68, + 0x10d17: 68, + 0x10d18: 68, + 0x10d19: 68, + 0x10d1a: 68, + 0x10d1b: 68, + 0x10d1c: 68, + 0x10d1d: 68, + 0x10d1e: 68, + 0x10d1f: 68, + 0x10d20: 68, + 0x10d21: 68, + 0x10d22: 82, + 0x10d23: 68, + 0x10f30: 68, + 0x10f31: 68, + 0x10f32: 68, + 0x10f33: 82, + 0x10f34: 68, + 0x10f35: 68, + 0x10f36: 68, + 0x10f37: 68, + 0x10f38: 68, + 0x10f39: 68, + 0x10f3a: 68, + 0x10f3b: 68, + 0x10f3c: 68, + 0x10f3d: 68, + 0x10f3e: 68, + 0x10f3f: 68, + 0x10f40: 68, + 0x10f41: 68, + 0x10f42: 68, + 0x10f43: 68, + 0x10f44: 68, + 0x10f45: 85, + 0x10f51: 68, + 0x10f52: 68, + 0x10f53: 68, + 0x10f54: 82, + 0x10f70: 68, + 0x10f71: 68, + 0x10f72: 68, + 0x10f73: 68, + 0x10f74: 82, + 0x10f75: 82, + 0x10f76: 68, + 0x10f77: 68, + 0x10f78: 68, + 0x10f79: 68, + 0x10f7a: 68, + 0x10f7b: 68, + 0x10f7c: 68, + 0x10f7d: 68, + 0x10f7e: 68, + 0x10f7f: 68, + 0x10f80: 68, + 0x10f81: 68, + 0x10fb0: 68, + 0x10fb1: 85, + 0x10fb2: 68, + 0x10fb3: 68, + 0x10fb4: 82, + 0x10fb5: 82, + 0x10fb6: 82, + 0x10fb7: 85, + 0x10fb8: 68, + 0x10fb9: 82, + 0x10fba: 82, + 0x10fbb: 68, + 0x10fbc: 68, + 0x10fbd: 82, + 0x10fbe: 68, + 0x10fbf: 68, + 0x10fc0: 85, + 0x10fc1: 68, + 0x10fc2: 82, + 0x10fc3: 82, + 0x10fc4: 68, + 0x10fc5: 85, + 0x10fc6: 85, + 0x10fc7: 85, + 0x10fc8: 85, + 0x10fc9: 82, + 0x10fca: 68, + 0x10fcb: 76, + 0x110bd: 85, + 0x110cd: 85, + 0x1e900: 68, + 0x1e901: 68, + 0x1e902: 68, + 0x1e903: 68, + 0x1e904: 68, + 0x1e905: 68, + 0x1e906: 68, + 0x1e907: 68, + 0x1e908: 68, + 0x1e909: 68, + 0x1e90a: 68, + 0x1e90b: 68, + 0x1e90c: 68, + 0x1e90d: 68, + 0x1e90e: 68, + 0x1e90f: 68, + 0x1e910: 68, + 0x1e911: 68, + 
0x1e912: 68, + 0x1e913: 68, + 0x1e914: 68, + 0x1e915: 68, + 0x1e916: 68, + 0x1e917: 68, + 0x1e918: 68, + 0x1e919: 68, + 0x1e91a: 68, + 0x1e91b: 68, + 0x1e91c: 68, + 0x1e91d: 68, + 0x1e91e: 68, + 0x1e91f: 68, + 0x1e920: 68, + 0x1e921: 68, + 0x1e922: 68, + 0x1e923: 68, + 0x1e924: 68, + 0x1e925: 68, + 0x1e926: 68, + 0x1e927: 68, + 0x1e928: 68, + 0x1e929: 68, + 0x1e92a: 68, + 0x1e92b: 68, + 0x1e92c: 68, + 0x1e92d: 68, + 0x1e92e: 68, + 0x1e92f: 68, + 0x1e930: 68, + 0x1e931: 68, + 0x1e932: 68, + 0x1e933: 68, + 0x1e934: 68, + 0x1e935: 68, + 0x1e936: 68, + 0x1e937: 68, + 0x1e938: 68, + 0x1e939: 68, + 0x1e93a: 68, + 0x1e93b: 68, + 0x1e93c: 68, + 0x1e93d: 68, + 0x1e93e: 68, + 0x1e93f: 68, + 0x1e940: 68, + 0x1e941: 68, + 0x1e942: 68, + 0x1e943: 68, + 0x1e94b: 84, +} +codepoint_classes = { + 'PVALID': ( + 0x2d0000002e, + 0x300000003a, + 0x610000007b, + 0xdf000000f7, + 0xf800000100, + 0x10100000102, + 0x10300000104, + 0x10500000106, + 0x10700000108, + 0x1090000010a, + 0x10b0000010c, + 0x10d0000010e, + 0x10f00000110, + 0x11100000112, + 0x11300000114, + 0x11500000116, + 0x11700000118, + 0x1190000011a, + 0x11b0000011c, + 0x11d0000011e, + 0x11f00000120, + 0x12100000122, + 0x12300000124, + 0x12500000126, + 0x12700000128, + 0x1290000012a, + 0x12b0000012c, + 0x12d0000012e, + 0x12f00000130, + 0x13100000132, + 0x13500000136, + 0x13700000139, + 0x13a0000013b, + 0x13c0000013d, + 0x13e0000013f, + 0x14200000143, + 0x14400000145, + 0x14600000147, + 0x14800000149, + 0x14b0000014c, + 0x14d0000014e, + 0x14f00000150, + 0x15100000152, + 0x15300000154, + 0x15500000156, + 0x15700000158, + 0x1590000015a, + 0x15b0000015c, + 0x15d0000015e, + 0x15f00000160, + 0x16100000162, + 0x16300000164, + 0x16500000166, + 0x16700000168, + 0x1690000016a, + 0x16b0000016c, + 0x16d0000016e, + 0x16f00000170, + 0x17100000172, + 0x17300000174, + 0x17500000176, + 0x17700000178, + 0x17a0000017b, + 0x17c0000017d, + 0x17e0000017f, + 0x18000000181, + 0x18300000184, + 0x18500000186, + 0x18800000189, + 0x18c0000018e, + 
0x19200000193, + 0x19500000196, + 0x1990000019c, + 0x19e0000019f, + 0x1a1000001a2, + 0x1a3000001a4, + 0x1a5000001a6, + 0x1a8000001a9, + 0x1aa000001ac, + 0x1ad000001ae, + 0x1b0000001b1, + 0x1b4000001b5, + 0x1b6000001b7, + 0x1b9000001bc, + 0x1bd000001c4, + 0x1ce000001cf, + 0x1d0000001d1, + 0x1d2000001d3, + 0x1d4000001d5, + 0x1d6000001d7, + 0x1d8000001d9, + 0x1da000001db, + 0x1dc000001de, + 0x1df000001e0, + 0x1e1000001e2, + 0x1e3000001e4, + 0x1e5000001e6, + 0x1e7000001e8, + 0x1e9000001ea, + 0x1eb000001ec, + 0x1ed000001ee, + 0x1ef000001f1, + 0x1f5000001f6, + 0x1f9000001fa, + 0x1fb000001fc, + 0x1fd000001fe, + 0x1ff00000200, + 0x20100000202, + 0x20300000204, + 0x20500000206, + 0x20700000208, + 0x2090000020a, + 0x20b0000020c, + 0x20d0000020e, + 0x20f00000210, + 0x21100000212, + 0x21300000214, + 0x21500000216, + 0x21700000218, + 0x2190000021a, + 0x21b0000021c, + 0x21d0000021e, + 0x21f00000220, + 0x22100000222, + 0x22300000224, + 0x22500000226, + 0x22700000228, + 0x2290000022a, + 0x22b0000022c, + 0x22d0000022e, + 0x22f00000230, + 0x23100000232, + 0x2330000023a, + 0x23c0000023d, + 0x23f00000241, + 0x24200000243, + 0x24700000248, + 0x2490000024a, + 0x24b0000024c, + 0x24d0000024e, + 0x24f000002b0, + 0x2b9000002c2, + 0x2c6000002d2, + 0x2ec000002ed, + 0x2ee000002ef, + 0x30000000340, + 0x34200000343, + 0x3460000034f, + 0x35000000370, + 0x37100000372, + 0x37300000374, + 0x37700000378, + 0x37b0000037e, + 0x39000000391, + 0x3ac000003cf, + 0x3d7000003d8, + 0x3d9000003da, + 0x3db000003dc, + 0x3dd000003de, + 0x3df000003e0, + 0x3e1000003e2, + 0x3e3000003e4, + 0x3e5000003e6, + 0x3e7000003e8, + 0x3e9000003ea, + 0x3eb000003ec, + 0x3ed000003ee, + 0x3ef000003f0, + 0x3f3000003f4, + 0x3f8000003f9, + 0x3fb000003fd, + 0x43000000460, + 0x46100000462, + 0x46300000464, + 0x46500000466, + 0x46700000468, + 0x4690000046a, + 0x46b0000046c, + 0x46d0000046e, + 0x46f00000470, + 0x47100000472, + 0x47300000474, + 0x47500000476, + 0x47700000478, + 0x4790000047a, + 0x47b0000047c, + 0x47d0000047e, + 
0x47f00000480, + 0x48100000482, + 0x48300000488, + 0x48b0000048c, + 0x48d0000048e, + 0x48f00000490, + 0x49100000492, + 0x49300000494, + 0x49500000496, + 0x49700000498, + 0x4990000049a, + 0x49b0000049c, + 0x49d0000049e, + 0x49f000004a0, + 0x4a1000004a2, + 0x4a3000004a4, + 0x4a5000004a6, + 0x4a7000004a8, + 0x4a9000004aa, + 0x4ab000004ac, + 0x4ad000004ae, + 0x4af000004b0, + 0x4b1000004b2, + 0x4b3000004b4, + 0x4b5000004b6, + 0x4b7000004b8, + 0x4b9000004ba, + 0x4bb000004bc, + 0x4bd000004be, + 0x4bf000004c0, + 0x4c2000004c3, + 0x4c4000004c5, + 0x4c6000004c7, + 0x4c8000004c9, + 0x4ca000004cb, + 0x4cc000004cd, + 0x4ce000004d0, + 0x4d1000004d2, + 0x4d3000004d4, + 0x4d5000004d6, + 0x4d7000004d8, + 0x4d9000004da, + 0x4db000004dc, + 0x4dd000004de, + 0x4df000004e0, + 0x4e1000004e2, + 0x4e3000004e4, + 0x4e5000004e6, + 0x4e7000004e8, + 0x4e9000004ea, + 0x4eb000004ec, + 0x4ed000004ee, + 0x4ef000004f0, + 0x4f1000004f2, + 0x4f3000004f4, + 0x4f5000004f6, + 0x4f7000004f8, + 0x4f9000004fa, + 0x4fb000004fc, + 0x4fd000004fe, + 0x4ff00000500, + 0x50100000502, + 0x50300000504, + 0x50500000506, + 0x50700000508, + 0x5090000050a, + 0x50b0000050c, + 0x50d0000050e, + 0x50f00000510, + 0x51100000512, + 0x51300000514, + 0x51500000516, + 0x51700000518, + 0x5190000051a, + 0x51b0000051c, + 0x51d0000051e, + 0x51f00000520, + 0x52100000522, + 0x52300000524, + 0x52500000526, + 0x52700000528, + 0x5290000052a, + 0x52b0000052c, + 0x52d0000052e, + 0x52f00000530, + 0x5590000055a, + 0x56000000587, + 0x58800000589, + 0x591000005be, + 0x5bf000005c0, + 0x5c1000005c3, + 0x5c4000005c6, + 0x5c7000005c8, + 0x5d0000005eb, + 0x5ef000005f3, + 0x6100000061b, + 0x62000000640, + 0x64100000660, + 0x66e00000675, + 0x679000006d4, + 0x6d5000006dd, + 0x6df000006e9, + 0x6ea000006f0, + 0x6fa00000700, + 0x7100000074b, + 0x74d000007b2, + 0x7c0000007f6, + 0x7fd000007fe, + 0x8000000082e, + 0x8400000085c, + 0x8600000086b, + 0x87000000888, + 0x8890000088f, + 0x898000008e2, + 0x8e300000958, + 0x96000000964, + 0x96600000970, + 
0x97100000984, + 0x9850000098d, + 0x98f00000991, + 0x993000009a9, + 0x9aa000009b1, + 0x9b2000009b3, + 0x9b6000009ba, + 0x9bc000009c5, + 0x9c7000009c9, + 0x9cb000009cf, + 0x9d7000009d8, + 0x9e0000009e4, + 0x9e6000009f2, + 0x9fc000009fd, + 0x9fe000009ff, + 0xa0100000a04, + 0xa0500000a0b, + 0xa0f00000a11, + 0xa1300000a29, + 0xa2a00000a31, + 0xa3200000a33, + 0xa3500000a36, + 0xa3800000a3a, + 0xa3c00000a3d, + 0xa3e00000a43, + 0xa4700000a49, + 0xa4b00000a4e, + 0xa5100000a52, + 0xa5c00000a5d, + 0xa6600000a76, + 0xa8100000a84, + 0xa8500000a8e, + 0xa8f00000a92, + 0xa9300000aa9, + 0xaaa00000ab1, + 0xab200000ab4, + 0xab500000aba, + 0xabc00000ac6, + 0xac700000aca, + 0xacb00000ace, + 0xad000000ad1, + 0xae000000ae4, + 0xae600000af0, + 0xaf900000b00, + 0xb0100000b04, + 0xb0500000b0d, + 0xb0f00000b11, + 0xb1300000b29, + 0xb2a00000b31, + 0xb3200000b34, + 0xb3500000b3a, + 0xb3c00000b45, + 0xb4700000b49, + 0xb4b00000b4e, + 0xb5500000b58, + 0xb5f00000b64, + 0xb6600000b70, + 0xb7100000b72, + 0xb8200000b84, + 0xb8500000b8b, + 0xb8e00000b91, + 0xb9200000b96, + 0xb9900000b9b, + 0xb9c00000b9d, + 0xb9e00000ba0, + 0xba300000ba5, + 0xba800000bab, + 0xbae00000bba, + 0xbbe00000bc3, + 0xbc600000bc9, + 0xbca00000bce, + 0xbd000000bd1, + 0xbd700000bd8, + 0xbe600000bf0, + 0xc0000000c0d, + 0xc0e00000c11, + 0xc1200000c29, + 0xc2a00000c3a, + 0xc3c00000c45, + 0xc4600000c49, + 0xc4a00000c4e, + 0xc5500000c57, + 0xc5800000c5b, + 0xc5d00000c5e, + 0xc6000000c64, + 0xc6600000c70, + 0xc8000000c84, + 0xc8500000c8d, + 0xc8e00000c91, + 0xc9200000ca9, + 0xcaa00000cb4, + 0xcb500000cba, + 0xcbc00000cc5, + 0xcc600000cc9, + 0xcca00000cce, + 0xcd500000cd7, + 0xcdd00000cdf, + 0xce000000ce4, + 0xce600000cf0, + 0xcf100000cf3, + 0xd0000000d0d, + 0xd0e00000d11, + 0xd1200000d45, + 0xd4600000d49, + 0xd4a00000d4f, + 0xd5400000d58, + 0xd5f00000d64, + 0xd6600000d70, + 0xd7a00000d80, + 0xd8100000d84, + 0xd8500000d97, + 0xd9a00000db2, + 0xdb300000dbc, + 0xdbd00000dbe, + 0xdc000000dc7, + 0xdca00000dcb, + 0xdcf00000dd5, + 
0xdd600000dd7, + 0xdd800000de0, + 0xde600000df0, + 0xdf200000df4, + 0xe0100000e33, + 0xe3400000e3b, + 0xe4000000e4f, + 0xe5000000e5a, + 0xe8100000e83, + 0xe8400000e85, + 0xe8600000e8b, + 0xe8c00000ea4, + 0xea500000ea6, + 0xea700000eb3, + 0xeb400000ebe, + 0xec000000ec5, + 0xec600000ec7, + 0xec800000ece, + 0xed000000eda, + 0xede00000ee0, + 0xf0000000f01, + 0xf0b00000f0c, + 0xf1800000f1a, + 0xf2000000f2a, + 0xf3500000f36, + 0xf3700000f38, + 0xf3900000f3a, + 0xf3e00000f43, + 0xf4400000f48, + 0xf4900000f4d, + 0xf4e00000f52, + 0xf5300000f57, + 0xf5800000f5c, + 0xf5d00000f69, + 0xf6a00000f6d, + 0xf7100000f73, + 0xf7400000f75, + 0xf7a00000f81, + 0xf8200000f85, + 0xf8600000f93, + 0xf9400000f98, + 0xf9900000f9d, + 0xf9e00000fa2, + 0xfa300000fa7, + 0xfa800000fac, + 0xfad00000fb9, + 0xfba00000fbd, + 0xfc600000fc7, + 0x10000000104a, + 0x10500000109e, + 0x10d0000010fb, + 0x10fd00001100, + 0x120000001249, + 0x124a0000124e, + 0x125000001257, + 0x125800001259, + 0x125a0000125e, + 0x126000001289, + 0x128a0000128e, + 0x1290000012b1, + 0x12b2000012b6, + 0x12b8000012bf, + 0x12c0000012c1, + 0x12c2000012c6, + 0x12c8000012d7, + 0x12d800001311, + 0x131200001316, + 0x13180000135b, + 0x135d00001360, + 0x138000001390, + 0x13a0000013f6, + 0x14010000166d, + 0x166f00001680, + 0x16810000169b, + 0x16a0000016eb, + 0x16f1000016f9, + 0x170000001716, + 0x171f00001735, + 0x174000001754, + 0x17600000176d, + 0x176e00001771, + 0x177200001774, + 0x1780000017b4, + 0x17b6000017d4, + 0x17d7000017d8, + 0x17dc000017de, + 0x17e0000017ea, + 0x18100000181a, + 0x182000001879, + 0x1880000018ab, + 0x18b0000018f6, + 0x19000000191f, + 0x19200000192c, + 0x19300000193c, + 0x19460000196e, + 0x197000001975, + 0x1980000019ac, + 0x19b0000019ca, + 0x19d0000019da, + 0x1a0000001a1c, + 0x1a2000001a5f, + 0x1a6000001a7d, + 0x1a7f00001a8a, + 0x1a9000001a9a, + 0x1aa700001aa8, + 0x1ab000001abe, + 0x1abf00001acf, + 0x1b0000001b4d, + 0x1b5000001b5a, + 0x1b6b00001b74, + 0x1b8000001bf4, + 0x1c0000001c38, + 0x1c4000001c4a, + 
0x1c4d00001c7e, + 0x1cd000001cd3, + 0x1cd400001cfb, + 0x1d0000001d2c, + 0x1d2f00001d30, + 0x1d3b00001d3c, + 0x1d4e00001d4f, + 0x1d6b00001d78, + 0x1d7900001d9b, + 0x1dc000001e00, + 0x1e0100001e02, + 0x1e0300001e04, + 0x1e0500001e06, + 0x1e0700001e08, + 0x1e0900001e0a, + 0x1e0b00001e0c, + 0x1e0d00001e0e, + 0x1e0f00001e10, + 0x1e1100001e12, + 0x1e1300001e14, + 0x1e1500001e16, + 0x1e1700001e18, + 0x1e1900001e1a, + 0x1e1b00001e1c, + 0x1e1d00001e1e, + 0x1e1f00001e20, + 0x1e2100001e22, + 0x1e2300001e24, + 0x1e2500001e26, + 0x1e2700001e28, + 0x1e2900001e2a, + 0x1e2b00001e2c, + 0x1e2d00001e2e, + 0x1e2f00001e30, + 0x1e3100001e32, + 0x1e3300001e34, + 0x1e3500001e36, + 0x1e3700001e38, + 0x1e3900001e3a, + 0x1e3b00001e3c, + 0x1e3d00001e3e, + 0x1e3f00001e40, + 0x1e4100001e42, + 0x1e4300001e44, + 0x1e4500001e46, + 0x1e4700001e48, + 0x1e4900001e4a, + 0x1e4b00001e4c, + 0x1e4d00001e4e, + 0x1e4f00001e50, + 0x1e5100001e52, + 0x1e5300001e54, + 0x1e5500001e56, + 0x1e5700001e58, + 0x1e5900001e5a, + 0x1e5b00001e5c, + 0x1e5d00001e5e, + 0x1e5f00001e60, + 0x1e6100001e62, + 0x1e6300001e64, + 0x1e6500001e66, + 0x1e6700001e68, + 0x1e6900001e6a, + 0x1e6b00001e6c, + 0x1e6d00001e6e, + 0x1e6f00001e70, + 0x1e7100001e72, + 0x1e7300001e74, + 0x1e7500001e76, + 0x1e7700001e78, + 0x1e7900001e7a, + 0x1e7b00001e7c, + 0x1e7d00001e7e, + 0x1e7f00001e80, + 0x1e8100001e82, + 0x1e8300001e84, + 0x1e8500001e86, + 0x1e8700001e88, + 0x1e8900001e8a, + 0x1e8b00001e8c, + 0x1e8d00001e8e, + 0x1e8f00001e90, + 0x1e9100001e92, + 0x1e9300001e94, + 0x1e9500001e9a, + 0x1e9c00001e9e, + 0x1e9f00001ea0, + 0x1ea100001ea2, + 0x1ea300001ea4, + 0x1ea500001ea6, + 0x1ea700001ea8, + 0x1ea900001eaa, + 0x1eab00001eac, + 0x1ead00001eae, + 0x1eaf00001eb0, + 0x1eb100001eb2, + 0x1eb300001eb4, + 0x1eb500001eb6, + 0x1eb700001eb8, + 0x1eb900001eba, + 0x1ebb00001ebc, + 0x1ebd00001ebe, + 0x1ebf00001ec0, + 0x1ec100001ec2, + 0x1ec300001ec4, + 0x1ec500001ec6, + 0x1ec700001ec8, + 0x1ec900001eca, + 0x1ecb00001ecc, + 0x1ecd00001ece, + 0x1ecf00001ed0, + 
0x1ed100001ed2, + 0x1ed300001ed4, + 0x1ed500001ed6, + 0x1ed700001ed8, + 0x1ed900001eda, + 0x1edb00001edc, + 0x1edd00001ede, + 0x1edf00001ee0, + 0x1ee100001ee2, + 0x1ee300001ee4, + 0x1ee500001ee6, + 0x1ee700001ee8, + 0x1ee900001eea, + 0x1eeb00001eec, + 0x1eed00001eee, + 0x1eef00001ef0, + 0x1ef100001ef2, + 0x1ef300001ef4, + 0x1ef500001ef6, + 0x1ef700001ef8, + 0x1ef900001efa, + 0x1efb00001efc, + 0x1efd00001efe, + 0x1eff00001f08, + 0x1f1000001f16, + 0x1f2000001f28, + 0x1f3000001f38, + 0x1f4000001f46, + 0x1f5000001f58, + 0x1f6000001f68, + 0x1f7000001f71, + 0x1f7200001f73, + 0x1f7400001f75, + 0x1f7600001f77, + 0x1f7800001f79, + 0x1f7a00001f7b, + 0x1f7c00001f7d, + 0x1fb000001fb2, + 0x1fb600001fb7, + 0x1fc600001fc7, + 0x1fd000001fd3, + 0x1fd600001fd8, + 0x1fe000001fe3, + 0x1fe400001fe8, + 0x1ff600001ff7, + 0x214e0000214f, + 0x218400002185, + 0x2c3000002c60, + 0x2c6100002c62, + 0x2c6500002c67, + 0x2c6800002c69, + 0x2c6a00002c6b, + 0x2c6c00002c6d, + 0x2c7100002c72, + 0x2c7300002c75, + 0x2c7600002c7c, + 0x2c8100002c82, + 0x2c8300002c84, + 0x2c8500002c86, + 0x2c8700002c88, + 0x2c8900002c8a, + 0x2c8b00002c8c, + 0x2c8d00002c8e, + 0x2c8f00002c90, + 0x2c9100002c92, + 0x2c9300002c94, + 0x2c9500002c96, + 0x2c9700002c98, + 0x2c9900002c9a, + 0x2c9b00002c9c, + 0x2c9d00002c9e, + 0x2c9f00002ca0, + 0x2ca100002ca2, + 0x2ca300002ca4, + 0x2ca500002ca6, + 0x2ca700002ca8, + 0x2ca900002caa, + 0x2cab00002cac, + 0x2cad00002cae, + 0x2caf00002cb0, + 0x2cb100002cb2, + 0x2cb300002cb4, + 0x2cb500002cb6, + 0x2cb700002cb8, + 0x2cb900002cba, + 0x2cbb00002cbc, + 0x2cbd00002cbe, + 0x2cbf00002cc0, + 0x2cc100002cc2, + 0x2cc300002cc4, + 0x2cc500002cc6, + 0x2cc700002cc8, + 0x2cc900002cca, + 0x2ccb00002ccc, + 0x2ccd00002cce, + 0x2ccf00002cd0, + 0x2cd100002cd2, + 0x2cd300002cd4, + 0x2cd500002cd6, + 0x2cd700002cd8, + 0x2cd900002cda, + 0x2cdb00002cdc, + 0x2cdd00002cde, + 0x2cdf00002ce0, + 0x2ce100002ce2, + 0x2ce300002ce5, + 0x2cec00002ced, + 0x2cee00002cf2, + 0x2cf300002cf4, + 0x2d0000002d26, + 0x2d2700002d28, + 
0x2d2d00002d2e, + 0x2d3000002d68, + 0x2d7f00002d97, + 0x2da000002da7, + 0x2da800002daf, + 0x2db000002db7, + 0x2db800002dbf, + 0x2dc000002dc7, + 0x2dc800002dcf, + 0x2dd000002dd7, + 0x2dd800002ddf, + 0x2de000002e00, + 0x2e2f00002e30, + 0x300500003008, + 0x302a0000302e, + 0x303c0000303d, + 0x304100003097, + 0x30990000309b, + 0x309d0000309f, + 0x30a1000030fb, + 0x30fc000030ff, + 0x310500003130, + 0x31a0000031c0, + 0x31f000003200, + 0x340000004dc0, + 0x4e000000a48d, + 0xa4d00000a4fe, + 0xa5000000a60d, + 0xa6100000a62c, + 0xa6410000a642, + 0xa6430000a644, + 0xa6450000a646, + 0xa6470000a648, + 0xa6490000a64a, + 0xa64b0000a64c, + 0xa64d0000a64e, + 0xa64f0000a650, + 0xa6510000a652, + 0xa6530000a654, + 0xa6550000a656, + 0xa6570000a658, + 0xa6590000a65a, + 0xa65b0000a65c, + 0xa65d0000a65e, + 0xa65f0000a660, + 0xa6610000a662, + 0xa6630000a664, + 0xa6650000a666, + 0xa6670000a668, + 0xa6690000a66a, + 0xa66b0000a66c, + 0xa66d0000a670, + 0xa6740000a67e, + 0xa67f0000a680, + 0xa6810000a682, + 0xa6830000a684, + 0xa6850000a686, + 0xa6870000a688, + 0xa6890000a68a, + 0xa68b0000a68c, + 0xa68d0000a68e, + 0xa68f0000a690, + 0xa6910000a692, + 0xa6930000a694, + 0xa6950000a696, + 0xa6970000a698, + 0xa6990000a69a, + 0xa69b0000a69c, + 0xa69e0000a6e6, + 0xa6f00000a6f2, + 0xa7170000a720, + 0xa7230000a724, + 0xa7250000a726, + 0xa7270000a728, + 0xa7290000a72a, + 0xa72b0000a72c, + 0xa72d0000a72e, + 0xa72f0000a732, + 0xa7330000a734, + 0xa7350000a736, + 0xa7370000a738, + 0xa7390000a73a, + 0xa73b0000a73c, + 0xa73d0000a73e, + 0xa73f0000a740, + 0xa7410000a742, + 0xa7430000a744, + 0xa7450000a746, + 0xa7470000a748, + 0xa7490000a74a, + 0xa74b0000a74c, + 0xa74d0000a74e, + 0xa74f0000a750, + 0xa7510000a752, + 0xa7530000a754, + 0xa7550000a756, + 0xa7570000a758, + 0xa7590000a75a, + 0xa75b0000a75c, + 0xa75d0000a75e, + 0xa75f0000a760, + 0xa7610000a762, + 0xa7630000a764, + 0xa7650000a766, + 0xa7670000a768, + 0xa7690000a76a, + 0xa76b0000a76c, + 0xa76d0000a76e, + 0xa76f0000a770, + 0xa7710000a779, + 0xa77a0000a77b, + 
0xa77c0000a77d, + 0xa77f0000a780, + 0xa7810000a782, + 0xa7830000a784, + 0xa7850000a786, + 0xa7870000a789, + 0xa78c0000a78d, + 0xa78e0000a790, + 0xa7910000a792, + 0xa7930000a796, + 0xa7970000a798, + 0xa7990000a79a, + 0xa79b0000a79c, + 0xa79d0000a79e, + 0xa79f0000a7a0, + 0xa7a10000a7a2, + 0xa7a30000a7a4, + 0xa7a50000a7a6, + 0xa7a70000a7a8, + 0xa7a90000a7aa, + 0xa7af0000a7b0, + 0xa7b50000a7b6, + 0xa7b70000a7b8, + 0xa7b90000a7ba, + 0xa7bb0000a7bc, + 0xa7bd0000a7be, + 0xa7bf0000a7c0, + 0xa7c10000a7c2, + 0xa7c30000a7c4, + 0xa7c80000a7c9, + 0xa7ca0000a7cb, + 0xa7d10000a7d2, + 0xa7d30000a7d4, + 0xa7d50000a7d6, + 0xa7d70000a7d8, + 0xa7d90000a7da, + 0xa7f20000a7f5, + 0xa7f60000a7f8, + 0xa7fa0000a828, + 0xa82c0000a82d, + 0xa8400000a874, + 0xa8800000a8c6, + 0xa8d00000a8da, + 0xa8e00000a8f8, + 0xa8fb0000a8fc, + 0xa8fd0000a92e, + 0xa9300000a954, + 0xa9800000a9c1, + 0xa9cf0000a9da, + 0xa9e00000a9ff, + 0xaa000000aa37, + 0xaa400000aa4e, + 0xaa500000aa5a, + 0xaa600000aa77, + 0xaa7a0000aac3, + 0xaadb0000aade, + 0xaae00000aaf0, + 0xaaf20000aaf7, + 0xab010000ab07, + 0xab090000ab0f, + 0xab110000ab17, + 0xab200000ab27, + 0xab280000ab2f, + 0xab300000ab5b, + 0xab600000ab6a, + 0xabc00000abeb, + 0xabec0000abee, + 0xabf00000abfa, + 0xac000000d7a4, + 0xfa0e0000fa10, + 0xfa110000fa12, + 0xfa130000fa15, + 0xfa1f0000fa20, + 0xfa210000fa22, + 0xfa230000fa25, + 0xfa270000fa2a, + 0xfb1e0000fb1f, + 0xfe200000fe30, + 0xfe730000fe74, + 0x100000001000c, + 0x1000d00010027, + 0x100280001003b, + 0x1003c0001003e, + 0x1003f0001004e, + 0x100500001005e, + 0x10080000100fb, + 0x101fd000101fe, + 0x102800001029d, + 0x102a0000102d1, + 0x102e0000102e1, + 0x1030000010320, + 0x1032d00010341, + 0x103420001034a, + 0x103500001037b, + 0x103800001039e, + 0x103a0000103c4, + 0x103c8000103d0, + 0x104280001049e, + 0x104a0000104aa, + 0x104d8000104fc, + 0x1050000010528, + 0x1053000010564, + 0x10597000105a2, + 0x105a3000105b2, + 0x105b3000105ba, + 0x105bb000105bd, + 0x1060000010737, + 0x1074000010756, + 0x1076000010768, + 
0x1078000010786, + 0x10787000107b1, + 0x107b2000107bb, + 0x1080000010806, + 0x1080800010809, + 0x1080a00010836, + 0x1083700010839, + 0x1083c0001083d, + 0x1083f00010856, + 0x1086000010877, + 0x108800001089f, + 0x108e0000108f3, + 0x108f4000108f6, + 0x1090000010916, + 0x109200001093a, + 0x10980000109b8, + 0x109be000109c0, + 0x10a0000010a04, + 0x10a0500010a07, + 0x10a0c00010a14, + 0x10a1500010a18, + 0x10a1900010a36, + 0x10a3800010a3b, + 0x10a3f00010a40, + 0x10a6000010a7d, + 0x10a8000010a9d, + 0x10ac000010ac8, + 0x10ac900010ae7, + 0x10b0000010b36, + 0x10b4000010b56, + 0x10b6000010b73, + 0x10b8000010b92, + 0x10c0000010c49, + 0x10cc000010cf3, + 0x10d0000010d28, + 0x10d3000010d3a, + 0x10e8000010eaa, + 0x10eab00010ead, + 0x10eb000010eb2, + 0x10f0000010f1d, + 0x10f2700010f28, + 0x10f3000010f51, + 0x10f7000010f86, + 0x10fb000010fc5, + 0x10fe000010ff7, + 0x1100000011047, + 0x1106600011076, + 0x1107f000110bb, + 0x110c2000110c3, + 0x110d0000110e9, + 0x110f0000110fa, + 0x1110000011135, + 0x1113600011140, + 0x1114400011148, + 0x1115000011174, + 0x1117600011177, + 0x11180000111c5, + 0x111c9000111cd, + 0x111ce000111db, + 0x111dc000111dd, + 0x1120000011212, + 0x1121300011238, + 0x1123e0001123f, + 0x1128000011287, + 0x1128800011289, + 0x1128a0001128e, + 0x1128f0001129e, + 0x1129f000112a9, + 0x112b0000112eb, + 0x112f0000112fa, + 0x1130000011304, + 0x113050001130d, + 0x1130f00011311, + 0x1131300011329, + 0x1132a00011331, + 0x1133200011334, + 0x113350001133a, + 0x1133b00011345, + 0x1134700011349, + 0x1134b0001134e, + 0x1135000011351, + 0x1135700011358, + 0x1135d00011364, + 0x113660001136d, + 0x1137000011375, + 0x114000001144b, + 0x114500001145a, + 0x1145e00011462, + 0x11480000114c6, + 0x114c7000114c8, + 0x114d0000114da, + 0x11580000115b6, + 0x115b8000115c1, + 0x115d8000115de, + 0x1160000011641, + 0x1164400011645, + 0x116500001165a, + 0x11680000116b9, + 0x116c0000116ca, + 0x117000001171b, + 0x1171d0001172c, + 0x117300001173a, + 0x1174000011747, + 0x118000001183b, + 0x118c0000118ea, + 
0x118ff00011907, + 0x119090001190a, + 0x1190c00011914, + 0x1191500011917, + 0x1191800011936, + 0x1193700011939, + 0x1193b00011944, + 0x119500001195a, + 0x119a0000119a8, + 0x119aa000119d8, + 0x119da000119e2, + 0x119e3000119e5, + 0x11a0000011a3f, + 0x11a4700011a48, + 0x11a5000011a9a, + 0x11a9d00011a9e, + 0x11ab000011af9, + 0x11c0000011c09, + 0x11c0a00011c37, + 0x11c3800011c41, + 0x11c5000011c5a, + 0x11c7200011c90, + 0x11c9200011ca8, + 0x11ca900011cb7, + 0x11d0000011d07, + 0x11d0800011d0a, + 0x11d0b00011d37, + 0x11d3a00011d3b, + 0x11d3c00011d3e, + 0x11d3f00011d48, + 0x11d5000011d5a, + 0x11d6000011d66, + 0x11d6700011d69, + 0x11d6a00011d8f, + 0x11d9000011d92, + 0x11d9300011d99, + 0x11da000011daa, + 0x11ee000011ef7, + 0x11fb000011fb1, + 0x120000001239a, + 0x1248000012544, + 0x12f9000012ff1, + 0x130000001342f, + 0x1440000014647, + 0x1680000016a39, + 0x16a4000016a5f, + 0x16a6000016a6a, + 0x16a7000016abf, + 0x16ac000016aca, + 0x16ad000016aee, + 0x16af000016af5, + 0x16b0000016b37, + 0x16b4000016b44, + 0x16b5000016b5a, + 0x16b6300016b78, + 0x16b7d00016b90, + 0x16e6000016e80, + 0x16f0000016f4b, + 0x16f4f00016f88, + 0x16f8f00016fa0, + 0x16fe000016fe2, + 0x16fe300016fe5, + 0x16ff000016ff2, + 0x17000000187f8, + 0x1880000018cd6, + 0x18d0000018d09, + 0x1aff00001aff4, + 0x1aff50001affc, + 0x1affd0001afff, + 0x1b0000001b123, + 0x1b1500001b153, + 0x1b1640001b168, + 0x1b1700001b2fc, + 0x1bc000001bc6b, + 0x1bc700001bc7d, + 0x1bc800001bc89, + 0x1bc900001bc9a, + 0x1bc9d0001bc9f, + 0x1cf000001cf2e, + 0x1cf300001cf47, + 0x1da000001da37, + 0x1da3b0001da6d, + 0x1da750001da76, + 0x1da840001da85, + 0x1da9b0001daa0, + 0x1daa10001dab0, + 0x1df000001df1f, + 0x1e0000001e007, + 0x1e0080001e019, + 0x1e01b0001e022, + 0x1e0230001e025, + 0x1e0260001e02b, + 0x1e1000001e12d, + 0x1e1300001e13e, + 0x1e1400001e14a, + 0x1e14e0001e14f, + 0x1e2900001e2af, + 0x1e2c00001e2fa, + 0x1e7e00001e7e7, + 0x1e7e80001e7ec, + 0x1e7ed0001e7ef, + 0x1e7f00001e7ff, + 0x1e8000001e8c5, + 0x1e8d00001e8d7, + 0x1e9220001e94c, + 
0x1e9500001e95a, + 0x1fbf00001fbfa, + 0x200000002a6e0, + 0x2a7000002b739, + 0x2b7400002b81e, + 0x2b8200002cea2, + 0x2ceb00002ebe1, + 0x300000003134b, + ), + 'CONTEXTJ': ( + 0x200c0000200e, + ), + 'CONTEXTO': ( + 0xb7000000b8, + 0x37500000376, + 0x5f3000005f5, + 0x6600000066a, + 0x6f0000006fa, + 0x30fb000030fc, + ), +} diff --git a/.venv/lib/python3.9/site-packages/idna/intranges.py b/.venv/lib/python3.9/site-packages/idna/intranges.py new file mode 100644 index 0000000..6a43b04 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/idna/intranges.py @@ -0,0 +1,54 @@ +""" +Given a list of integers, made up of (hopefully) a small number of long runs +of consecutive integers, compute a representation of the form +((start1, end1), (start2, end2) ...). Then answer the question "was x present +in the original list?" in time O(log(# runs)). +""" + +import bisect +from typing import List, Tuple + +def intranges_from_list(list_: List[int]) -> Tuple[int, ...]: + """Represent a list of integers as a sequence of ranges: + ((start_0, end_0), (start_1, end_1), ...), such that the original + integers are exactly those x such that start_i <= x < end_i for some i. + + Ranges are encoded as single integers (start << 32 | end), not as tuples. 
+ """ + + sorted_list = sorted(list_) + ranges = [] + last_write = -1 + for i in range(len(sorted_list)): + if i+1 < len(sorted_list): + if sorted_list[i] == sorted_list[i+1]-1: + continue + current_range = sorted_list[last_write+1:i+1] + ranges.append(_encode_range(current_range[0], current_range[-1] + 1)) + last_write = i + + return tuple(ranges) + +def _encode_range(start: int, end: int) -> int: + return (start << 32) | end + +def _decode_range(r: int) -> Tuple[int, int]: + return (r >> 32), (r & ((1 << 32) - 1)) + + +def intranges_contain(int_: int, ranges: Tuple[int, ...]) -> bool: + """Determine if `int_` falls into one of the ranges in `ranges`.""" + tuple_ = _encode_range(int_, 0) + pos = bisect.bisect_left(ranges, tuple_) + # we could be immediately ahead of a tuple (start, end) + # with start < int_ <= end + if pos > 0: + left, right = _decode_range(ranges[pos-1]) + if left <= int_ < right: + return True + # or we could be immediately behind a tuple (int_, end) + if pos < len(ranges): + left, _ = _decode_range(ranges[pos]) + if left == int_: + return True + return False diff --git a/.venv/lib/python3.9/site-packages/idna/package_data.py b/.venv/lib/python3.9/site-packages/idna/package_data.py new file mode 100644 index 0000000..f5ea87c --- /dev/null +++ b/.venv/lib/python3.9/site-packages/idna/package_data.py @@ -0,0 +1,2 @@ +__version__ = '3.3' + diff --git a/.venv/lib/python3.9/site-packages/idna/py.typed b/.venv/lib/python3.9/site-packages/idna/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.9/site-packages/idna/uts46data.py b/.venv/lib/python3.9/site-packages/idna/uts46data.py new file mode 100644 index 0000000..8f65705 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/idna/uts46data.py @@ -0,0 +1,8512 @@ +# This file is automatically generated by tools/idna-data +# vim: set fileencoding=utf-8 : + +from typing import List, Tuple, Union + + +"""IDNA Mapping Table from UTS46.""" + + +__version__ = '14.0.0' +def 
_seg_0() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x0, '3'), + (0x1, '3'), + (0x2, '3'), + (0x3, '3'), + (0x4, '3'), + (0x5, '3'), + (0x6, '3'), + (0x7, '3'), + (0x8, '3'), + (0x9, '3'), + (0xA, '3'), + (0xB, '3'), + (0xC, '3'), + (0xD, '3'), + (0xE, '3'), + (0xF, '3'), + (0x10, '3'), + (0x11, '3'), + (0x12, '3'), + (0x13, '3'), + (0x14, '3'), + (0x15, '3'), + (0x16, '3'), + (0x17, '3'), + (0x18, '3'), + (0x19, '3'), + (0x1A, '3'), + (0x1B, '3'), + (0x1C, '3'), + (0x1D, '3'), + (0x1E, '3'), + (0x1F, '3'), + (0x20, '3'), + (0x21, '3'), + (0x22, '3'), + (0x23, '3'), + (0x24, '3'), + (0x25, '3'), + (0x26, '3'), + (0x27, '3'), + (0x28, '3'), + (0x29, '3'), + (0x2A, '3'), + (0x2B, '3'), + (0x2C, '3'), + (0x2D, 'V'), + (0x2E, 'V'), + (0x2F, '3'), + (0x30, 'V'), + (0x31, 'V'), + (0x32, 'V'), + (0x33, 'V'), + (0x34, 'V'), + (0x35, 'V'), + (0x36, 'V'), + (0x37, 'V'), + (0x38, 'V'), + (0x39, 'V'), + (0x3A, '3'), + (0x3B, '3'), + (0x3C, '3'), + (0x3D, '3'), + (0x3E, '3'), + (0x3F, '3'), + (0x40, '3'), + (0x41, 'M', 'a'), + (0x42, 'M', 'b'), + (0x43, 'M', 'c'), + (0x44, 'M', 'd'), + (0x45, 'M', 'e'), + (0x46, 'M', 'f'), + (0x47, 'M', 'g'), + (0x48, 'M', 'h'), + (0x49, 'M', 'i'), + (0x4A, 'M', 'j'), + (0x4B, 'M', 'k'), + (0x4C, 'M', 'l'), + (0x4D, 'M', 'm'), + (0x4E, 'M', 'n'), + (0x4F, 'M', 'o'), + (0x50, 'M', 'p'), + (0x51, 'M', 'q'), + (0x52, 'M', 'r'), + (0x53, 'M', 's'), + (0x54, 'M', 't'), + (0x55, 'M', 'u'), + (0x56, 'M', 'v'), + (0x57, 'M', 'w'), + (0x58, 'M', 'x'), + (0x59, 'M', 'y'), + (0x5A, 'M', 'z'), + (0x5B, '3'), + (0x5C, '3'), + (0x5D, '3'), + (0x5E, '3'), + (0x5F, '3'), + (0x60, '3'), + (0x61, 'V'), + (0x62, 'V'), + (0x63, 'V'), + ] + +def _seg_1() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x64, 'V'), + (0x65, 'V'), + (0x66, 'V'), + (0x67, 'V'), + (0x68, 'V'), + (0x69, 'V'), + (0x6A, 'V'), + (0x6B, 'V'), + (0x6C, 'V'), + (0x6D, 'V'), + (0x6E, 'V'), + (0x6F, 'V'), + (0x70, 'V'), + (0x71, 'V'), + (0x72, 'V'), 
+ (0x73, 'V'), + (0x74, 'V'), + (0x75, 'V'), + (0x76, 'V'), + (0x77, 'V'), + (0x78, 'V'), + (0x79, 'V'), + (0x7A, 'V'), + (0x7B, '3'), + (0x7C, '3'), + (0x7D, '3'), + (0x7E, '3'), + (0x7F, '3'), + (0x80, 'X'), + (0x81, 'X'), + (0x82, 'X'), + (0x83, 'X'), + (0x84, 'X'), + (0x85, 'X'), + (0x86, 'X'), + (0x87, 'X'), + (0x88, 'X'), + (0x89, 'X'), + (0x8A, 'X'), + (0x8B, 'X'), + (0x8C, 'X'), + (0x8D, 'X'), + (0x8E, 'X'), + (0x8F, 'X'), + (0x90, 'X'), + (0x91, 'X'), + (0x92, 'X'), + (0x93, 'X'), + (0x94, 'X'), + (0x95, 'X'), + (0x96, 'X'), + (0x97, 'X'), + (0x98, 'X'), + (0x99, 'X'), + (0x9A, 'X'), + (0x9B, 'X'), + (0x9C, 'X'), + (0x9D, 'X'), + (0x9E, 'X'), + (0x9F, 'X'), + (0xA0, '3', ' '), + (0xA1, 'V'), + (0xA2, 'V'), + (0xA3, 'V'), + (0xA4, 'V'), + (0xA5, 'V'), + (0xA6, 'V'), + (0xA7, 'V'), + (0xA8, '3', ' ̈'), + (0xA9, 'V'), + (0xAA, 'M', 'a'), + (0xAB, 'V'), + (0xAC, 'V'), + (0xAD, 'I'), + (0xAE, 'V'), + (0xAF, '3', ' ̄'), + (0xB0, 'V'), + (0xB1, 'V'), + (0xB2, 'M', '2'), + (0xB3, 'M', '3'), + (0xB4, '3', ' ́'), + (0xB5, 'M', 'μ'), + (0xB6, 'V'), + (0xB7, 'V'), + (0xB8, '3', ' ̧'), + (0xB9, 'M', '1'), + (0xBA, 'M', 'o'), + (0xBB, 'V'), + (0xBC, 'M', '1⁄4'), + (0xBD, 'M', '1⁄2'), + (0xBE, 'M', '3⁄4'), + (0xBF, 'V'), + (0xC0, 'M', 'à'), + (0xC1, 'M', 'á'), + (0xC2, 'M', 'â'), + (0xC3, 'M', 'ã'), + (0xC4, 'M', 'ä'), + (0xC5, 'M', 'å'), + (0xC6, 'M', 'æ'), + (0xC7, 'M', 'ç'), + ] + +def _seg_2() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xC8, 'M', 'è'), + (0xC9, 'M', 'é'), + (0xCA, 'M', 'ê'), + (0xCB, 'M', 'ë'), + (0xCC, 'M', 'ì'), + (0xCD, 'M', 'í'), + (0xCE, 'M', 'î'), + (0xCF, 'M', 'ï'), + (0xD0, 'M', 'ð'), + (0xD1, 'M', 'ñ'), + (0xD2, 'M', 'ò'), + (0xD3, 'M', 'ó'), + (0xD4, 'M', 'ô'), + (0xD5, 'M', 'õ'), + (0xD6, 'M', 'ö'), + (0xD7, 'V'), + (0xD8, 'M', 'ø'), + (0xD9, 'M', 'ù'), + (0xDA, 'M', 'ú'), + (0xDB, 'M', 'û'), + (0xDC, 'M', 'ü'), + (0xDD, 'M', 'ý'), + (0xDE, 'M', 'þ'), + (0xDF, 'D', 'ss'), + (0xE0, 'V'), + (0xE1, 'V'), + (0xE2, 
'V'), + (0xE3, 'V'), + (0xE4, 'V'), + (0xE5, 'V'), + (0xE6, 'V'), + (0xE7, 'V'), + (0xE8, 'V'), + (0xE9, 'V'), + (0xEA, 'V'), + (0xEB, 'V'), + (0xEC, 'V'), + (0xED, 'V'), + (0xEE, 'V'), + (0xEF, 'V'), + (0xF0, 'V'), + (0xF1, 'V'), + (0xF2, 'V'), + (0xF3, 'V'), + (0xF4, 'V'), + (0xF5, 'V'), + (0xF6, 'V'), + (0xF7, 'V'), + (0xF8, 'V'), + (0xF9, 'V'), + (0xFA, 'V'), + (0xFB, 'V'), + (0xFC, 'V'), + (0xFD, 'V'), + (0xFE, 'V'), + (0xFF, 'V'), + (0x100, 'M', 'ā'), + (0x101, 'V'), + (0x102, 'M', 'ă'), + (0x103, 'V'), + (0x104, 'M', 'ą'), + (0x105, 'V'), + (0x106, 'M', 'ć'), + (0x107, 'V'), + (0x108, 'M', 'ĉ'), + (0x109, 'V'), + (0x10A, 'M', 'ċ'), + (0x10B, 'V'), + (0x10C, 'M', 'č'), + (0x10D, 'V'), + (0x10E, 'M', 'ď'), + (0x10F, 'V'), + (0x110, 'M', 'đ'), + (0x111, 'V'), + (0x112, 'M', 'ē'), + (0x113, 'V'), + (0x114, 'M', 'ĕ'), + (0x115, 'V'), + (0x116, 'M', 'ė'), + (0x117, 'V'), + (0x118, 'M', 'ę'), + (0x119, 'V'), + (0x11A, 'M', 'ě'), + (0x11B, 'V'), + (0x11C, 'M', 'ĝ'), + (0x11D, 'V'), + (0x11E, 'M', 'ğ'), + (0x11F, 'V'), + (0x120, 'M', 'ġ'), + (0x121, 'V'), + (0x122, 'M', 'ģ'), + (0x123, 'V'), + (0x124, 'M', 'ĥ'), + (0x125, 'V'), + (0x126, 'M', 'ħ'), + (0x127, 'V'), + (0x128, 'M', 'ĩ'), + (0x129, 'V'), + (0x12A, 'M', 'ī'), + (0x12B, 'V'), + ] + +def _seg_3() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x12C, 'M', 'ĭ'), + (0x12D, 'V'), + (0x12E, 'M', 'į'), + (0x12F, 'V'), + (0x130, 'M', 'i̇'), + (0x131, 'V'), + (0x132, 'M', 'ij'), + (0x134, 'M', 'ĵ'), + (0x135, 'V'), + (0x136, 'M', 'ķ'), + (0x137, 'V'), + (0x139, 'M', 'ĺ'), + (0x13A, 'V'), + (0x13B, 'M', 'ļ'), + (0x13C, 'V'), + (0x13D, 'M', 'ľ'), + (0x13E, 'V'), + (0x13F, 'M', 'l·'), + (0x141, 'M', 'ł'), + (0x142, 'V'), + (0x143, 'M', 'ń'), + (0x144, 'V'), + (0x145, 'M', 'ņ'), + (0x146, 'V'), + (0x147, 'M', 'ň'), + (0x148, 'V'), + (0x149, 'M', 'ʼn'), + (0x14A, 'M', 'ŋ'), + (0x14B, 'V'), + (0x14C, 'M', 'ō'), + (0x14D, 'V'), + (0x14E, 'M', 'ŏ'), + (0x14F, 'V'), + (0x150, 'M', 'ő'), + (0x151, 'V'), 
+ (0x152, 'M', 'œ'), + (0x153, 'V'), + (0x154, 'M', 'ŕ'), + (0x155, 'V'), + (0x156, 'M', 'ŗ'), + (0x157, 'V'), + (0x158, 'M', 'ř'), + (0x159, 'V'), + (0x15A, 'M', 'ś'), + (0x15B, 'V'), + (0x15C, 'M', 'ŝ'), + (0x15D, 'V'), + (0x15E, 'M', 'ş'), + (0x15F, 'V'), + (0x160, 'M', 'š'), + (0x161, 'V'), + (0x162, 'M', 'ţ'), + (0x163, 'V'), + (0x164, 'M', 'ť'), + (0x165, 'V'), + (0x166, 'M', 'ŧ'), + (0x167, 'V'), + (0x168, 'M', 'ũ'), + (0x169, 'V'), + (0x16A, 'M', 'ū'), + (0x16B, 'V'), + (0x16C, 'M', 'ŭ'), + (0x16D, 'V'), + (0x16E, 'M', 'ů'), + (0x16F, 'V'), + (0x170, 'M', 'ű'), + (0x171, 'V'), + (0x172, 'M', 'ų'), + (0x173, 'V'), + (0x174, 'M', 'ŵ'), + (0x175, 'V'), + (0x176, 'M', 'ŷ'), + (0x177, 'V'), + (0x178, 'M', 'ÿ'), + (0x179, 'M', 'ź'), + (0x17A, 'V'), + (0x17B, 'M', 'ż'), + (0x17C, 'V'), + (0x17D, 'M', 'ž'), + (0x17E, 'V'), + (0x17F, 'M', 's'), + (0x180, 'V'), + (0x181, 'M', 'ɓ'), + (0x182, 'M', 'ƃ'), + (0x183, 'V'), + (0x184, 'M', 'ƅ'), + (0x185, 'V'), + (0x186, 'M', 'ɔ'), + (0x187, 'M', 'ƈ'), + (0x188, 'V'), + (0x189, 'M', 'ɖ'), + (0x18A, 'M', 'ɗ'), + (0x18B, 'M', 'ƌ'), + (0x18C, 'V'), + (0x18E, 'M', 'ǝ'), + (0x18F, 'M', 'ə'), + (0x190, 'M', 'ɛ'), + (0x191, 'M', 'ƒ'), + (0x192, 'V'), + (0x193, 'M', 'ɠ'), + ] + +def _seg_4() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x194, 'M', 'ɣ'), + (0x195, 'V'), + (0x196, 'M', 'ɩ'), + (0x197, 'M', 'ɨ'), + (0x198, 'M', 'ƙ'), + (0x199, 'V'), + (0x19C, 'M', 'ɯ'), + (0x19D, 'M', 'ɲ'), + (0x19E, 'V'), + (0x19F, 'M', 'ɵ'), + (0x1A0, 'M', 'ơ'), + (0x1A1, 'V'), + (0x1A2, 'M', 'ƣ'), + (0x1A3, 'V'), + (0x1A4, 'M', 'ƥ'), + (0x1A5, 'V'), + (0x1A6, 'M', 'ʀ'), + (0x1A7, 'M', 'ƨ'), + (0x1A8, 'V'), + (0x1A9, 'M', 'ʃ'), + (0x1AA, 'V'), + (0x1AC, 'M', 'ƭ'), + (0x1AD, 'V'), + (0x1AE, 'M', 'ʈ'), + (0x1AF, 'M', 'ư'), + (0x1B0, 'V'), + (0x1B1, 'M', 'ʊ'), + (0x1B2, 'M', 'ʋ'), + (0x1B3, 'M', 'ƴ'), + (0x1B4, 'V'), + (0x1B5, 'M', 'ƶ'), + (0x1B6, 'V'), + (0x1B7, 'M', 'ʒ'), + (0x1B8, 'M', 'ƹ'), + (0x1B9, 'V'), + (0x1BC, 'M', 
'ƽ'), + (0x1BD, 'V'), + (0x1C4, 'M', 'dž'), + (0x1C7, 'M', 'lj'), + (0x1CA, 'M', 'nj'), + (0x1CD, 'M', 'ǎ'), + (0x1CE, 'V'), + (0x1CF, 'M', 'ǐ'), + (0x1D0, 'V'), + (0x1D1, 'M', 'ǒ'), + (0x1D2, 'V'), + (0x1D3, 'M', 'ǔ'), + (0x1D4, 'V'), + (0x1D5, 'M', 'ǖ'), + (0x1D6, 'V'), + (0x1D7, 'M', 'ǘ'), + (0x1D8, 'V'), + (0x1D9, 'M', 'ǚ'), + (0x1DA, 'V'), + (0x1DB, 'M', 'ǜ'), + (0x1DC, 'V'), + (0x1DE, 'M', 'ǟ'), + (0x1DF, 'V'), + (0x1E0, 'M', 'ǡ'), + (0x1E1, 'V'), + (0x1E2, 'M', 'ǣ'), + (0x1E3, 'V'), + (0x1E4, 'M', 'ǥ'), + (0x1E5, 'V'), + (0x1E6, 'M', 'ǧ'), + (0x1E7, 'V'), + (0x1E8, 'M', 'ǩ'), + (0x1E9, 'V'), + (0x1EA, 'M', 'ǫ'), + (0x1EB, 'V'), + (0x1EC, 'M', 'ǭ'), + (0x1ED, 'V'), + (0x1EE, 'M', 'ǯ'), + (0x1EF, 'V'), + (0x1F1, 'M', 'dz'), + (0x1F4, 'M', 'ǵ'), + (0x1F5, 'V'), + (0x1F6, 'M', 'ƕ'), + (0x1F7, 'M', 'ƿ'), + (0x1F8, 'M', 'ǹ'), + (0x1F9, 'V'), + (0x1FA, 'M', 'ǻ'), + (0x1FB, 'V'), + (0x1FC, 'M', 'ǽ'), + (0x1FD, 'V'), + (0x1FE, 'M', 'ǿ'), + (0x1FF, 'V'), + (0x200, 'M', 'ȁ'), + (0x201, 'V'), + (0x202, 'M', 'ȃ'), + (0x203, 'V'), + (0x204, 'M', 'ȅ'), + (0x205, 'V'), + (0x206, 'M', 'ȇ'), + (0x207, 'V'), + (0x208, 'M', 'ȉ'), + (0x209, 'V'), + (0x20A, 'M', 'ȋ'), + (0x20B, 'V'), + (0x20C, 'M', 'ȍ'), + ] + +def _seg_5() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x20D, 'V'), + (0x20E, 'M', 'ȏ'), + (0x20F, 'V'), + (0x210, 'M', 'ȑ'), + (0x211, 'V'), + (0x212, 'M', 'ȓ'), + (0x213, 'V'), + (0x214, 'M', 'ȕ'), + (0x215, 'V'), + (0x216, 'M', 'ȗ'), + (0x217, 'V'), + (0x218, 'M', 'ș'), + (0x219, 'V'), + (0x21A, 'M', 'ț'), + (0x21B, 'V'), + (0x21C, 'M', 'ȝ'), + (0x21D, 'V'), + (0x21E, 'M', 'ȟ'), + (0x21F, 'V'), + (0x220, 'M', 'ƞ'), + (0x221, 'V'), + (0x222, 'M', 'ȣ'), + (0x223, 'V'), + (0x224, 'M', 'ȥ'), + (0x225, 'V'), + (0x226, 'M', 'ȧ'), + (0x227, 'V'), + (0x228, 'M', 'ȩ'), + (0x229, 'V'), + (0x22A, 'M', 'ȫ'), + (0x22B, 'V'), + (0x22C, 'M', 'ȭ'), + (0x22D, 'V'), + (0x22E, 'M', 'ȯ'), + (0x22F, 'V'), + (0x230, 'M', 'ȱ'), + (0x231, 'V'), + (0x232, 'M', 'ȳ'), + 
(0x233, 'V'), + (0x23A, 'M', 'ⱥ'), + (0x23B, 'M', 'ȼ'), + (0x23C, 'V'), + (0x23D, 'M', 'ƚ'), + (0x23E, 'M', 'ⱦ'), + (0x23F, 'V'), + (0x241, 'M', 'ɂ'), + (0x242, 'V'), + (0x243, 'M', 'ƀ'), + (0x244, 'M', 'ʉ'), + (0x245, 'M', 'ʌ'), + (0x246, 'M', 'ɇ'), + (0x247, 'V'), + (0x248, 'M', 'ɉ'), + (0x249, 'V'), + (0x24A, 'M', 'ɋ'), + (0x24B, 'V'), + (0x24C, 'M', 'ɍ'), + (0x24D, 'V'), + (0x24E, 'M', 'ɏ'), + (0x24F, 'V'), + (0x2B0, 'M', 'h'), + (0x2B1, 'M', 'ɦ'), + (0x2B2, 'M', 'j'), + (0x2B3, 'M', 'r'), + (0x2B4, 'M', 'ɹ'), + (0x2B5, 'M', 'ɻ'), + (0x2B6, 'M', 'ʁ'), + (0x2B7, 'M', 'w'), + (0x2B8, 'M', 'y'), + (0x2B9, 'V'), + (0x2D8, '3', ' ̆'), + (0x2D9, '3', ' ̇'), + (0x2DA, '3', ' ̊'), + (0x2DB, '3', ' ̨'), + (0x2DC, '3', ' ̃'), + (0x2DD, '3', ' ̋'), + (0x2DE, 'V'), + (0x2E0, 'M', 'ɣ'), + (0x2E1, 'M', 'l'), + (0x2E2, 'M', 's'), + (0x2E3, 'M', 'x'), + (0x2E4, 'M', 'ʕ'), + (0x2E5, 'V'), + (0x340, 'M', '̀'), + (0x341, 'M', '́'), + (0x342, 'V'), + (0x343, 'M', '̓'), + (0x344, 'M', '̈́'), + (0x345, 'M', 'ι'), + (0x346, 'V'), + (0x34F, 'I'), + (0x350, 'V'), + (0x370, 'M', 'ͱ'), + (0x371, 'V'), + (0x372, 'M', 'ͳ'), + (0x373, 'V'), + (0x374, 'M', 'ʹ'), + (0x375, 'V'), + (0x376, 'M', 'ͷ'), + (0x377, 'V'), + ] + +def _seg_6() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x378, 'X'), + (0x37A, '3', ' ι'), + (0x37B, 'V'), + (0x37E, '3', ';'), + (0x37F, 'M', 'ϳ'), + (0x380, 'X'), + (0x384, '3', ' ́'), + (0x385, '3', ' ̈́'), + (0x386, 'M', 'ά'), + (0x387, 'M', '·'), + (0x388, 'M', 'έ'), + (0x389, 'M', 'ή'), + (0x38A, 'M', 'ί'), + (0x38B, 'X'), + (0x38C, 'M', 'ό'), + (0x38D, 'X'), + (0x38E, 'M', 'ύ'), + (0x38F, 'M', 'ώ'), + (0x390, 'V'), + (0x391, 'M', 'α'), + (0x392, 'M', 'β'), + (0x393, 'M', 'γ'), + (0x394, 'M', 'δ'), + (0x395, 'M', 'ε'), + (0x396, 'M', 'ζ'), + (0x397, 'M', 'η'), + (0x398, 'M', 'θ'), + (0x399, 'M', 'ι'), + (0x39A, 'M', 'κ'), + (0x39B, 'M', 'λ'), + (0x39C, 'M', 'μ'), + (0x39D, 'M', 'ν'), + (0x39E, 'M', 'ξ'), + (0x39F, 'M', 'ο'), + (0x3A0, 'M', 
'π'), + (0x3A1, 'M', 'ρ'), + (0x3A2, 'X'), + (0x3A3, 'M', 'σ'), + (0x3A4, 'M', 'τ'), + (0x3A5, 'M', 'υ'), + (0x3A6, 'M', 'φ'), + (0x3A7, 'M', 'χ'), + (0x3A8, 'M', 'ψ'), + (0x3A9, 'M', 'ω'), + (0x3AA, 'M', 'ϊ'), + (0x3AB, 'M', 'ϋ'), + (0x3AC, 'V'), + (0x3C2, 'D', 'σ'), + (0x3C3, 'V'), + (0x3CF, 'M', 'ϗ'), + (0x3D0, 'M', 'β'), + (0x3D1, 'M', 'θ'), + (0x3D2, 'M', 'υ'), + (0x3D3, 'M', 'ύ'), + (0x3D4, 'M', 'ϋ'), + (0x3D5, 'M', 'φ'), + (0x3D6, 'M', 'π'), + (0x3D7, 'V'), + (0x3D8, 'M', 'ϙ'), + (0x3D9, 'V'), + (0x3DA, 'M', 'ϛ'), + (0x3DB, 'V'), + (0x3DC, 'M', 'ϝ'), + (0x3DD, 'V'), + (0x3DE, 'M', 'ϟ'), + (0x3DF, 'V'), + (0x3E0, 'M', 'ϡ'), + (0x3E1, 'V'), + (0x3E2, 'M', 'ϣ'), + (0x3E3, 'V'), + (0x3E4, 'M', 'ϥ'), + (0x3E5, 'V'), + (0x3E6, 'M', 'ϧ'), + (0x3E7, 'V'), + (0x3E8, 'M', 'ϩ'), + (0x3E9, 'V'), + (0x3EA, 'M', 'ϫ'), + (0x3EB, 'V'), + (0x3EC, 'M', 'ϭ'), + (0x3ED, 'V'), + (0x3EE, 'M', 'ϯ'), + (0x3EF, 'V'), + (0x3F0, 'M', 'κ'), + (0x3F1, 'M', 'ρ'), + (0x3F2, 'M', 'σ'), + (0x3F3, 'V'), + (0x3F4, 'M', 'θ'), + (0x3F5, 'M', 'ε'), + (0x3F6, 'V'), + (0x3F7, 'M', 'ϸ'), + (0x3F8, 'V'), + (0x3F9, 'M', 'σ'), + (0x3FA, 'M', 'ϻ'), + (0x3FB, 'V'), + (0x3FD, 'M', 'ͻ'), + (0x3FE, 'M', 'ͼ'), + (0x3FF, 'M', 'ͽ'), + (0x400, 'M', 'ѐ'), + (0x401, 'M', 'ё'), + (0x402, 'M', 'ђ'), + ] + +def _seg_7() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x403, 'M', 'ѓ'), + (0x404, 'M', 'є'), + (0x405, 'M', 'ѕ'), + (0x406, 'M', 'і'), + (0x407, 'M', 'ї'), + (0x408, 'M', 'ј'), + (0x409, 'M', 'љ'), + (0x40A, 'M', 'њ'), + (0x40B, 'M', 'ћ'), + (0x40C, 'M', 'ќ'), + (0x40D, 'M', 'ѝ'), + (0x40E, 'M', 'ў'), + (0x40F, 'M', 'џ'), + (0x410, 'M', 'а'), + (0x411, 'M', 'б'), + (0x412, 'M', 'в'), + (0x413, 'M', 'г'), + (0x414, 'M', 'д'), + (0x415, 'M', 'е'), + (0x416, 'M', 'ж'), + (0x417, 'M', 'з'), + (0x418, 'M', 'и'), + (0x419, 'M', 'й'), + (0x41A, 'M', 'к'), + (0x41B, 'M', 'л'), + (0x41C, 'M', 'м'), + (0x41D, 'M', 'н'), + (0x41E, 'M', 'о'), + (0x41F, 'M', 'п'), + (0x420, 'M', 'р'), + (0x421, 
'M', 'с'), + (0x422, 'M', 'т'), + (0x423, 'M', 'у'), + (0x424, 'M', 'ф'), + (0x425, 'M', 'х'), + (0x426, 'M', 'ц'), + (0x427, 'M', 'ч'), + (0x428, 'M', 'ш'), + (0x429, 'M', 'щ'), + (0x42A, 'M', 'ъ'), + (0x42B, 'M', 'ы'), + (0x42C, 'M', 'ь'), + (0x42D, 'M', 'э'), + (0x42E, 'M', 'ю'), + (0x42F, 'M', 'я'), + (0x430, 'V'), + (0x460, 'M', 'ѡ'), + (0x461, 'V'), + (0x462, 'M', 'ѣ'), + (0x463, 'V'), + (0x464, 'M', 'ѥ'), + (0x465, 'V'), + (0x466, 'M', 'ѧ'), + (0x467, 'V'), + (0x468, 'M', 'ѩ'), + (0x469, 'V'), + (0x46A, 'M', 'ѫ'), + (0x46B, 'V'), + (0x46C, 'M', 'ѭ'), + (0x46D, 'V'), + (0x46E, 'M', 'ѯ'), + (0x46F, 'V'), + (0x470, 'M', 'ѱ'), + (0x471, 'V'), + (0x472, 'M', 'ѳ'), + (0x473, 'V'), + (0x474, 'M', 'ѵ'), + (0x475, 'V'), + (0x476, 'M', 'ѷ'), + (0x477, 'V'), + (0x478, 'M', 'ѹ'), + (0x479, 'V'), + (0x47A, 'M', 'ѻ'), + (0x47B, 'V'), + (0x47C, 'M', 'ѽ'), + (0x47D, 'V'), + (0x47E, 'M', 'ѿ'), + (0x47F, 'V'), + (0x480, 'M', 'ҁ'), + (0x481, 'V'), + (0x48A, 'M', 'ҋ'), + (0x48B, 'V'), + (0x48C, 'M', 'ҍ'), + (0x48D, 'V'), + (0x48E, 'M', 'ҏ'), + (0x48F, 'V'), + (0x490, 'M', 'ґ'), + (0x491, 'V'), + (0x492, 'M', 'ғ'), + (0x493, 'V'), + (0x494, 'M', 'ҕ'), + (0x495, 'V'), + (0x496, 'M', 'җ'), + (0x497, 'V'), + (0x498, 'M', 'ҙ'), + (0x499, 'V'), + (0x49A, 'M', 'қ'), + (0x49B, 'V'), + (0x49C, 'M', 'ҝ'), + (0x49D, 'V'), + ] + +def _seg_8() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x49E, 'M', 'ҟ'), + (0x49F, 'V'), + (0x4A0, 'M', 'ҡ'), + (0x4A1, 'V'), + (0x4A2, 'M', 'ң'), + (0x4A3, 'V'), + (0x4A4, 'M', 'ҥ'), + (0x4A5, 'V'), + (0x4A6, 'M', 'ҧ'), + (0x4A7, 'V'), + (0x4A8, 'M', 'ҩ'), + (0x4A9, 'V'), + (0x4AA, 'M', 'ҫ'), + (0x4AB, 'V'), + (0x4AC, 'M', 'ҭ'), + (0x4AD, 'V'), + (0x4AE, 'M', 'ү'), + (0x4AF, 'V'), + (0x4B0, 'M', 'ұ'), + (0x4B1, 'V'), + (0x4B2, 'M', 'ҳ'), + (0x4B3, 'V'), + (0x4B4, 'M', 'ҵ'), + (0x4B5, 'V'), + (0x4B6, 'M', 'ҷ'), + (0x4B7, 'V'), + (0x4B8, 'M', 'ҹ'), + (0x4B9, 'V'), + (0x4BA, 'M', 'һ'), + (0x4BB, 'V'), + (0x4BC, 'M', 'ҽ'), + (0x4BD, 'V'), + 
(0x4BE, 'M', 'ҿ'), + (0x4BF, 'V'), + (0x4C0, 'X'), + (0x4C1, 'M', 'ӂ'), + (0x4C2, 'V'), + (0x4C3, 'M', 'ӄ'), + (0x4C4, 'V'), + (0x4C5, 'M', 'ӆ'), + (0x4C6, 'V'), + (0x4C7, 'M', 'ӈ'), + (0x4C8, 'V'), + (0x4C9, 'M', 'ӊ'), + (0x4CA, 'V'), + (0x4CB, 'M', 'ӌ'), + (0x4CC, 'V'), + (0x4CD, 'M', 'ӎ'), + (0x4CE, 'V'), + (0x4D0, 'M', 'ӑ'), + (0x4D1, 'V'), + (0x4D2, 'M', 'ӓ'), + (0x4D3, 'V'), + (0x4D4, 'M', 'ӕ'), + (0x4D5, 'V'), + (0x4D6, 'M', 'ӗ'), + (0x4D7, 'V'), + (0x4D8, 'M', 'ә'), + (0x4D9, 'V'), + (0x4DA, 'M', 'ӛ'), + (0x4DB, 'V'), + (0x4DC, 'M', 'ӝ'), + (0x4DD, 'V'), + (0x4DE, 'M', 'ӟ'), + (0x4DF, 'V'), + (0x4E0, 'M', 'ӡ'), + (0x4E1, 'V'), + (0x4E2, 'M', 'ӣ'), + (0x4E3, 'V'), + (0x4E4, 'M', 'ӥ'), + (0x4E5, 'V'), + (0x4E6, 'M', 'ӧ'), + (0x4E7, 'V'), + (0x4E8, 'M', 'ө'), + (0x4E9, 'V'), + (0x4EA, 'M', 'ӫ'), + (0x4EB, 'V'), + (0x4EC, 'M', 'ӭ'), + (0x4ED, 'V'), + (0x4EE, 'M', 'ӯ'), + (0x4EF, 'V'), + (0x4F0, 'M', 'ӱ'), + (0x4F1, 'V'), + (0x4F2, 'M', 'ӳ'), + (0x4F3, 'V'), + (0x4F4, 'M', 'ӵ'), + (0x4F5, 'V'), + (0x4F6, 'M', 'ӷ'), + (0x4F7, 'V'), + (0x4F8, 'M', 'ӹ'), + (0x4F9, 'V'), + (0x4FA, 'M', 'ӻ'), + (0x4FB, 'V'), + (0x4FC, 'M', 'ӽ'), + (0x4FD, 'V'), + (0x4FE, 'M', 'ӿ'), + (0x4FF, 'V'), + (0x500, 'M', 'ԁ'), + (0x501, 'V'), + (0x502, 'M', 'ԃ'), + ] + +def _seg_9() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x503, 'V'), + (0x504, 'M', 'ԅ'), + (0x505, 'V'), + (0x506, 'M', 'ԇ'), + (0x507, 'V'), + (0x508, 'M', 'ԉ'), + (0x509, 'V'), + (0x50A, 'M', 'ԋ'), + (0x50B, 'V'), + (0x50C, 'M', 'ԍ'), + (0x50D, 'V'), + (0x50E, 'M', 'ԏ'), + (0x50F, 'V'), + (0x510, 'M', 'ԑ'), + (0x511, 'V'), + (0x512, 'M', 'ԓ'), + (0x513, 'V'), + (0x514, 'M', 'ԕ'), + (0x515, 'V'), + (0x516, 'M', 'ԗ'), + (0x517, 'V'), + (0x518, 'M', 'ԙ'), + (0x519, 'V'), + (0x51A, 'M', 'ԛ'), + (0x51B, 'V'), + (0x51C, 'M', 'ԝ'), + (0x51D, 'V'), + (0x51E, 'M', 'ԟ'), + (0x51F, 'V'), + (0x520, 'M', 'ԡ'), + (0x521, 'V'), + (0x522, 'M', 'ԣ'), + (0x523, 'V'), + (0x524, 'M', 'ԥ'), + (0x525, 'V'), + (0x526, 
'M', 'ԧ'), + (0x527, 'V'), + (0x528, 'M', 'ԩ'), + (0x529, 'V'), + (0x52A, 'M', 'ԫ'), + (0x52B, 'V'), + (0x52C, 'M', 'ԭ'), + (0x52D, 'V'), + (0x52E, 'M', 'ԯ'), + (0x52F, 'V'), + (0x530, 'X'), + (0x531, 'M', 'ա'), + (0x532, 'M', 'բ'), + (0x533, 'M', 'գ'), + (0x534, 'M', 'դ'), + (0x535, 'M', 'ե'), + (0x536, 'M', 'զ'), + (0x537, 'M', 'է'), + (0x538, 'M', 'ը'), + (0x539, 'M', 'թ'), + (0x53A, 'M', 'ժ'), + (0x53B, 'M', 'ի'), + (0x53C, 'M', 'լ'), + (0x53D, 'M', 'խ'), + (0x53E, 'M', 'ծ'), + (0x53F, 'M', 'կ'), + (0x540, 'M', 'հ'), + (0x541, 'M', 'ձ'), + (0x542, 'M', 'ղ'), + (0x543, 'M', 'ճ'), + (0x544, 'M', 'մ'), + (0x545, 'M', 'յ'), + (0x546, 'M', 'ն'), + (0x547, 'M', 'շ'), + (0x548, 'M', 'ո'), + (0x549, 'M', 'չ'), + (0x54A, 'M', 'պ'), + (0x54B, 'M', 'ջ'), + (0x54C, 'M', 'ռ'), + (0x54D, 'M', 'ս'), + (0x54E, 'M', 'վ'), + (0x54F, 'M', 'տ'), + (0x550, 'M', 'ր'), + (0x551, 'M', 'ց'), + (0x552, 'M', 'ւ'), + (0x553, 'M', 'փ'), + (0x554, 'M', 'ք'), + (0x555, 'M', 'օ'), + (0x556, 'M', 'ֆ'), + (0x557, 'X'), + (0x559, 'V'), + (0x587, 'M', 'եւ'), + (0x588, 'V'), + (0x58B, 'X'), + (0x58D, 'V'), + (0x590, 'X'), + (0x591, 'V'), + (0x5C8, 'X'), + (0x5D0, 'V'), + (0x5EB, 'X'), + (0x5EF, 'V'), + (0x5F5, 'X'), + (0x606, 'V'), + (0x61C, 'X'), + (0x61D, 'V'), + ] + +def _seg_10() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x675, 'M', 'اٴ'), + (0x676, 'M', 'وٴ'), + (0x677, 'M', 'ۇٴ'), + (0x678, 'M', 'يٴ'), + (0x679, 'V'), + (0x6DD, 'X'), + (0x6DE, 'V'), + (0x70E, 'X'), + (0x710, 'V'), + (0x74B, 'X'), + (0x74D, 'V'), + (0x7B2, 'X'), + (0x7C0, 'V'), + (0x7FB, 'X'), + (0x7FD, 'V'), + (0x82E, 'X'), + (0x830, 'V'), + (0x83F, 'X'), + (0x840, 'V'), + (0x85C, 'X'), + (0x85E, 'V'), + (0x85F, 'X'), + (0x860, 'V'), + (0x86B, 'X'), + (0x870, 'V'), + (0x88F, 'X'), + (0x898, 'V'), + (0x8E2, 'X'), + (0x8E3, 'V'), + (0x958, 'M', 'क़'), + (0x959, 'M', 'ख़'), + (0x95A, 'M', 'ग़'), + (0x95B, 'M', 'ज़'), + (0x95C, 'M', 'ड़'), + (0x95D, 'M', 'ढ़'), + (0x95E, 'M', 'फ़'), + (0x95F, 'M', 
'य़'), + (0x960, 'V'), + (0x984, 'X'), + (0x985, 'V'), + (0x98D, 'X'), + (0x98F, 'V'), + (0x991, 'X'), + (0x993, 'V'), + (0x9A9, 'X'), + (0x9AA, 'V'), + (0x9B1, 'X'), + (0x9B2, 'V'), + (0x9B3, 'X'), + (0x9B6, 'V'), + (0x9BA, 'X'), + (0x9BC, 'V'), + (0x9C5, 'X'), + (0x9C7, 'V'), + (0x9C9, 'X'), + (0x9CB, 'V'), + (0x9CF, 'X'), + (0x9D7, 'V'), + (0x9D8, 'X'), + (0x9DC, 'M', 'ড়'), + (0x9DD, 'M', 'ঢ়'), + (0x9DE, 'X'), + (0x9DF, 'M', 'য়'), + (0x9E0, 'V'), + (0x9E4, 'X'), + (0x9E6, 'V'), + (0x9FF, 'X'), + (0xA01, 'V'), + (0xA04, 'X'), + (0xA05, 'V'), + (0xA0B, 'X'), + (0xA0F, 'V'), + (0xA11, 'X'), + (0xA13, 'V'), + (0xA29, 'X'), + (0xA2A, 'V'), + (0xA31, 'X'), + (0xA32, 'V'), + (0xA33, 'M', 'ਲ਼'), + (0xA34, 'X'), + (0xA35, 'V'), + (0xA36, 'M', 'ਸ਼'), + (0xA37, 'X'), + (0xA38, 'V'), + (0xA3A, 'X'), + (0xA3C, 'V'), + (0xA3D, 'X'), + (0xA3E, 'V'), + (0xA43, 'X'), + (0xA47, 'V'), + (0xA49, 'X'), + (0xA4B, 'V'), + (0xA4E, 'X'), + (0xA51, 'V'), + (0xA52, 'X'), + (0xA59, 'M', 'ਖ਼'), + (0xA5A, 'M', 'ਗ਼'), + (0xA5B, 'M', 'ਜ਼'), + (0xA5C, 'V'), + (0xA5D, 'X'), + ] + +def _seg_11() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xA5E, 'M', 'ਫ਼'), + (0xA5F, 'X'), + (0xA66, 'V'), + (0xA77, 'X'), + (0xA81, 'V'), + (0xA84, 'X'), + (0xA85, 'V'), + (0xA8E, 'X'), + (0xA8F, 'V'), + (0xA92, 'X'), + (0xA93, 'V'), + (0xAA9, 'X'), + (0xAAA, 'V'), + (0xAB1, 'X'), + (0xAB2, 'V'), + (0xAB4, 'X'), + (0xAB5, 'V'), + (0xABA, 'X'), + (0xABC, 'V'), + (0xAC6, 'X'), + (0xAC7, 'V'), + (0xACA, 'X'), + (0xACB, 'V'), + (0xACE, 'X'), + (0xAD0, 'V'), + (0xAD1, 'X'), + (0xAE0, 'V'), + (0xAE4, 'X'), + (0xAE6, 'V'), + (0xAF2, 'X'), + (0xAF9, 'V'), + (0xB00, 'X'), + (0xB01, 'V'), + (0xB04, 'X'), + (0xB05, 'V'), + (0xB0D, 'X'), + (0xB0F, 'V'), + (0xB11, 'X'), + (0xB13, 'V'), + (0xB29, 'X'), + (0xB2A, 'V'), + (0xB31, 'X'), + (0xB32, 'V'), + (0xB34, 'X'), + (0xB35, 'V'), + (0xB3A, 'X'), + (0xB3C, 'V'), + (0xB45, 'X'), + (0xB47, 'V'), + (0xB49, 'X'), + (0xB4B, 'V'), + (0xB4E, 'X'), + (0xB55, 
'V'), + (0xB58, 'X'), + (0xB5C, 'M', 'ଡ଼'), + (0xB5D, 'M', 'ଢ଼'), + (0xB5E, 'X'), + (0xB5F, 'V'), + (0xB64, 'X'), + (0xB66, 'V'), + (0xB78, 'X'), + (0xB82, 'V'), + (0xB84, 'X'), + (0xB85, 'V'), + (0xB8B, 'X'), + (0xB8E, 'V'), + (0xB91, 'X'), + (0xB92, 'V'), + (0xB96, 'X'), + (0xB99, 'V'), + (0xB9B, 'X'), + (0xB9C, 'V'), + (0xB9D, 'X'), + (0xB9E, 'V'), + (0xBA0, 'X'), + (0xBA3, 'V'), + (0xBA5, 'X'), + (0xBA8, 'V'), + (0xBAB, 'X'), + (0xBAE, 'V'), + (0xBBA, 'X'), + (0xBBE, 'V'), + (0xBC3, 'X'), + (0xBC6, 'V'), + (0xBC9, 'X'), + (0xBCA, 'V'), + (0xBCE, 'X'), + (0xBD0, 'V'), + (0xBD1, 'X'), + (0xBD7, 'V'), + (0xBD8, 'X'), + (0xBE6, 'V'), + (0xBFB, 'X'), + (0xC00, 'V'), + (0xC0D, 'X'), + (0xC0E, 'V'), + (0xC11, 'X'), + (0xC12, 'V'), + (0xC29, 'X'), + (0xC2A, 'V'), + ] + +def _seg_12() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xC3A, 'X'), + (0xC3C, 'V'), + (0xC45, 'X'), + (0xC46, 'V'), + (0xC49, 'X'), + (0xC4A, 'V'), + (0xC4E, 'X'), + (0xC55, 'V'), + (0xC57, 'X'), + (0xC58, 'V'), + (0xC5B, 'X'), + (0xC5D, 'V'), + (0xC5E, 'X'), + (0xC60, 'V'), + (0xC64, 'X'), + (0xC66, 'V'), + (0xC70, 'X'), + (0xC77, 'V'), + (0xC8D, 'X'), + (0xC8E, 'V'), + (0xC91, 'X'), + (0xC92, 'V'), + (0xCA9, 'X'), + (0xCAA, 'V'), + (0xCB4, 'X'), + (0xCB5, 'V'), + (0xCBA, 'X'), + (0xCBC, 'V'), + (0xCC5, 'X'), + (0xCC6, 'V'), + (0xCC9, 'X'), + (0xCCA, 'V'), + (0xCCE, 'X'), + (0xCD5, 'V'), + (0xCD7, 'X'), + (0xCDD, 'V'), + (0xCDF, 'X'), + (0xCE0, 'V'), + (0xCE4, 'X'), + (0xCE6, 'V'), + (0xCF0, 'X'), + (0xCF1, 'V'), + (0xCF3, 'X'), + (0xD00, 'V'), + (0xD0D, 'X'), + (0xD0E, 'V'), + (0xD11, 'X'), + (0xD12, 'V'), + (0xD45, 'X'), + (0xD46, 'V'), + (0xD49, 'X'), + (0xD4A, 'V'), + (0xD50, 'X'), + (0xD54, 'V'), + (0xD64, 'X'), + (0xD66, 'V'), + (0xD80, 'X'), + (0xD81, 'V'), + (0xD84, 'X'), + (0xD85, 'V'), + (0xD97, 'X'), + (0xD9A, 'V'), + (0xDB2, 'X'), + (0xDB3, 'V'), + (0xDBC, 'X'), + (0xDBD, 'V'), + (0xDBE, 'X'), + (0xDC0, 'V'), + (0xDC7, 'X'), + (0xDCA, 'V'), + (0xDCB, 'X'), + 
(0xDCF, 'V'), + (0xDD5, 'X'), + (0xDD6, 'V'), + (0xDD7, 'X'), + (0xDD8, 'V'), + (0xDE0, 'X'), + (0xDE6, 'V'), + (0xDF0, 'X'), + (0xDF2, 'V'), + (0xDF5, 'X'), + (0xE01, 'V'), + (0xE33, 'M', 'ํา'), + (0xE34, 'V'), + (0xE3B, 'X'), + (0xE3F, 'V'), + (0xE5C, 'X'), + (0xE81, 'V'), + (0xE83, 'X'), + (0xE84, 'V'), + (0xE85, 'X'), + (0xE86, 'V'), + (0xE8B, 'X'), + (0xE8C, 'V'), + (0xEA4, 'X'), + (0xEA5, 'V'), + (0xEA6, 'X'), + (0xEA7, 'V'), + (0xEB3, 'M', 'ໍາ'), + (0xEB4, 'V'), + ] + +def _seg_13() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xEBE, 'X'), + (0xEC0, 'V'), + (0xEC5, 'X'), + (0xEC6, 'V'), + (0xEC7, 'X'), + (0xEC8, 'V'), + (0xECE, 'X'), + (0xED0, 'V'), + (0xEDA, 'X'), + (0xEDC, 'M', 'ຫນ'), + (0xEDD, 'M', 'ຫມ'), + (0xEDE, 'V'), + (0xEE0, 'X'), + (0xF00, 'V'), + (0xF0C, 'M', '་'), + (0xF0D, 'V'), + (0xF43, 'M', 'གྷ'), + (0xF44, 'V'), + (0xF48, 'X'), + (0xF49, 'V'), + (0xF4D, 'M', 'ཌྷ'), + (0xF4E, 'V'), + (0xF52, 'M', 'དྷ'), + (0xF53, 'V'), + (0xF57, 'M', 'བྷ'), + (0xF58, 'V'), + (0xF5C, 'M', 'ཛྷ'), + (0xF5D, 'V'), + (0xF69, 'M', 'ཀྵ'), + (0xF6A, 'V'), + (0xF6D, 'X'), + (0xF71, 'V'), + (0xF73, 'M', 'ཱི'), + (0xF74, 'V'), + (0xF75, 'M', 'ཱུ'), + (0xF76, 'M', 'ྲྀ'), + (0xF77, 'M', 'ྲཱྀ'), + (0xF78, 'M', 'ླྀ'), + (0xF79, 'M', 'ླཱྀ'), + (0xF7A, 'V'), + (0xF81, 'M', 'ཱྀ'), + (0xF82, 'V'), + (0xF93, 'M', 'ྒྷ'), + (0xF94, 'V'), + (0xF98, 'X'), + (0xF99, 'V'), + (0xF9D, 'M', 'ྜྷ'), + (0xF9E, 'V'), + (0xFA2, 'M', 'ྡྷ'), + (0xFA3, 'V'), + (0xFA7, 'M', 'ྦྷ'), + (0xFA8, 'V'), + (0xFAC, 'M', 'ྫྷ'), + (0xFAD, 'V'), + (0xFB9, 'M', 'ྐྵ'), + (0xFBA, 'V'), + (0xFBD, 'X'), + (0xFBE, 'V'), + (0xFCD, 'X'), + (0xFCE, 'V'), + (0xFDB, 'X'), + (0x1000, 'V'), + (0x10A0, 'X'), + (0x10C7, 'M', 'ⴧ'), + (0x10C8, 'X'), + (0x10CD, 'M', 'ⴭ'), + (0x10CE, 'X'), + (0x10D0, 'V'), + (0x10FC, 'M', 'ნ'), + (0x10FD, 'V'), + (0x115F, 'X'), + (0x1161, 'V'), + (0x1249, 'X'), + (0x124A, 'V'), + (0x124E, 'X'), + (0x1250, 'V'), + (0x1257, 'X'), + (0x1258, 'V'), + (0x1259, 'X'), + 
(0x125A, 'V'), + (0x125E, 'X'), + (0x1260, 'V'), + (0x1289, 'X'), + (0x128A, 'V'), + (0x128E, 'X'), + (0x1290, 'V'), + (0x12B1, 'X'), + (0x12B2, 'V'), + (0x12B6, 'X'), + (0x12B8, 'V'), + (0x12BF, 'X'), + (0x12C0, 'V'), + (0x12C1, 'X'), + (0x12C2, 'V'), + (0x12C6, 'X'), + (0x12C8, 'V'), + (0x12D7, 'X'), + (0x12D8, 'V'), + (0x1311, 'X'), + (0x1312, 'V'), + ] + +def _seg_14() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1316, 'X'), + (0x1318, 'V'), + (0x135B, 'X'), + (0x135D, 'V'), + (0x137D, 'X'), + (0x1380, 'V'), + (0x139A, 'X'), + (0x13A0, 'V'), + (0x13F6, 'X'), + (0x13F8, 'M', 'Ᏸ'), + (0x13F9, 'M', 'Ᏹ'), + (0x13FA, 'M', 'Ᏺ'), + (0x13FB, 'M', 'Ᏻ'), + (0x13FC, 'M', 'Ᏼ'), + (0x13FD, 'M', 'Ᏽ'), + (0x13FE, 'X'), + (0x1400, 'V'), + (0x1680, 'X'), + (0x1681, 'V'), + (0x169D, 'X'), + (0x16A0, 'V'), + (0x16F9, 'X'), + (0x1700, 'V'), + (0x1716, 'X'), + (0x171F, 'V'), + (0x1737, 'X'), + (0x1740, 'V'), + (0x1754, 'X'), + (0x1760, 'V'), + (0x176D, 'X'), + (0x176E, 'V'), + (0x1771, 'X'), + (0x1772, 'V'), + (0x1774, 'X'), + (0x1780, 'V'), + (0x17B4, 'X'), + (0x17B6, 'V'), + (0x17DE, 'X'), + (0x17E0, 'V'), + (0x17EA, 'X'), + (0x17F0, 'V'), + (0x17FA, 'X'), + (0x1800, 'V'), + (0x1806, 'X'), + (0x1807, 'V'), + (0x180B, 'I'), + (0x180E, 'X'), + (0x180F, 'I'), + (0x1810, 'V'), + (0x181A, 'X'), + (0x1820, 'V'), + (0x1879, 'X'), + (0x1880, 'V'), + (0x18AB, 'X'), + (0x18B0, 'V'), + (0x18F6, 'X'), + (0x1900, 'V'), + (0x191F, 'X'), + (0x1920, 'V'), + (0x192C, 'X'), + (0x1930, 'V'), + (0x193C, 'X'), + (0x1940, 'V'), + (0x1941, 'X'), + (0x1944, 'V'), + (0x196E, 'X'), + (0x1970, 'V'), + (0x1975, 'X'), + (0x1980, 'V'), + (0x19AC, 'X'), + (0x19B0, 'V'), + (0x19CA, 'X'), + (0x19D0, 'V'), + (0x19DB, 'X'), + (0x19DE, 'V'), + (0x1A1C, 'X'), + (0x1A1E, 'V'), + (0x1A5F, 'X'), + (0x1A60, 'V'), + (0x1A7D, 'X'), + (0x1A7F, 'V'), + (0x1A8A, 'X'), + (0x1A90, 'V'), + (0x1A9A, 'X'), + (0x1AA0, 'V'), + (0x1AAE, 'X'), + (0x1AB0, 'V'), + (0x1ACF, 'X'), + (0x1B00, 'V'), + (0x1B4D, 
'X'), + (0x1B50, 'V'), + (0x1B7F, 'X'), + (0x1B80, 'V'), + (0x1BF4, 'X'), + (0x1BFC, 'V'), + (0x1C38, 'X'), + (0x1C3B, 'V'), + (0x1C4A, 'X'), + (0x1C4D, 'V'), + (0x1C80, 'M', 'в'), + ] + +def _seg_15() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1C81, 'M', 'д'), + (0x1C82, 'M', 'о'), + (0x1C83, 'M', 'с'), + (0x1C84, 'M', 'т'), + (0x1C86, 'M', 'ъ'), + (0x1C87, 'M', 'ѣ'), + (0x1C88, 'M', 'ꙋ'), + (0x1C89, 'X'), + (0x1C90, 'M', 'ა'), + (0x1C91, 'M', 'ბ'), + (0x1C92, 'M', 'გ'), + (0x1C93, 'M', 'დ'), + (0x1C94, 'M', 'ე'), + (0x1C95, 'M', 'ვ'), + (0x1C96, 'M', 'ზ'), + (0x1C97, 'M', 'თ'), + (0x1C98, 'M', 'ი'), + (0x1C99, 'M', 'კ'), + (0x1C9A, 'M', 'ლ'), + (0x1C9B, 'M', 'მ'), + (0x1C9C, 'M', 'ნ'), + (0x1C9D, 'M', 'ო'), + (0x1C9E, 'M', 'პ'), + (0x1C9F, 'M', 'ჟ'), + (0x1CA0, 'M', 'რ'), + (0x1CA1, 'M', 'ს'), + (0x1CA2, 'M', 'ტ'), + (0x1CA3, 'M', 'უ'), + (0x1CA4, 'M', 'ფ'), + (0x1CA5, 'M', 'ქ'), + (0x1CA6, 'M', 'ღ'), + (0x1CA7, 'M', 'ყ'), + (0x1CA8, 'M', 'შ'), + (0x1CA9, 'M', 'ჩ'), + (0x1CAA, 'M', 'ც'), + (0x1CAB, 'M', 'ძ'), + (0x1CAC, 'M', 'წ'), + (0x1CAD, 'M', 'ჭ'), + (0x1CAE, 'M', 'ხ'), + (0x1CAF, 'M', 'ჯ'), + (0x1CB0, 'M', 'ჰ'), + (0x1CB1, 'M', 'ჱ'), + (0x1CB2, 'M', 'ჲ'), + (0x1CB3, 'M', 'ჳ'), + (0x1CB4, 'M', 'ჴ'), + (0x1CB5, 'M', 'ჵ'), + (0x1CB6, 'M', 'ჶ'), + (0x1CB7, 'M', 'ჷ'), + (0x1CB8, 'M', 'ჸ'), + (0x1CB9, 'M', 'ჹ'), + (0x1CBA, 'M', 'ჺ'), + (0x1CBB, 'X'), + (0x1CBD, 'M', 'ჽ'), + (0x1CBE, 'M', 'ჾ'), + (0x1CBF, 'M', 'ჿ'), + (0x1CC0, 'V'), + (0x1CC8, 'X'), + (0x1CD0, 'V'), + (0x1CFB, 'X'), + (0x1D00, 'V'), + (0x1D2C, 'M', 'a'), + (0x1D2D, 'M', 'æ'), + (0x1D2E, 'M', 'b'), + (0x1D2F, 'V'), + (0x1D30, 'M', 'd'), + (0x1D31, 'M', 'e'), + (0x1D32, 'M', 'ǝ'), + (0x1D33, 'M', 'g'), + (0x1D34, 'M', 'h'), + (0x1D35, 'M', 'i'), + (0x1D36, 'M', 'j'), + (0x1D37, 'M', 'k'), + (0x1D38, 'M', 'l'), + (0x1D39, 'M', 'm'), + (0x1D3A, 'M', 'n'), + (0x1D3B, 'V'), + (0x1D3C, 'M', 'o'), + (0x1D3D, 'M', 'ȣ'), + (0x1D3E, 'M', 'p'), + (0x1D3F, 'M', 'r'), + (0x1D40, 'M', 
't'), + (0x1D41, 'M', 'u'), + (0x1D42, 'M', 'w'), + (0x1D43, 'M', 'a'), + (0x1D44, 'M', 'ɐ'), + (0x1D45, 'M', 'ɑ'), + (0x1D46, 'M', 'ᴂ'), + (0x1D47, 'M', 'b'), + (0x1D48, 'M', 'd'), + (0x1D49, 'M', 'e'), + (0x1D4A, 'M', 'ə'), + (0x1D4B, 'M', 'ɛ'), + (0x1D4C, 'M', 'ɜ'), + (0x1D4D, 'M', 'g'), + (0x1D4E, 'V'), + (0x1D4F, 'M', 'k'), + (0x1D50, 'M', 'm'), + (0x1D51, 'M', 'ŋ'), + (0x1D52, 'M', 'o'), + (0x1D53, 'M', 'ɔ'), + ] + +def _seg_16() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D54, 'M', 'ᴖ'), + (0x1D55, 'M', 'ᴗ'), + (0x1D56, 'M', 'p'), + (0x1D57, 'M', 't'), + (0x1D58, 'M', 'u'), + (0x1D59, 'M', 'ᴝ'), + (0x1D5A, 'M', 'ɯ'), + (0x1D5B, 'M', 'v'), + (0x1D5C, 'M', 'ᴥ'), + (0x1D5D, 'M', 'β'), + (0x1D5E, 'M', 'γ'), + (0x1D5F, 'M', 'δ'), + (0x1D60, 'M', 'φ'), + (0x1D61, 'M', 'χ'), + (0x1D62, 'M', 'i'), + (0x1D63, 'M', 'r'), + (0x1D64, 'M', 'u'), + (0x1D65, 'M', 'v'), + (0x1D66, 'M', 'β'), + (0x1D67, 'M', 'γ'), + (0x1D68, 'M', 'ρ'), + (0x1D69, 'M', 'φ'), + (0x1D6A, 'M', 'χ'), + (0x1D6B, 'V'), + (0x1D78, 'M', 'н'), + (0x1D79, 'V'), + (0x1D9B, 'M', 'ɒ'), + (0x1D9C, 'M', 'c'), + (0x1D9D, 'M', 'ɕ'), + (0x1D9E, 'M', 'ð'), + (0x1D9F, 'M', 'ɜ'), + (0x1DA0, 'M', 'f'), + (0x1DA1, 'M', 'ɟ'), + (0x1DA2, 'M', 'ɡ'), + (0x1DA3, 'M', 'ɥ'), + (0x1DA4, 'M', 'ɨ'), + (0x1DA5, 'M', 'ɩ'), + (0x1DA6, 'M', 'ɪ'), + (0x1DA7, 'M', 'ᵻ'), + (0x1DA8, 'M', 'ʝ'), + (0x1DA9, 'M', 'ɭ'), + (0x1DAA, 'M', 'ᶅ'), + (0x1DAB, 'M', 'ʟ'), + (0x1DAC, 'M', 'ɱ'), + (0x1DAD, 'M', 'ɰ'), + (0x1DAE, 'M', 'ɲ'), + (0x1DAF, 'M', 'ɳ'), + (0x1DB0, 'M', 'ɴ'), + (0x1DB1, 'M', 'ɵ'), + (0x1DB2, 'M', 'ɸ'), + (0x1DB3, 'M', 'ʂ'), + (0x1DB4, 'M', 'ʃ'), + (0x1DB5, 'M', 'ƫ'), + (0x1DB6, 'M', 'ʉ'), + (0x1DB7, 'M', 'ʊ'), + (0x1DB8, 'M', 'ᴜ'), + (0x1DB9, 'M', 'ʋ'), + (0x1DBA, 'M', 'ʌ'), + (0x1DBB, 'M', 'z'), + (0x1DBC, 'M', 'ʐ'), + (0x1DBD, 'M', 'ʑ'), + (0x1DBE, 'M', 'ʒ'), + (0x1DBF, 'M', 'θ'), + (0x1DC0, 'V'), + (0x1E00, 'M', 'ḁ'), + (0x1E01, 'V'), + (0x1E02, 'M', 'ḃ'), + (0x1E03, 'V'), + (0x1E04, 'M', 'ḅ'), 
+ (0x1E05, 'V'), + (0x1E06, 'M', 'ḇ'), + (0x1E07, 'V'), + (0x1E08, 'M', 'ḉ'), + (0x1E09, 'V'), + (0x1E0A, 'M', 'ḋ'), + (0x1E0B, 'V'), + (0x1E0C, 'M', 'ḍ'), + (0x1E0D, 'V'), + (0x1E0E, 'M', 'ḏ'), + (0x1E0F, 'V'), + (0x1E10, 'M', 'ḑ'), + (0x1E11, 'V'), + (0x1E12, 'M', 'ḓ'), + (0x1E13, 'V'), + (0x1E14, 'M', 'ḕ'), + (0x1E15, 'V'), + (0x1E16, 'M', 'ḗ'), + (0x1E17, 'V'), + (0x1E18, 'M', 'ḙ'), + (0x1E19, 'V'), + (0x1E1A, 'M', 'ḛ'), + (0x1E1B, 'V'), + (0x1E1C, 'M', 'ḝ'), + (0x1E1D, 'V'), + (0x1E1E, 'M', 'ḟ'), + (0x1E1F, 'V'), + (0x1E20, 'M', 'ḡ'), + (0x1E21, 'V'), + (0x1E22, 'M', 'ḣ'), + (0x1E23, 'V'), + ] + +def _seg_17() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1E24, 'M', 'ḥ'), + (0x1E25, 'V'), + (0x1E26, 'M', 'ḧ'), + (0x1E27, 'V'), + (0x1E28, 'M', 'ḩ'), + (0x1E29, 'V'), + (0x1E2A, 'M', 'ḫ'), + (0x1E2B, 'V'), + (0x1E2C, 'M', 'ḭ'), + (0x1E2D, 'V'), + (0x1E2E, 'M', 'ḯ'), + (0x1E2F, 'V'), + (0x1E30, 'M', 'ḱ'), + (0x1E31, 'V'), + (0x1E32, 'M', 'ḳ'), + (0x1E33, 'V'), + (0x1E34, 'M', 'ḵ'), + (0x1E35, 'V'), + (0x1E36, 'M', 'ḷ'), + (0x1E37, 'V'), + (0x1E38, 'M', 'ḹ'), + (0x1E39, 'V'), + (0x1E3A, 'M', 'ḻ'), + (0x1E3B, 'V'), + (0x1E3C, 'M', 'ḽ'), + (0x1E3D, 'V'), + (0x1E3E, 'M', 'ḿ'), + (0x1E3F, 'V'), + (0x1E40, 'M', 'ṁ'), + (0x1E41, 'V'), + (0x1E42, 'M', 'ṃ'), + (0x1E43, 'V'), + (0x1E44, 'M', 'ṅ'), + (0x1E45, 'V'), + (0x1E46, 'M', 'ṇ'), + (0x1E47, 'V'), + (0x1E48, 'M', 'ṉ'), + (0x1E49, 'V'), + (0x1E4A, 'M', 'ṋ'), + (0x1E4B, 'V'), + (0x1E4C, 'M', 'ṍ'), + (0x1E4D, 'V'), + (0x1E4E, 'M', 'ṏ'), + (0x1E4F, 'V'), + (0x1E50, 'M', 'ṑ'), + (0x1E51, 'V'), + (0x1E52, 'M', 'ṓ'), + (0x1E53, 'V'), + (0x1E54, 'M', 'ṕ'), + (0x1E55, 'V'), + (0x1E56, 'M', 'ṗ'), + (0x1E57, 'V'), + (0x1E58, 'M', 'ṙ'), + (0x1E59, 'V'), + (0x1E5A, 'M', 'ṛ'), + (0x1E5B, 'V'), + (0x1E5C, 'M', 'ṝ'), + (0x1E5D, 'V'), + (0x1E5E, 'M', 'ṟ'), + (0x1E5F, 'V'), + (0x1E60, 'M', 'ṡ'), + (0x1E61, 'V'), + (0x1E62, 'M', 'ṣ'), + (0x1E63, 'V'), + (0x1E64, 'M', 'ṥ'), + (0x1E65, 'V'), + (0x1E66, 'M', 'ṧ'), + 
(0x1E67, 'V'), + (0x1E68, 'M', 'ṩ'), + (0x1E69, 'V'), + (0x1E6A, 'M', 'ṫ'), + (0x1E6B, 'V'), + (0x1E6C, 'M', 'ṭ'), + (0x1E6D, 'V'), + (0x1E6E, 'M', 'ṯ'), + (0x1E6F, 'V'), + (0x1E70, 'M', 'ṱ'), + (0x1E71, 'V'), + (0x1E72, 'M', 'ṳ'), + (0x1E73, 'V'), + (0x1E74, 'M', 'ṵ'), + (0x1E75, 'V'), + (0x1E76, 'M', 'ṷ'), + (0x1E77, 'V'), + (0x1E78, 'M', 'ṹ'), + (0x1E79, 'V'), + (0x1E7A, 'M', 'ṻ'), + (0x1E7B, 'V'), + (0x1E7C, 'M', 'ṽ'), + (0x1E7D, 'V'), + (0x1E7E, 'M', 'ṿ'), + (0x1E7F, 'V'), + (0x1E80, 'M', 'ẁ'), + (0x1E81, 'V'), + (0x1E82, 'M', 'ẃ'), + (0x1E83, 'V'), + (0x1E84, 'M', 'ẅ'), + (0x1E85, 'V'), + (0x1E86, 'M', 'ẇ'), + (0x1E87, 'V'), + ] + +def _seg_18() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1E88, 'M', 'ẉ'), + (0x1E89, 'V'), + (0x1E8A, 'M', 'ẋ'), + (0x1E8B, 'V'), + (0x1E8C, 'M', 'ẍ'), + (0x1E8D, 'V'), + (0x1E8E, 'M', 'ẏ'), + (0x1E8F, 'V'), + (0x1E90, 'M', 'ẑ'), + (0x1E91, 'V'), + (0x1E92, 'M', 'ẓ'), + (0x1E93, 'V'), + (0x1E94, 'M', 'ẕ'), + (0x1E95, 'V'), + (0x1E9A, 'M', 'aʾ'), + (0x1E9B, 'M', 'ṡ'), + (0x1E9C, 'V'), + (0x1E9E, 'M', 'ss'), + (0x1E9F, 'V'), + (0x1EA0, 'M', 'ạ'), + (0x1EA1, 'V'), + (0x1EA2, 'M', 'ả'), + (0x1EA3, 'V'), + (0x1EA4, 'M', 'ấ'), + (0x1EA5, 'V'), + (0x1EA6, 'M', 'ầ'), + (0x1EA7, 'V'), + (0x1EA8, 'M', 'ẩ'), + (0x1EA9, 'V'), + (0x1EAA, 'M', 'ẫ'), + (0x1EAB, 'V'), + (0x1EAC, 'M', 'ậ'), + (0x1EAD, 'V'), + (0x1EAE, 'M', 'ắ'), + (0x1EAF, 'V'), + (0x1EB0, 'M', 'ằ'), + (0x1EB1, 'V'), + (0x1EB2, 'M', 'ẳ'), + (0x1EB3, 'V'), + (0x1EB4, 'M', 'ẵ'), + (0x1EB5, 'V'), + (0x1EB6, 'M', 'ặ'), + (0x1EB7, 'V'), + (0x1EB8, 'M', 'ẹ'), + (0x1EB9, 'V'), + (0x1EBA, 'M', 'ẻ'), + (0x1EBB, 'V'), + (0x1EBC, 'M', 'ẽ'), + (0x1EBD, 'V'), + (0x1EBE, 'M', 'ế'), + (0x1EBF, 'V'), + (0x1EC0, 'M', 'ề'), + (0x1EC1, 'V'), + (0x1EC2, 'M', 'ể'), + (0x1EC3, 'V'), + (0x1EC4, 'M', 'ễ'), + (0x1EC5, 'V'), + (0x1EC6, 'M', 'ệ'), + (0x1EC7, 'V'), + (0x1EC8, 'M', 'ỉ'), + (0x1EC9, 'V'), + (0x1ECA, 'M', 'ị'), + (0x1ECB, 'V'), + (0x1ECC, 'M', 'ọ'), + (0x1ECD, 'V'), + 
(0x1ECE, 'M', 'ỏ'), + (0x1ECF, 'V'), + (0x1ED0, 'M', 'ố'), + (0x1ED1, 'V'), + (0x1ED2, 'M', 'ồ'), + (0x1ED3, 'V'), + (0x1ED4, 'M', 'ổ'), + (0x1ED5, 'V'), + (0x1ED6, 'M', 'ỗ'), + (0x1ED7, 'V'), + (0x1ED8, 'M', 'ộ'), + (0x1ED9, 'V'), + (0x1EDA, 'M', 'ớ'), + (0x1EDB, 'V'), + (0x1EDC, 'M', 'ờ'), + (0x1EDD, 'V'), + (0x1EDE, 'M', 'ở'), + (0x1EDF, 'V'), + (0x1EE0, 'M', 'ỡ'), + (0x1EE1, 'V'), + (0x1EE2, 'M', 'ợ'), + (0x1EE3, 'V'), + (0x1EE4, 'M', 'ụ'), + (0x1EE5, 'V'), + (0x1EE6, 'M', 'ủ'), + (0x1EE7, 'V'), + (0x1EE8, 'M', 'ứ'), + (0x1EE9, 'V'), + (0x1EEA, 'M', 'ừ'), + (0x1EEB, 'V'), + (0x1EEC, 'M', 'ử'), + (0x1EED, 'V'), + (0x1EEE, 'M', 'ữ'), + (0x1EEF, 'V'), + (0x1EF0, 'M', 'ự'), + ] + +def _seg_19() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1EF1, 'V'), + (0x1EF2, 'M', 'ỳ'), + (0x1EF3, 'V'), + (0x1EF4, 'M', 'ỵ'), + (0x1EF5, 'V'), + (0x1EF6, 'M', 'ỷ'), + (0x1EF7, 'V'), + (0x1EF8, 'M', 'ỹ'), + (0x1EF9, 'V'), + (0x1EFA, 'M', 'ỻ'), + (0x1EFB, 'V'), + (0x1EFC, 'M', 'ỽ'), + (0x1EFD, 'V'), + (0x1EFE, 'M', 'ỿ'), + (0x1EFF, 'V'), + (0x1F08, 'M', 'ἀ'), + (0x1F09, 'M', 'ἁ'), + (0x1F0A, 'M', 'ἂ'), + (0x1F0B, 'M', 'ἃ'), + (0x1F0C, 'M', 'ἄ'), + (0x1F0D, 'M', 'ἅ'), + (0x1F0E, 'M', 'ἆ'), + (0x1F0F, 'M', 'ἇ'), + (0x1F10, 'V'), + (0x1F16, 'X'), + (0x1F18, 'M', 'ἐ'), + (0x1F19, 'M', 'ἑ'), + (0x1F1A, 'M', 'ἒ'), + (0x1F1B, 'M', 'ἓ'), + (0x1F1C, 'M', 'ἔ'), + (0x1F1D, 'M', 'ἕ'), + (0x1F1E, 'X'), + (0x1F20, 'V'), + (0x1F28, 'M', 'ἠ'), + (0x1F29, 'M', 'ἡ'), + (0x1F2A, 'M', 'ἢ'), + (0x1F2B, 'M', 'ἣ'), + (0x1F2C, 'M', 'ἤ'), + (0x1F2D, 'M', 'ἥ'), + (0x1F2E, 'M', 'ἦ'), + (0x1F2F, 'M', 'ἧ'), + (0x1F30, 'V'), + (0x1F38, 'M', 'ἰ'), + (0x1F39, 'M', 'ἱ'), + (0x1F3A, 'M', 'ἲ'), + (0x1F3B, 'M', 'ἳ'), + (0x1F3C, 'M', 'ἴ'), + (0x1F3D, 'M', 'ἵ'), + (0x1F3E, 'M', 'ἶ'), + (0x1F3F, 'M', 'ἷ'), + (0x1F40, 'V'), + (0x1F46, 'X'), + (0x1F48, 'M', 'ὀ'), + (0x1F49, 'M', 'ὁ'), + (0x1F4A, 'M', 'ὂ'), + (0x1F4B, 'M', 'ὃ'), + (0x1F4C, 'M', 'ὄ'), + (0x1F4D, 'M', 'ὅ'), + (0x1F4E, 'X'), + (0x1F50, 
'V'), + (0x1F58, 'X'), + (0x1F59, 'M', 'ὑ'), + (0x1F5A, 'X'), + (0x1F5B, 'M', 'ὓ'), + (0x1F5C, 'X'), + (0x1F5D, 'M', 'ὕ'), + (0x1F5E, 'X'), + (0x1F5F, 'M', 'ὗ'), + (0x1F60, 'V'), + (0x1F68, 'M', 'ὠ'), + (0x1F69, 'M', 'ὡ'), + (0x1F6A, 'M', 'ὢ'), + (0x1F6B, 'M', 'ὣ'), + (0x1F6C, 'M', 'ὤ'), + (0x1F6D, 'M', 'ὥ'), + (0x1F6E, 'M', 'ὦ'), + (0x1F6F, 'M', 'ὧ'), + (0x1F70, 'V'), + (0x1F71, 'M', 'ά'), + (0x1F72, 'V'), + (0x1F73, 'M', 'έ'), + (0x1F74, 'V'), + (0x1F75, 'M', 'ή'), + (0x1F76, 'V'), + (0x1F77, 'M', 'ί'), + (0x1F78, 'V'), + (0x1F79, 'M', 'ό'), + (0x1F7A, 'V'), + (0x1F7B, 'M', 'ύ'), + (0x1F7C, 'V'), + (0x1F7D, 'M', 'ώ'), + (0x1F7E, 'X'), + (0x1F80, 'M', 'ἀι'), + (0x1F81, 'M', 'ἁι'), + (0x1F82, 'M', 'ἂι'), + (0x1F83, 'M', 'ἃι'), + (0x1F84, 'M', 'ἄι'), + (0x1F85, 'M', 'ἅι'), + (0x1F86, 'M', 'ἆι'), + (0x1F87, 'M', 'ἇι'), + ] + +def _seg_20() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1F88, 'M', 'ἀι'), + (0x1F89, 'M', 'ἁι'), + (0x1F8A, 'M', 'ἂι'), + (0x1F8B, 'M', 'ἃι'), + (0x1F8C, 'M', 'ἄι'), + (0x1F8D, 'M', 'ἅι'), + (0x1F8E, 'M', 'ἆι'), + (0x1F8F, 'M', 'ἇι'), + (0x1F90, 'M', 'ἠι'), + (0x1F91, 'M', 'ἡι'), + (0x1F92, 'M', 'ἢι'), + (0x1F93, 'M', 'ἣι'), + (0x1F94, 'M', 'ἤι'), + (0x1F95, 'M', 'ἥι'), + (0x1F96, 'M', 'ἦι'), + (0x1F97, 'M', 'ἧι'), + (0x1F98, 'M', 'ἠι'), + (0x1F99, 'M', 'ἡι'), + (0x1F9A, 'M', 'ἢι'), + (0x1F9B, 'M', 'ἣι'), + (0x1F9C, 'M', 'ἤι'), + (0x1F9D, 'M', 'ἥι'), + (0x1F9E, 'M', 'ἦι'), + (0x1F9F, 'M', 'ἧι'), + (0x1FA0, 'M', 'ὠι'), + (0x1FA1, 'M', 'ὡι'), + (0x1FA2, 'M', 'ὢι'), + (0x1FA3, 'M', 'ὣι'), + (0x1FA4, 'M', 'ὤι'), + (0x1FA5, 'M', 'ὥι'), + (0x1FA6, 'M', 'ὦι'), + (0x1FA7, 'M', 'ὧι'), + (0x1FA8, 'M', 'ὠι'), + (0x1FA9, 'M', 'ὡι'), + (0x1FAA, 'M', 'ὢι'), + (0x1FAB, 'M', 'ὣι'), + (0x1FAC, 'M', 'ὤι'), + (0x1FAD, 'M', 'ὥι'), + (0x1FAE, 'M', 'ὦι'), + (0x1FAF, 'M', 'ὧι'), + (0x1FB0, 'V'), + (0x1FB2, 'M', 'ὰι'), + (0x1FB3, 'M', 'αι'), + (0x1FB4, 'M', 'άι'), + (0x1FB5, 'X'), + (0x1FB6, 'V'), + (0x1FB7, 'M', 'ᾶι'), + (0x1FB8, 'M', 
'ᾰ'), + (0x1FB9, 'M', 'ᾱ'), + (0x1FBA, 'M', 'ὰ'), + (0x1FBB, 'M', 'ά'), + (0x1FBC, 'M', 'αι'), + (0x1FBD, '3', ' ̓'), + (0x1FBE, 'M', 'ι'), + (0x1FBF, '3', ' ̓'), + (0x1FC0, '3', ' ͂'), + (0x1FC1, '3', ' ̈͂'), + (0x1FC2, 'M', 'ὴι'), + (0x1FC3, 'M', 'ηι'), + (0x1FC4, 'M', 'ήι'), + (0x1FC5, 'X'), + (0x1FC6, 'V'), + (0x1FC7, 'M', 'ῆι'), + (0x1FC8, 'M', 'ὲ'), + (0x1FC9, 'M', 'έ'), + (0x1FCA, 'M', 'ὴ'), + (0x1FCB, 'M', 'ή'), + (0x1FCC, 'M', 'ηι'), + (0x1FCD, '3', ' ̓̀'), + (0x1FCE, '3', ' ̓́'), + (0x1FCF, '3', ' ̓͂'), + (0x1FD0, 'V'), + (0x1FD3, 'M', 'ΐ'), + (0x1FD4, 'X'), + (0x1FD6, 'V'), + (0x1FD8, 'M', 'ῐ'), + (0x1FD9, 'M', 'ῑ'), + (0x1FDA, 'M', 'ὶ'), + (0x1FDB, 'M', 'ί'), + (0x1FDC, 'X'), + (0x1FDD, '3', ' ̔̀'), + (0x1FDE, '3', ' ̔́'), + (0x1FDF, '3', ' ̔͂'), + (0x1FE0, 'V'), + (0x1FE3, 'M', 'ΰ'), + (0x1FE4, 'V'), + (0x1FE8, 'M', 'ῠ'), + (0x1FE9, 'M', 'ῡ'), + (0x1FEA, 'M', 'ὺ'), + (0x1FEB, 'M', 'ύ'), + (0x1FEC, 'M', 'ῥ'), + (0x1FED, '3', ' ̈̀'), + (0x1FEE, '3', ' ̈́'), + (0x1FEF, '3', '`'), + (0x1FF0, 'X'), + (0x1FF2, 'M', 'ὼι'), + (0x1FF3, 'M', 'ωι'), + (0x1FF4, 'M', 'ώι'), + (0x1FF5, 'X'), + (0x1FF6, 'V'), + ] + +def _seg_21() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1FF7, 'M', 'ῶι'), + (0x1FF8, 'M', 'ὸ'), + (0x1FF9, 'M', 'ό'), + (0x1FFA, 'M', 'ὼ'), + (0x1FFB, 'M', 'ώ'), + (0x1FFC, 'M', 'ωι'), + (0x1FFD, '3', ' ́'), + (0x1FFE, '3', ' ̔'), + (0x1FFF, 'X'), + (0x2000, '3', ' '), + (0x200B, 'I'), + (0x200C, 'D', ''), + (0x200E, 'X'), + (0x2010, 'V'), + (0x2011, 'M', '‐'), + (0x2012, 'V'), + (0x2017, '3', ' ̳'), + (0x2018, 'V'), + (0x2024, 'X'), + (0x2027, 'V'), + (0x2028, 'X'), + (0x202F, '3', ' '), + (0x2030, 'V'), + (0x2033, 'M', '′′'), + (0x2034, 'M', '′′′'), + (0x2035, 'V'), + (0x2036, 'M', '‵‵'), + (0x2037, 'M', '‵‵‵'), + (0x2038, 'V'), + (0x203C, '3', '!!'), + (0x203D, 'V'), + (0x203E, '3', ' ̅'), + (0x203F, 'V'), + (0x2047, '3', '??'), + (0x2048, '3', '?!'), + (0x2049, '3', '!?'), + (0x204A, 'V'), + (0x2057, 'M', '′′′′'), + 
(0x2058, 'V'), + (0x205F, '3', ' '), + (0x2060, 'I'), + (0x2061, 'X'), + (0x2064, 'I'), + (0x2065, 'X'), + (0x2070, 'M', '0'), + (0x2071, 'M', 'i'), + (0x2072, 'X'), + (0x2074, 'M', '4'), + (0x2075, 'M', '5'), + (0x2076, 'M', '6'), + (0x2077, 'M', '7'), + (0x2078, 'M', '8'), + (0x2079, 'M', '9'), + (0x207A, '3', '+'), + (0x207B, 'M', '−'), + (0x207C, '3', '='), + (0x207D, '3', '('), + (0x207E, '3', ')'), + (0x207F, 'M', 'n'), + (0x2080, 'M', '0'), + (0x2081, 'M', '1'), + (0x2082, 'M', '2'), + (0x2083, 'M', '3'), + (0x2084, 'M', '4'), + (0x2085, 'M', '5'), + (0x2086, 'M', '6'), + (0x2087, 'M', '7'), + (0x2088, 'M', '8'), + (0x2089, 'M', '9'), + (0x208A, '3', '+'), + (0x208B, 'M', '−'), + (0x208C, '3', '='), + (0x208D, '3', '('), + (0x208E, '3', ')'), + (0x208F, 'X'), + (0x2090, 'M', 'a'), + (0x2091, 'M', 'e'), + (0x2092, 'M', 'o'), + (0x2093, 'M', 'x'), + (0x2094, 'M', 'ə'), + (0x2095, 'M', 'h'), + (0x2096, 'M', 'k'), + (0x2097, 'M', 'l'), + (0x2098, 'M', 'm'), + (0x2099, 'M', 'n'), + (0x209A, 'M', 'p'), + (0x209B, 'M', 's'), + (0x209C, 'M', 't'), + (0x209D, 'X'), + (0x20A0, 'V'), + (0x20A8, 'M', 'rs'), + (0x20A9, 'V'), + (0x20C1, 'X'), + (0x20D0, 'V'), + (0x20F1, 'X'), + (0x2100, '3', 'a/c'), + (0x2101, '3', 'a/s'), + (0x2102, 'M', 'c'), + (0x2103, 'M', '°c'), + (0x2104, 'V'), + ] + +def _seg_22() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2105, '3', 'c/o'), + (0x2106, '3', 'c/u'), + (0x2107, 'M', 'ɛ'), + (0x2108, 'V'), + (0x2109, 'M', '°f'), + (0x210A, 'M', 'g'), + (0x210B, 'M', 'h'), + (0x210F, 'M', 'ħ'), + (0x2110, 'M', 'i'), + (0x2112, 'M', 'l'), + (0x2114, 'V'), + (0x2115, 'M', 'n'), + (0x2116, 'M', 'no'), + (0x2117, 'V'), + (0x2119, 'M', 'p'), + (0x211A, 'M', 'q'), + (0x211B, 'M', 'r'), + (0x211E, 'V'), + (0x2120, 'M', 'sm'), + (0x2121, 'M', 'tel'), + (0x2122, 'M', 'tm'), + (0x2123, 'V'), + (0x2124, 'M', 'z'), + (0x2125, 'V'), + (0x2126, 'M', 'ω'), + (0x2127, 'V'), + (0x2128, 'M', 'z'), + (0x2129, 'V'), + (0x212A, 'M', 'k'), + 
(0x212B, 'M', 'å'), + (0x212C, 'M', 'b'), + (0x212D, 'M', 'c'), + (0x212E, 'V'), + (0x212F, 'M', 'e'), + (0x2131, 'M', 'f'), + (0x2132, 'X'), + (0x2133, 'M', 'm'), + (0x2134, 'M', 'o'), + (0x2135, 'M', 'א'), + (0x2136, 'M', 'ב'), + (0x2137, 'M', 'ג'), + (0x2138, 'M', 'ד'), + (0x2139, 'M', 'i'), + (0x213A, 'V'), + (0x213B, 'M', 'fax'), + (0x213C, 'M', 'π'), + (0x213D, 'M', 'γ'), + (0x213F, 'M', 'π'), + (0x2140, 'M', '∑'), + (0x2141, 'V'), + (0x2145, 'M', 'd'), + (0x2147, 'M', 'e'), + (0x2148, 'M', 'i'), + (0x2149, 'M', 'j'), + (0x214A, 'V'), + (0x2150, 'M', '1⁄7'), + (0x2151, 'M', '1⁄9'), + (0x2152, 'M', '1⁄10'), + (0x2153, 'M', '1⁄3'), + (0x2154, 'M', '2⁄3'), + (0x2155, 'M', '1⁄5'), + (0x2156, 'M', '2⁄5'), + (0x2157, 'M', '3⁄5'), + (0x2158, 'M', '4⁄5'), + (0x2159, 'M', '1⁄6'), + (0x215A, 'M', '5⁄6'), + (0x215B, 'M', '1⁄8'), + (0x215C, 'M', '3⁄8'), + (0x215D, 'M', '5⁄8'), + (0x215E, 'M', '7⁄8'), + (0x215F, 'M', '1⁄'), + (0x2160, 'M', 'i'), + (0x2161, 'M', 'ii'), + (0x2162, 'M', 'iii'), + (0x2163, 'M', 'iv'), + (0x2164, 'M', 'v'), + (0x2165, 'M', 'vi'), + (0x2166, 'M', 'vii'), + (0x2167, 'M', 'viii'), + (0x2168, 'M', 'ix'), + (0x2169, 'M', 'x'), + (0x216A, 'M', 'xi'), + (0x216B, 'M', 'xii'), + (0x216C, 'M', 'l'), + (0x216D, 'M', 'c'), + (0x216E, 'M', 'd'), + (0x216F, 'M', 'm'), + (0x2170, 'M', 'i'), + (0x2171, 'M', 'ii'), + (0x2172, 'M', 'iii'), + (0x2173, 'M', 'iv'), + (0x2174, 'M', 'v'), + (0x2175, 'M', 'vi'), + (0x2176, 'M', 'vii'), + (0x2177, 'M', 'viii'), + (0x2178, 'M', 'ix'), + (0x2179, 'M', 'x'), + (0x217A, 'M', 'xi'), + (0x217B, 'M', 'xii'), + (0x217C, 'M', 'l'), + ] + +def _seg_23() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x217D, 'M', 'c'), + (0x217E, 'M', 'd'), + (0x217F, 'M', 'm'), + (0x2180, 'V'), + (0x2183, 'X'), + (0x2184, 'V'), + (0x2189, 'M', '0⁄3'), + (0x218A, 'V'), + (0x218C, 'X'), + (0x2190, 'V'), + (0x222C, 'M', '∫∫'), + (0x222D, 'M', '∫∫∫'), + (0x222E, 'V'), + (0x222F, 'M', '∮∮'), + (0x2230, 'M', '∮∮∮'), + (0x2231, 
'V'), + (0x2260, '3'), + (0x2261, 'V'), + (0x226E, '3'), + (0x2270, 'V'), + (0x2329, 'M', '〈'), + (0x232A, 'M', '〉'), + (0x232B, 'V'), + (0x2427, 'X'), + (0x2440, 'V'), + (0x244B, 'X'), + (0x2460, 'M', '1'), + (0x2461, 'M', '2'), + (0x2462, 'M', '3'), + (0x2463, 'M', '4'), + (0x2464, 'M', '5'), + (0x2465, 'M', '6'), + (0x2466, 'M', '7'), + (0x2467, 'M', '8'), + (0x2468, 'M', '9'), + (0x2469, 'M', '10'), + (0x246A, 'M', '11'), + (0x246B, 'M', '12'), + (0x246C, 'M', '13'), + (0x246D, 'M', '14'), + (0x246E, 'M', '15'), + (0x246F, 'M', '16'), + (0x2470, 'M', '17'), + (0x2471, 'M', '18'), + (0x2472, 'M', '19'), + (0x2473, 'M', '20'), + (0x2474, '3', '(1)'), + (0x2475, '3', '(2)'), + (0x2476, '3', '(3)'), + (0x2477, '3', '(4)'), + (0x2478, '3', '(5)'), + (0x2479, '3', '(6)'), + (0x247A, '3', '(7)'), + (0x247B, '3', '(8)'), + (0x247C, '3', '(9)'), + (0x247D, '3', '(10)'), + (0x247E, '3', '(11)'), + (0x247F, '3', '(12)'), + (0x2480, '3', '(13)'), + (0x2481, '3', '(14)'), + (0x2482, '3', '(15)'), + (0x2483, '3', '(16)'), + (0x2484, '3', '(17)'), + (0x2485, '3', '(18)'), + (0x2486, '3', '(19)'), + (0x2487, '3', '(20)'), + (0x2488, 'X'), + (0x249C, '3', '(a)'), + (0x249D, '3', '(b)'), + (0x249E, '3', '(c)'), + (0x249F, '3', '(d)'), + (0x24A0, '3', '(e)'), + (0x24A1, '3', '(f)'), + (0x24A2, '3', '(g)'), + (0x24A3, '3', '(h)'), + (0x24A4, '3', '(i)'), + (0x24A5, '3', '(j)'), + (0x24A6, '3', '(k)'), + (0x24A7, '3', '(l)'), + (0x24A8, '3', '(m)'), + (0x24A9, '3', '(n)'), + (0x24AA, '3', '(o)'), + (0x24AB, '3', '(p)'), + (0x24AC, '3', '(q)'), + (0x24AD, '3', '(r)'), + (0x24AE, '3', '(s)'), + (0x24AF, '3', '(t)'), + (0x24B0, '3', '(u)'), + (0x24B1, '3', '(v)'), + (0x24B2, '3', '(w)'), + (0x24B3, '3', '(x)'), + (0x24B4, '3', '(y)'), + (0x24B5, '3', '(z)'), + (0x24B6, 'M', 'a'), + (0x24B7, 'M', 'b'), + (0x24B8, 'M', 'c'), + (0x24B9, 'M', 'd'), + (0x24BA, 'M', 'e'), + (0x24BB, 'M', 'f'), + (0x24BC, 'M', 'g'), + ] + +def _seg_24() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: 
+ return [ + (0x24BD, 'M', 'h'), + (0x24BE, 'M', 'i'), + (0x24BF, 'M', 'j'), + (0x24C0, 'M', 'k'), + (0x24C1, 'M', 'l'), + (0x24C2, 'M', 'm'), + (0x24C3, 'M', 'n'), + (0x24C4, 'M', 'o'), + (0x24C5, 'M', 'p'), + (0x24C6, 'M', 'q'), + (0x24C7, 'M', 'r'), + (0x24C8, 'M', 's'), + (0x24C9, 'M', 't'), + (0x24CA, 'M', 'u'), + (0x24CB, 'M', 'v'), + (0x24CC, 'M', 'w'), + (0x24CD, 'M', 'x'), + (0x24CE, 'M', 'y'), + (0x24CF, 'M', 'z'), + (0x24D0, 'M', 'a'), + (0x24D1, 'M', 'b'), + (0x24D2, 'M', 'c'), + (0x24D3, 'M', 'd'), + (0x24D4, 'M', 'e'), + (0x24D5, 'M', 'f'), + (0x24D6, 'M', 'g'), + (0x24D7, 'M', 'h'), + (0x24D8, 'M', 'i'), + (0x24D9, 'M', 'j'), + (0x24DA, 'M', 'k'), + (0x24DB, 'M', 'l'), + (0x24DC, 'M', 'm'), + (0x24DD, 'M', 'n'), + (0x24DE, 'M', 'o'), + (0x24DF, 'M', 'p'), + (0x24E0, 'M', 'q'), + (0x24E1, 'M', 'r'), + (0x24E2, 'M', 's'), + (0x24E3, 'M', 't'), + (0x24E4, 'M', 'u'), + (0x24E5, 'M', 'v'), + (0x24E6, 'M', 'w'), + (0x24E7, 'M', 'x'), + (0x24E8, 'M', 'y'), + (0x24E9, 'M', 'z'), + (0x24EA, 'M', '0'), + (0x24EB, 'V'), + (0x2A0C, 'M', '∫∫∫∫'), + (0x2A0D, 'V'), + (0x2A74, '3', '::='), + (0x2A75, '3', '=='), + (0x2A76, '3', '==='), + (0x2A77, 'V'), + (0x2ADC, 'M', '⫝̸'), + (0x2ADD, 'V'), + (0x2B74, 'X'), + (0x2B76, 'V'), + (0x2B96, 'X'), + (0x2B97, 'V'), + (0x2C00, 'M', 'ⰰ'), + (0x2C01, 'M', 'ⰱ'), + (0x2C02, 'M', 'ⰲ'), + (0x2C03, 'M', 'ⰳ'), + (0x2C04, 'M', 'ⰴ'), + (0x2C05, 'M', 'ⰵ'), + (0x2C06, 'M', 'ⰶ'), + (0x2C07, 'M', 'ⰷ'), + (0x2C08, 'M', 'ⰸ'), + (0x2C09, 'M', 'ⰹ'), + (0x2C0A, 'M', 'ⰺ'), + (0x2C0B, 'M', 'ⰻ'), + (0x2C0C, 'M', 'ⰼ'), + (0x2C0D, 'M', 'ⰽ'), + (0x2C0E, 'M', 'ⰾ'), + (0x2C0F, 'M', 'ⰿ'), + (0x2C10, 'M', 'ⱀ'), + (0x2C11, 'M', 'ⱁ'), + (0x2C12, 'M', 'ⱂ'), + (0x2C13, 'M', 'ⱃ'), + (0x2C14, 'M', 'ⱄ'), + (0x2C15, 'M', 'ⱅ'), + (0x2C16, 'M', 'ⱆ'), + (0x2C17, 'M', 'ⱇ'), + (0x2C18, 'M', 'ⱈ'), + (0x2C19, 'M', 'ⱉ'), + (0x2C1A, 'M', 'ⱊ'), + (0x2C1B, 'M', 'ⱋ'), + (0x2C1C, 'M', 'ⱌ'), + (0x2C1D, 'M', 'ⱍ'), + (0x2C1E, 'M', 'ⱎ'), + (0x2C1F, 'M', 'ⱏ'), + (0x2C20, 'M', 
'ⱐ'), + (0x2C21, 'M', 'ⱑ'), + (0x2C22, 'M', 'ⱒ'), + (0x2C23, 'M', 'ⱓ'), + (0x2C24, 'M', 'ⱔ'), + (0x2C25, 'M', 'ⱕ'), + (0x2C26, 'M', 'ⱖ'), + (0x2C27, 'M', 'ⱗ'), + (0x2C28, 'M', 'ⱘ'), + ] + +def _seg_25() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2C29, 'M', 'ⱙ'), + (0x2C2A, 'M', 'ⱚ'), + (0x2C2B, 'M', 'ⱛ'), + (0x2C2C, 'M', 'ⱜ'), + (0x2C2D, 'M', 'ⱝ'), + (0x2C2E, 'M', 'ⱞ'), + (0x2C2F, 'M', 'ⱟ'), + (0x2C30, 'V'), + (0x2C60, 'M', 'ⱡ'), + (0x2C61, 'V'), + (0x2C62, 'M', 'ɫ'), + (0x2C63, 'M', 'ᵽ'), + (0x2C64, 'M', 'ɽ'), + (0x2C65, 'V'), + (0x2C67, 'M', 'ⱨ'), + (0x2C68, 'V'), + (0x2C69, 'M', 'ⱪ'), + (0x2C6A, 'V'), + (0x2C6B, 'M', 'ⱬ'), + (0x2C6C, 'V'), + (0x2C6D, 'M', 'ɑ'), + (0x2C6E, 'M', 'ɱ'), + (0x2C6F, 'M', 'ɐ'), + (0x2C70, 'M', 'ɒ'), + (0x2C71, 'V'), + (0x2C72, 'M', 'ⱳ'), + (0x2C73, 'V'), + (0x2C75, 'M', 'ⱶ'), + (0x2C76, 'V'), + (0x2C7C, 'M', 'j'), + (0x2C7D, 'M', 'v'), + (0x2C7E, 'M', 'ȿ'), + (0x2C7F, 'M', 'ɀ'), + (0x2C80, 'M', 'ⲁ'), + (0x2C81, 'V'), + (0x2C82, 'M', 'ⲃ'), + (0x2C83, 'V'), + (0x2C84, 'M', 'ⲅ'), + (0x2C85, 'V'), + (0x2C86, 'M', 'ⲇ'), + (0x2C87, 'V'), + (0x2C88, 'M', 'ⲉ'), + (0x2C89, 'V'), + (0x2C8A, 'M', 'ⲋ'), + (0x2C8B, 'V'), + (0x2C8C, 'M', 'ⲍ'), + (0x2C8D, 'V'), + (0x2C8E, 'M', 'ⲏ'), + (0x2C8F, 'V'), + (0x2C90, 'M', 'ⲑ'), + (0x2C91, 'V'), + (0x2C92, 'M', 'ⲓ'), + (0x2C93, 'V'), + (0x2C94, 'M', 'ⲕ'), + (0x2C95, 'V'), + (0x2C96, 'M', 'ⲗ'), + (0x2C97, 'V'), + (0x2C98, 'M', 'ⲙ'), + (0x2C99, 'V'), + (0x2C9A, 'M', 'ⲛ'), + (0x2C9B, 'V'), + (0x2C9C, 'M', 'ⲝ'), + (0x2C9D, 'V'), + (0x2C9E, 'M', 'ⲟ'), + (0x2C9F, 'V'), + (0x2CA0, 'M', 'ⲡ'), + (0x2CA1, 'V'), + (0x2CA2, 'M', 'ⲣ'), + (0x2CA3, 'V'), + (0x2CA4, 'M', 'ⲥ'), + (0x2CA5, 'V'), + (0x2CA6, 'M', 'ⲧ'), + (0x2CA7, 'V'), + (0x2CA8, 'M', 'ⲩ'), + (0x2CA9, 'V'), + (0x2CAA, 'M', 'ⲫ'), + (0x2CAB, 'V'), + (0x2CAC, 'M', 'ⲭ'), + (0x2CAD, 'V'), + (0x2CAE, 'M', 'ⲯ'), + (0x2CAF, 'V'), + (0x2CB0, 'M', 'ⲱ'), + (0x2CB1, 'V'), + (0x2CB2, 'M', 'ⲳ'), + (0x2CB3, 'V'), + (0x2CB4, 'M', 'ⲵ'), + (0x2CB5, 
'V'), + (0x2CB6, 'M', 'ⲷ'), + (0x2CB7, 'V'), + (0x2CB8, 'M', 'ⲹ'), + (0x2CB9, 'V'), + (0x2CBA, 'M', 'ⲻ'), + (0x2CBB, 'V'), + (0x2CBC, 'M', 'ⲽ'), + (0x2CBD, 'V'), + (0x2CBE, 'M', 'ⲿ'), + (0x2CBF, 'V'), + (0x2CC0, 'M', 'ⳁ'), + (0x2CC1, 'V'), + (0x2CC2, 'M', 'ⳃ'), + ] + +def _seg_26() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2CC3, 'V'), + (0x2CC4, 'M', 'ⳅ'), + (0x2CC5, 'V'), + (0x2CC6, 'M', 'ⳇ'), + (0x2CC7, 'V'), + (0x2CC8, 'M', 'ⳉ'), + (0x2CC9, 'V'), + (0x2CCA, 'M', 'ⳋ'), + (0x2CCB, 'V'), + (0x2CCC, 'M', 'ⳍ'), + (0x2CCD, 'V'), + (0x2CCE, 'M', 'ⳏ'), + (0x2CCF, 'V'), + (0x2CD0, 'M', 'ⳑ'), + (0x2CD1, 'V'), + (0x2CD2, 'M', 'ⳓ'), + (0x2CD3, 'V'), + (0x2CD4, 'M', 'ⳕ'), + (0x2CD5, 'V'), + (0x2CD6, 'M', 'ⳗ'), + (0x2CD7, 'V'), + (0x2CD8, 'M', 'ⳙ'), + (0x2CD9, 'V'), + (0x2CDA, 'M', 'ⳛ'), + (0x2CDB, 'V'), + (0x2CDC, 'M', 'ⳝ'), + (0x2CDD, 'V'), + (0x2CDE, 'M', 'ⳟ'), + (0x2CDF, 'V'), + (0x2CE0, 'M', 'ⳡ'), + (0x2CE1, 'V'), + (0x2CE2, 'M', 'ⳣ'), + (0x2CE3, 'V'), + (0x2CEB, 'M', 'ⳬ'), + (0x2CEC, 'V'), + (0x2CED, 'M', 'ⳮ'), + (0x2CEE, 'V'), + (0x2CF2, 'M', 'ⳳ'), + (0x2CF3, 'V'), + (0x2CF4, 'X'), + (0x2CF9, 'V'), + (0x2D26, 'X'), + (0x2D27, 'V'), + (0x2D28, 'X'), + (0x2D2D, 'V'), + (0x2D2E, 'X'), + (0x2D30, 'V'), + (0x2D68, 'X'), + (0x2D6F, 'M', 'ⵡ'), + (0x2D70, 'V'), + (0x2D71, 'X'), + (0x2D7F, 'V'), + (0x2D97, 'X'), + (0x2DA0, 'V'), + (0x2DA7, 'X'), + (0x2DA8, 'V'), + (0x2DAF, 'X'), + (0x2DB0, 'V'), + (0x2DB7, 'X'), + (0x2DB8, 'V'), + (0x2DBF, 'X'), + (0x2DC0, 'V'), + (0x2DC7, 'X'), + (0x2DC8, 'V'), + (0x2DCF, 'X'), + (0x2DD0, 'V'), + (0x2DD7, 'X'), + (0x2DD8, 'V'), + (0x2DDF, 'X'), + (0x2DE0, 'V'), + (0x2E5E, 'X'), + (0x2E80, 'V'), + (0x2E9A, 'X'), + (0x2E9B, 'V'), + (0x2E9F, 'M', '母'), + (0x2EA0, 'V'), + (0x2EF3, 'M', '龟'), + (0x2EF4, 'X'), + (0x2F00, 'M', '一'), + (0x2F01, 'M', '丨'), + (0x2F02, 'M', '丶'), + (0x2F03, 'M', '丿'), + (0x2F04, 'M', '乙'), + (0x2F05, 'M', '亅'), + (0x2F06, 'M', '二'), + (0x2F07, 'M', '亠'), + (0x2F08, 'M', '人'), + (0x2F09, 'M', 
'儿'), + (0x2F0A, 'M', '入'), + (0x2F0B, 'M', '八'), + (0x2F0C, 'M', '冂'), + (0x2F0D, 'M', '冖'), + (0x2F0E, 'M', '冫'), + (0x2F0F, 'M', '几'), + (0x2F10, 'M', '凵'), + (0x2F11, 'M', '刀'), + (0x2F12, 'M', '力'), + (0x2F13, 'M', '勹'), + (0x2F14, 'M', '匕'), + (0x2F15, 'M', '匚'), + ] + +def _seg_27() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F16, 'M', '匸'), + (0x2F17, 'M', '十'), + (0x2F18, 'M', '卜'), + (0x2F19, 'M', '卩'), + (0x2F1A, 'M', '厂'), + (0x2F1B, 'M', '厶'), + (0x2F1C, 'M', '又'), + (0x2F1D, 'M', '口'), + (0x2F1E, 'M', '囗'), + (0x2F1F, 'M', '土'), + (0x2F20, 'M', '士'), + (0x2F21, 'M', '夂'), + (0x2F22, 'M', '夊'), + (0x2F23, 'M', '夕'), + (0x2F24, 'M', '大'), + (0x2F25, 'M', '女'), + (0x2F26, 'M', '子'), + (0x2F27, 'M', '宀'), + (0x2F28, 'M', '寸'), + (0x2F29, 'M', '小'), + (0x2F2A, 'M', '尢'), + (0x2F2B, 'M', '尸'), + (0x2F2C, 'M', '屮'), + (0x2F2D, 'M', '山'), + (0x2F2E, 'M', '巛'), + (0x2F2F, 'M', '工'), + (0x2F30, 'M', '己'), + (0x2F31, 'M', '巾'), + (0x2F32, 'M', '干'), + (0x2F33, 'M', '幺'), + (0x2F34, 'M', '广'), + (0x2F35, 'M', '廴'), + (0x2F36, 'M', '廾'), + (0x2F37, 'M', '弋'), + (0x2F38, 'M', '弓'), + (0x2F39, 'M', '彐'), + (0x2F3A, 'M', '彡'), + (0x2F3B, 'M', '彳'), + (0x2F3C, 'M', '心'), + (0x2F3D, 'M', '戈'), + (0x2F3E, 'M', '戶'), + (0x2F3F, 'M', '手'), + (0x2F40, 'M', '支'), + (0x2F41, 'M', '攴'), + (0x2F42, 'M', '文'), + (0x2F43, 'M', '斗'), + (0x2F44, 'M', '斤'), + (0x2F45, 'M', '方'), + (0x2F46, 'M', '无'), + (0x2F47, 'M', '日'), + (0x2F48, 'M', '曰'), + (0x2F49, 'M', '月'), + (0x2F4A, 'M', '木'), + (0x2F4B, 'M', '欠'), + (0x2F4C, 'M', '止'), + (0x2F4D, 'M', '歹'), + (0x2F4E, 'M', '殳'), + (0x2F4F, 'M', '毋'), + (0x2F50, 'M', '比'), + (0x2F51, 'M', '毛'), + (0x2F52, 'M', '氏'), + (0x2F53, 'M', '气'), + (0x2F54, 'M', '水'), + (0x2F55, 'M', '火'), + (0x2F56, 'M', '爪'), + (0x2F57, 'M', '父'), + (0x2F58, 'M', '爻'), + (0x2F59, 'M', '爿'), + (0x2F5A, 'M', '片'), + (0x2F5B, 'M', '牙'), + (0x2F5C, 'M', '牛'), + (0x2F5D, 'M', '犬'), + (0x2F5E, 'M', '玄'), + (0x2F5F, 'M', '玉'), + (0x2F60, 
'M', '瓜'), + (0x2F61, 'M', '瓦'), + (0x2F62, 'M', '甘'), + (0x2F63, 'M', '生'), + (0x2F64, 'M', '用'), + (0x2F65, 'M', '田'), + (0x2F66, 'M', '疋'), + (0x2F67, 'M', '疒'), + (0x2F68, 'M', '癶'), + (0x2F69, 'M', '白'), + (0x2F6A, 'M', '皮'), + (0x2F6B, 'M', '皿'), + (0x2F6C, 'M', '目'), + (0x2F6D, 'M', '矛'), + (0x2F6E, 'M', '矢'), + (0x2F6F, 'M', '石'), + (0x2F70, 'M', '示'), + (0x2F71, 'M', '禸'), + (0x2F72, 'M', '禾'), + (0x2F73, 'M', '穴'), + (0x2F74, 'M', '立'), + (0x2F75, 'M', '竹'), + (0x2F76, 'M', '米'), + (0x2F77, 'M', '糸'), + (0x2F78, 'M', '缶'), + (0x2F79, 'M', '网'), + ] + +def _seg_28() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F7A, 'M', '羊'), + (0x2F7B, 'M', '羽'), + (0x2F7C, 'M', '老'), + (0x2F7D, 'M', '而'), + (0x2F7E, 'M', '耒'), + (0x2F7F, 'M', '耳'), + (0x2F80, 'M', '聿'), + (0x2F81, 'M', '肉'), + (0x2F82, 'M', '臣'), + (0x2F83, 'M', '自'), + (0x2F84, 'M', '至'), + (0x2F85, 'M', '臼'), + (0x2F86, 'M', '舌'), + (0x2F87, 'M', '舛'), + (0x2F88, 'M', '舟'), + (0x2F89, 'M', '艮'), + (0x2F8A, 'M', '色'), + (0x2F8B, 'M', '艸'), + (0x2F8C, 'M', '虍'), + (0x2F8D, 'M', '虫'), + (0x2F8E, 'M', '血'), + (0x2F8F, 'M', '行'), + (0x2F90, 'M', '衣'), + (0x2F91, 'M', '襾'), + (0x2F92, 'M', '見'), + (0x2F93, 'M', '角'), + (0x2F94, 'M', '言'), + (0x2F95, 'M', '谷'), + (0x2F96, 'M', '豆'), + (0x2F97, 'M', '豕'), + (0x2F98, 'M', '豸'), + (0x2F99, 'M', '貝'), + (0x2F9A, 'M', '赤'), + (0x2F9B, 'M', '走'), + (0x2F9C, 'M', '足'), + (0x2F9D, 'M', '身'), + (0x2F9E, 'M', '車'), + (0x2F9F, 'M', '辛'), + (0x2FA0, 'M', '辰'), + (0x2FA1, 'M', '辵'), + (0x2FA2, 'M', '邑'), + (0x2FA3, 'M', '酉'), + (0x2FA4, 'M', '釆'), + (0x2FA5, 'M', '里'), + (0x2FA6, 'M', '金'), + (0x2FA7, 'M', '長'), + (0x2FA8, 'M', '門'), + (0x2FA9, 'M', '阜'), + (0x2FAA, 'M', '隶'), + (0x2FAB, 'M', '隹'), + (0x2FAC, 'M', '雨'), + (0x2FAD, 'M', '靑'), + (0x2FAE, 'M', '非'), + (0x2FAF, 'M', '面'), + (0x2FB0, 'M', '革'), + (0x2FB1, 'M', '韋'), + (0x2FB2, 'M', '韭'), + (0x2FB3, 'M', '音'), + (0x2FB4, 'M', '頁'), + (0x2FB5, 'M', '風'), + (0x2FB6, 'M', '飛'), + 
(0x2FB7, 'M', '食'), + (0x2FB8, 'M', '首'), + (0x2FB9, 'M', '香'), + (0x2FBA, 'M', '馬'), + (0x2FBB, 'M', '骨'), + (0x2FBC, 'M', '高'), + (0x2FBD, 'M', '髟'), + (0x2FBE, 'M', '鬥'), + (0x2FBF, 'M', '鬯'), + (0x2FC0, 'M', '鬲'), + (0x2FC1, 'M', '鬼'), + (0x2FC2, 'M', '魚'), + (0x2FC3, 'M', '鳥'), + (0x2FC4, 'M', '鹵'), + (0x2FC5, 'M', '鹿'), + (0x2FC6, 'M', '麥'), + (0x2FC7, 'M', '麻'), + (0x2FC8, 'M', '黃'), + (0x2FC9, 'M', '黍'), + (0x2FCA, 'M', '黑'), + (0x2FCB, 'M', '黹'), + (0x2FCC, 'M', '黽'), + (0x2FCD, 'M', '鼎'), + (0x2FCE, 'M', '鼓'), + (0x2FCF, 'M', '鼠'), + (0x2FD0, 'M', '鼻'), + (0x2FD1, 'M', '齊'), + (0x2FD2, 'M', '齒'), + (0x2FD3, 'M', '龍'), + (0x2FD4, 'M', '龜'), + (0x2FD5, 'M', '龠'), + (0x2FD6, 'X'), + (0x3000, '3', ' '), + (0x3001, 'V'), + (0x3002, 'M', '.'), + (0x3003, 'V'), + (0x3036, 'M', '〒'), + (0x3037, 'V'), + (0x3038, 'M', '十'), + ] + +def _seg_29() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x3039, 'M', '卄'), + (0x303A, 'M', '卅'), + (0x303B, 'V'), + (0x3040, 'X'), + (0x3041, 'V'), + (0x3097, 'X'), + (0x3099, 'V'), + (0x309B, '3', ' ゙'), + (0x309C, '3', ' ゚'), + (0x309D, 'V'), + (0x309F, 'M', 'より'), + (0x30A0, 'V'), + (0x30FF, 'M', 'コト'), + (0x3100, 'X'), + (0x3105, 'V'), + (0x3130, 'X'), + (0x3131, 'M', 'ᄀ'), + (0x3132, 'M', 'ᄁ'), + (0x3133, 'M', 'ᆪ'), + (0x3134, 'M', 'ᄂ'), + (0x3135, 'M', 'ᆬ'), + (0x3136, 'M', 'ᆭ'), + (0x3137, 'M', 'ᄃ'), + (0x3138, 'M', 'ᄄ'), + (0x3139, 'M', 'ᄅ'), + (0x313A, 'M', 'ᆰ'), + (0x313B, 'M', 'ᆱ'), + (0x313C, 'M', 'ᆲ'), + (0x313D, 'M', 'ᆳ'), + (0x313E, 'M', 'ᆴ'), + (0x313F, 'M', 'ᆵ'), + (0x3140, 'M', 'ᄚ'), + (0x3141, 'M', 'ᄆ'), + (0x3142, 'M', 'ᄇ'), + (0x3143, 'M', 'ᄈ'), + (0x3144, 'M', 'ᄡ'), + (0x3145, 'M', 'ᄉ'), + (0x3146, 'M', 'ᄊ'), + (0x3147, 'M', 'ᄋ'), + (0x3148, 'M', 'ᄌ'), + (0x3149, 'M', 'ᄍ'), + (0x314A, 'M', 'ᄎ'), + (0x314B, 'M', 'ᄏ'), + (0x314C, 'M', 'ᄐ'), + (0x314D, 'M', 'ᄑ'), + (0x314E, 'M', 'ᄒ'), + (0x314F, 'M', 'ᅡ'), + (0x3150, 'M', 'ᅢ'), + (0x3151, 'M', 'ᅣ'), + (0x3152, 'M', 'ᅤ'), + (0x3153, 'M', 'ᅥ'), 
+ (0x3154, 'M', 'ᅦ'), + (0x3155, 'M', 'ᅧ'), + (0x3156, 'M', 'ᅨ'), + (0x3157, 'M', 'ᅩ'), + (0x3158, 'M', 'ᅪ'), + (0x3159, 'M', 'ᅫ'), + (0x315A, 'M', 'ᅬ'), + (0x315B, 'M', 'ᅭ'), + (0x315C, 'M', 'ᅮ'), + (0x315D, 'M', 'ᅯ'), + (0x315E, 'M', 'ᅰ'), + (0x315F, 'M', 'ᅱ'), + (0x3160, 'M', 'ᅲ'), + (0x3161, 'M', 'ᅳ'), + (0x3162, 'M', 'ᅴ'), + (0x3163, 'M', 'ᅵ'), + (0x3164, 'X'), + (0x3165, 'M', 'ᄔ'), + (0x3166, 'M', 'ᄕ'), + (0x3167, 'M', 'ᇇ'), + (0x3168, 'M', 'ᇈ'), + (0x3169, 'M', 'ᇌ'), + (0x316A, 'M', 'ᇎ'), + (0x316B, 'M', 'ᇓ'), + (0x316C, 'M', 'ᇗ'), + (0x316D, 'M', 'ᇙ'), + (0x316E, 'M', 'ᄜ'), + (0x316F, 'M', 'ᇝ'), + (0x3170, 'M', 'ᇟ'), + (0x3171, 'M', 'ᄝ'), + (0x3172, 'M', 'ᄞ'), + (0x3173, 'M', 'ᄠ'), + (0x3174, 'M', 'ᄢ'), + (0x3175, 'M', 'ᄣ'), + (0x3176, 'M', 'ᄧ'), + (0x3177, 'M', 'ᄩ'), + (0x3178, 'M', 'ᄫ'), + (0x3179, 'M', 'ᄬ'), + (0x317A, 'M', 'ᄭ'), + (0x317B, 'M', 'ᄮ'), + (0x317C, 'M', 'ᄯ'), + (0x317D, 'M', 'ᄲ'), + (0x317E, 'M', 'ᄶ'), + (0x317F, 'M', 'ᅀ'), + (0x3180, 'M', 'ᅇ'), + (0x3181, 'M', 'ᅌ'), + (0x3182, 'M', 'ᇱ'), + (0x3183, 'M', 'ᇲ'), + (0x3184, 'M', 'ᅗ'), + ] + +def _seg_30() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x3185, 'M', 'ᅘ'), + (0x3186, 'M', 'ᅙ'), + (0x3187, 'M', 'ᆄ'), + (0x3188, 'M', 'ᆅ'), + (0x3189, 'M', 'ᆈ'), + (0x318A, 'M', 'ᆑ'), + (0x318B, 'M', 'ᆒ'), + (0x318C, 'M', 'ᆔ'), + (0x318D, 'M', 'ᆞ'), + (0x318E, 'M', 'ᆡ'), + (0x318F, 'X'), + (0x3190, 'V'), + (0x3192, 'M', '一'), + (0x3193, 'M', '二'), + (0x3194, 'M', '三'), + (0x3195, 'M', '四'), + (0x3196, 'M', '上'), + (0x3197, 'M', '中'), + (0x3198, 'M', '下'), + (0x3199, 'M', '甲'), + (0x319A, 'M', '乙'), + (0x319B, 'M', '丙'), + (0x319C, 'M', '丁'), + (0x319D, 'M', '天'), + (0x319E, 'M', '地'), + (0x319F, 'M', '人'), + (0x31A0, 'V'), + (0x31E4, 'X'), + (0x31F0, 'V'), + (0x3200, '3', '(ᄀ)'), + (0x3201, '3', '(ᄂ)'), + (0x3202, '3', '(ᄃ)'), + (0x3203, '3', '(ᄅ)'), + (0x3204, '3', '(ᄆ)'), + (0x3205, '3', '(ᄇ)'), + (0x3206, '3', '(ᄉ)'), + (0x3207, '3', '(ᄋ)'), + (0x3208, '3', '(ᄌ)'), + (0x3209, 
'3', '(ᄎ)'), + (0x320A, '3', '(ᄏ)'), + (0x320B, '3', '(ᄐ)'), + (0x320C, '3', '(ᄑ)'), + (0x320D, '3', '(ᄒ)'), + (0x320E, '3', '(가)'), + (0x320F, '3', '(나)'), + (0x3210, '3', '(다)'), + (0x3211, '3', '(라)'), + (0x3212, '3', '(마)'), + (0x3213, '3', '(바)'), + (0x3214, '3', '(사)'), + (0x3215, '3', '(아)'), + (0x3216, '3', '(자)'), + (0x3217, '3', '(차)'), + (0x3218, '3', '(카)'), + (0x3219, '3', '(타)'), + (0x321A, '3', '(파)'), + (0x321B, '3', '(하)'), + (0x321C, '3', '(주)'), + (0x321D, '3', '(오전)'), + (0x321E, '3', '(오후)'), + (0x321F, 'X'), + (0x3220, '3', '(一)'), + (0x3221, '3', '(二)'), + (0x3222, '3', '(三)'), + (0x3223, '3', '(四)'), + (0x3224, '3', '(五)'), + (0x3225, '3', '(六)'), + (0x3226, '3', '(七)'), + (0x3227, '3', '(八)'), + (0x3228, '3', '(九)'), + (0x3229, '3', '(十)'), + (0x322A, '3', '(月)'), + (0x322B, '3', '(火)'), + (0x322C, '3', '(水)'), + (0x322D, '3', '(木)'), + (0x322E, '3', '(金)'), + (0x322F, '3', '(土)'), + (0x3230, '3', '(日)'), + (0x3231, '3', '(株)'), + (0x3232, '3', '(有)'), + (0x3233, '3', '(社)'), + (0x3234, '3', '(名)'), + (0x3235, '3', '(特)'), + (0x3236, '3', '(財)'), + (0x3237, '3', '(祝)'), + (0x3238, '3', '(労)'), + (0x3239, '3', '(代)'), + (0x323A, '3', '(呼)'), + (0x323B, '3', '(学)'), + (0x323C, '3', '(監)'), + (0x323D, '3', '(企)'), + (0x323E, '3', '(資)'), + (0x323F, '3', '(協)'), + (0x3240, '3', '(祭)'), + (0x3241, '3', '(休)'), + (0x3242, '3', '(自)'), + (0x3243, '3', '(至)'), + (0x3244, 'M', '問'), + (0x3245, 'M', '幼'), + (0x3246, 'M', '文'), + ] + +def _seg_31() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x3247, 'M', '箏'), + (0x3248, 'V'), + (0x3250, 'M', 'pte'), + (0x3251, 'M', '21'), + (0x3252, 'M', '22'), + (0x3253, 'M', '23'), + (0x3254, 'M', '24'), + (0x3255, 'M', '25'), + (0x3256, 'M', '26'), + (0x3257, 'M', '27'), + (0x3258, 'M', '28'), + (0x3259, 'M', '29'), + (0x325A, 'M', '30'), + (0x325B, 'M', '31'), + (0x325C, 'M', '32'), + (0x325D, 'M', '33'), + (0x325E, 'M', '34'), + (0x325F, 'M', '35'), + (0x3260, 'M', 'ᄀ'), + (0x3261, 'M', 
'ᄂ'), + (0x3262, 'M', 'ᄃ'), + (0x3263, 'M', 'ᄅ'), + (0x3264, 'M', 'ᄆ'), + (0x3265, 'M', 'ᄇ'), + (0x3266, 'M', 'ᄉ'), + (0x3267, 'M', 'ᄋ'), + (0x3268, 'M', 'ᄌ'), + (0x3269, 'M', 'ᄎ'), + (0x326A, 'M', 'ᄏ'), + (0x326B, 'M', 'ᄐ'), + (0x326C, 'M', 'ᄑ'), + (0x326D, 'M', 'ᄒ'), + (0x326E, 'M', '가'), + (0x326F, 'M', '나'), + (0x3270, 'M', '다'), + (0x3271, 'M', '라'), + (0x3272, 'M', '마'), + (0x3273, 'M', '바'), + (0x3274, 'M', '사'), + (0x3275, 'M', '아'), + (0x3276, 'M', '자'), + (0x3277, 'M', '차'), + (0x3278, 'M', '카'), + (0x3279, 'M', '타'), + (0x327A, 'M', '파'), + (0x327B, 'M', '하'), + (0x327C, 'M', '참고'), + (0x327D, 'M', '주의'), + (0x327E, 'M', '우'), + (0x327F, 'V'), + (0x3280, 'M', '一'), + (0x3281, 'M', '二'), + (0x3282, 'M', '三'), + (0x3283, 'M', '四'), + (0x3284, 'M', '五'), + (0x3285, 'M', '六'), + (0x3286, 'M', '七'), + (0x3287, 'M', '八'), + (0x3288, 'M', '九'), + (0x3289, 'M', '十'), + (0x328A, 'M', '月'), + (0x328B, 'M', '火'), + (0x328C, 'M', '水'), + (0x328D, 'M', '木'), + (0x328E, 'M', '金'), + (0x328F, 'M', '土'), + (0x3290, 'M', '日'), + (0x3291, 'M', '株'), + (0x3292, 'M', '有'), + (0x3293, 'M', '社'), + (0x3294, 'M', '名'), + (0x3295, 'M', '特'), + (0x3296, 'M', '財'), + (0x3297, 'M', '祝'), + (0x3298, 'M', '労'), + (0x3299, 'M', '秘'), + (0x329A, 'M', '男'), + (0x329B, 'M', '女'), + (0x329C, 'M', '適'), + (0x329D, 'M', '優'), + (0x329E, 'M', '印'), + (0x329F, 'M', '注'), + (0x32A0, 'M', '項'), + (0x32A1, 'M', '休'), + (0x32A2, 'M', '写'), + (0x32A3, 'M', '正'), + (0x32A4, 'M', '上'), + (0x32A5, 'M', '中'), + (0x32A6, 'M', '下'), + (0x32A7, 'M', '左'), + (0x32A8, 'M', '右'), + (0x32A9, 'M', '医'), + (0x32AA, 'M', '宗'), + (0x32AB, 'M', '学'), + (0x32AC, 'M', '監'), + (0x32AD, 'M', '企'), + (0x32AE, 'M', '資'), + (0x32AF, 'M', '協'), + (0x32B0, 'M', '夜'), + (0x32B1, 'M', '36'), + ] + +def _seg_32() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x32B2, 'M', '37'), + (0x32B3, 'M', '38'), + (0x32B4, 'M', '39'), + (0x32B5, 'M', '40'), + (0x32B6, 'M', '41'), + (0x32B7, 'M', '42'), + (0x32B8, 
'M', '43'), + (0x32B9, 'M', '44'), + (0x32BA, 'M', '45'), + (0x32BB, 'M', '46'), + (0x32BC, 'M', '47'), + (0x32BD, 'M', '48'), + (0x32BE, 'M', '49'), + (0x32BF, 'M', '50'), + (0x32C0, 'M', '1月'), + (0x32C1, 'M', '2月'), + (0x32C2, 'M', '3月'), + (0x32C3, 'M', '4月'), + (0x32C4, 'M', '5月'), + (0x32C5, 'M', '6月'), + (0x32C6, 'M', '7月'), + (0x32C7, 'M', '8月'), + (0x32C8, 'M', '9月'), + (0x32C9, 'M', '10月'), + (0x32CA, 'M', '11月'), + (0x32CB, 'M', '12月'), + (0x32CC, 'M', 'hg'), + (0x32CD, 'M', 'erg'), + (0x32CE, 'M', 'ev'), + (0x32CF, 'M', 'ltd'), + (0x32D0, 'M', 'ア'), + (0x32D1, 'M', 'イ'), + (0x32D2, 'M', 'ウ'), + (0x32D3, 'M', 'エ'), + (0x32D4, 'M', 'オ'), + (0x32D5, 'M', 'カ'), + (0x32D6, 'M', 'キ'), + (0x32D7, 'M', 'ク'), + (0x32D8, 'M', 'ケ'), + (0x32D9, 'M', 'コ'), + (0x32DA, 'M', 'サ'), + (0x32DB, 'M', 'シ'), + (0x32DC, 'M', 'ス'), + (0x32DD, 'M', 'セ'), + (0x32DE, 'M', 'ソ'), + (0x32DF, 'M', 'タ'), + (0x32E0, 'M', 'チ'), + (0x32E1, 'M', 'ツ'), + (0x32E2, 'M', 'テ'), + (0x32E3, 'M', 'ト'), + (0x32E4, 'M', 'ナ'), + (0x32E5, 'M', 'ニ'), + (0x32E6, 'M', 'ヌ'), + (0x32E7, 'M', 'ネ'), + (0x32E8, 'M', 'ノ'), + (0x32E9, 'M', 'ハ'), + (0x32EA, 'M', 'ヒ'), + (0x32EB, 'M', 'フ'), + (0x32EC, 'M', 'ヘ'), + (0x32ED, 'M', 'ホ'), + (0x32EE, 'M', 'マ'), + (0x32EF, 'M', 'ミ'), + (0x32F0, 'M', 'ム'), + (0x32F1, 'M', 'メ'), + (0x32F2, 'M', 'モ'), + (0x32F3, 'M', 'ヤ'), + (0x32F4, 'M', 'ユ'), + (0x32F5, 'M', 'ヨ'), + (0x32F6, 'M', 'ラ'), + (0x32F7, 'M', 'リ'), + (0x32F8, 'M', 'ル'), + (0x32F9, 'M', 'レ'), + (0x32FA, 'M', 'ロ'), + (0x32FB, 'M', 'ワ'), + (0x32FC, 'M', 'ヰ'), + (0x32FD, 'M', 'ヱ'), + (0x32FE, 'M', 'ヲ'), + (0x32FF, 'M', '令和'), + (0x3300, 'M', 'アパート'), + (0x3301, 'M', 'アルファ'), + (0x3302, 'M', 'アンペア'), + (0x3303, 'M', 'アール'), + (0x3304, 'M', 'イニング'), + (0x3305, 'M', 'インチ'), + (0x3306, 'M', 'ウォン'), + (0x3307, 'M', 'エスクード'), + (0x3308, 'M', 'エーカー'), + (0x3309, 'M', 'オンス'), + (0x330A, 'M', 'オーム'), + (0x330B, 'M', 'カイリ'), + (0x330C, 'M', 'カラット'), + (0x330D, 'M', 'カロリー'), + (0x330E, 'M', 'ガロン'), + (0x330F, 'M', 'ガンマ'), + 
(0x3310, 'M', 'ギガ'), + (0x3311, 'M', 'ギニー'), + (0x3312, 'M', 'キュリー'), + (0x3313, 'M', 'ギルダー'), + (0x3314, 'M', 'キロ'), + (0x3315, 'M', 'キログラム'), + ] + +def _seg_33() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x3316, 'M', 'キロメートル'), + (0x3317, 'M', 'キロワット'), + (0x3318, 'M', 'グラム'), + (0x3319, 'M', 'グラムトン'), + (0x331A, 'M', 'クルゼイロ'), + (0x331B, 'M', 'クローネ'), + (0x331C, 'M', 'ケース'), + (0x331D, 'M', 'コルナ'), + (0x331E, 'M', 'コーポ'), + (0x331F, 'M', 'サイクル'), + (0x3320, 'M', 'サンチーム'), + (0x3321, 'M', 'シリング'), + (0x3322, 'M', 'センチ'), + (0x3323, 'M', 'セント'), + (0x3324, 'M', 'ダース'), + (0x3325, 'M', 'デシ'), + (0x3326, 'M', 'ドル'), + (0x3327, 'M', 'トン'), + (0x3328, 'M', 'ナノ'), + (0x3329, 'M', 'ノット'), + (0x332A, 'M', 'ハイツ'), + (0x332B, 'M', 'パーセント'), + (0x332C, 'M', 'パーツ'), + (0x332D, 'M', 'バーレル'), + (0x332E, 'M', 'ピアストル'), + (0x332F, 'M', 'ピクル'), + (0x3330, 'M', 'ピコ'), + (0x3331, 'M', 'ビル'), + (0x3332, 'M', 'ファラッド'), + (0x3333, 'M', 'フィート'), + (0x3334, 'M', 'ブッシェル'), + (0x3335, 'M', 'フラン'), + (0x3336, 'M', 'ヘクタール'), + (0x3337, 'M', 'ペソ'), + (0x3338, 'M', 'ペニヒ'), + (0x3339, 'M', 'ヘルツ'), + (0x333A, 'M', 'ペンス'), + (0x333B, 'M', 'ページ'), + (0x333C, 'M', 'ベータ'), + (0x333D, 'M', 'ポイント'), + (0x333E, 'M', 'ボルト'), + (0x333F, 'M', 'ホン'), + (0x3340, 'M', 'ポンド'), + (0x3341, 'M', 'ホール'), + (0x3342, 'M', 'ホーン'), + (0x3343, 'M', 'マイクロ'), + (0x3344, 'M', 'マイル'), + (0x3345, 'M', 'マッハ'), + (0x3346, 'M', 'マルク'), + (0x3347, 'M', 'マンション'), + (0x3348, 'M', 'ミクロン'), + (0x3349, 'M', 'ミリ'), + (0x334A, 'M', 'ミリバール'), + (0x334B, 'M', 'メガ'), + (0x334C, 'M', 'メガトン'), + (0x334D, 'M', 'メートル'), + (0x334E, 'M', 'ヤード'), + (0x334F, 'M', 'ヤール'), + (0x3350, 'M', 'ユアン'), + (0x3351, 'M', 'リットル'), + (0x3352, 'M', 'リラ'), + (0x3353, 'M', 'ルピー'), + (0x3354, 'M', 'ルーブル'), + (0x3355, 'M', 'レム'), + (0x3356, 'M', 'レントゲン'), + (0x3357, 'M', 'ワット'), + (0x3358, 'M', '0点'), + (0x3359, 'M', '1点'), + (0x335A, 'M', '2点'), + (0x335B, 'M', '3点'), + (0x335C, 'M', '4点'), + (0x335D, 'M', '5点'), + (0x335E, 'M', 
'6点'), + (0x335F, 'M', '7点'), + (0x3360, 'M', '8点'), + (0x3361, 'M', '9点'), + (0x3362, 'M', '10点'), + (0x3363, 'M', '11点'), + (0x3364, 'M', '12点'), + (0x3365, 'M', '13点'), + (0x3366, 'M', '14点'), + (0x3367, 'M', '15点'), + (0x3368, 'M', '16点'), + (0x3369, 'M', '17点'), + (0x336A, 'M', '18点'), + (0x336B, 'M', '19点'), + (0x336C, 'M', '20点'), + (0x336D, 'M', '21点'), + (0x336E, 'M', '22点'), + (0x336F, 'M', '23点'), + (0x3370, 'M', '24点'), + (0x3371, 'M', 'hpa'), + (0x3372, 'M', 'da'), + (0x3373, 'M', 'au'), + (0x3374, 'M', 'bar'), + (0x3375, 'M', 'ov'), + (0x3376, 'M', 'pc'), + (0x3377, 'M', 'dm'), + (0x3378, 'M', 'dm2'), + (0x3379, 'M', 'dm3'), + ] + +def _seg_34() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x337A, 'M', 'iu'), + (0x337B, 'M', '平成'), + (0x337C, 'M', '昭和'), + (0x337D, 'M', '大正'), + (0x337E, 'M', '明治'), + (0x337F, 'M', '株式会社'), + (0x3380, 'M', 'pa'), + (0x3381, 'M', 'na'), + (0x3382, 'M', 'μa'), + (0x3383, 'M', 'ma'), + (0x3384, 'M', 'ka'), + (0x3385, 'M', 'kb'), + (0x3386, 'M', 'mb'), + (0x3387, 'M', 'gb'), + (0x3388, 'M', 'cal'), + (0x3389, 'M', 'kcal'), + (0x338A, 'M', 'pf'), + (0x338B, 'M', 'nf'), + (0x338C, 'M', 'μf'), + (0x338D, 'M', 'μg'), + (0x338E, 'M', 'mg'), + (0x338F, 'M', 'kg'), + (0x3390, 'M', 'hz'), + (0x3391, 'M', 'khz'), + (0x3392, 'M', 'mhz'), + (0x3393, 'M', 'ghz'), + (0x3394, 'M', 'thz'), + (0x3395, 'M', 'μl'), + (0x3396, 'M', 'ml'), + (0x3397, 'M', 'dl'), + (0x3398, 'M', 'kl'), + (0x3399, 'M', 'fm'), + (0x339A, 'M', 'nm'), + (0x339B, 'M', 'μm'), + (0x339C, 'M', 'mm'), + (0x339D, 'M', 'cm'), + (0x339E, 'M', 'km'), + (0x339F, 'M', 'mm2'), + (0x33A0, 'M', 'cm2'), + (0x33A1, 'M', 'm2'), + (0x33A2, 'M', 'km2'), + (0x33A3, 'M', 'mm3'), + (0x33A4, 'M', 'cm3'), + (0x33A5, 'M', 'm3'), + (0x33A6, 'M', 'km3'), + (0x33A7, 'M', 'm∕s'), + (0x33A8, 'M', 'm∕s2'), + (0x33A9, 'M', 'pa'), + (0x33AA, 'M', 'kpa'), + (0x33AB, 'M', 'mpa'), + (0x33AC, 'M', 'gpa'), + (0x33AD, 'M', 'rad'), + (0x33AE, 'M', 'rad∕s'), + (0x33AF, 'M', 
'rad∕s2'), + (0x33B0, 'M', 'ps'), + (0x33B1, 'M', 'ns'), + (0x33B2, 'M', 'μs'), + (0x33B3, 'M', 'ms'), + (0x33B4, 'M', 'pv'), + (0x33B5, 'M', 'nv'), + (0x33B6, 'M', 'μv'), + (0x33B7, 'M', 'mv'), + (0x33B8, 'M', 'kv'), + (0x33B9, 'M', 'mv'), + (0x33BA, 'M', 'pw'), + (0x33BB, 'M', 'nw'), + (0x33BC, 'M', 'μw'), + (0x33BD, 'M', 'mw'), + (0x33BE, 'M', 'kw'), + (0x33BF, 'M', 'mw'), + (0x33C0, 'M', 'kω'), + (0x33C1, 'M', 'mω'), + (0x33C2, 'X'), + (0x33C3, 'M', 'bq'), + (0x33C4, 'M', 'cc'), + (0x33C5, 'M', 'cd'), + (0x33C6, 'M', 'c∕kg'), + (0x33C7, 'X'), + (0x33C8, 'M', 'db'), + (0x33C9, 'M', 'gy'), + (0x33CA, 'M', 'ha'), + (0x33CB, 'M', 'hp'), + (0x33CC, 'M', 'in'), + (0x33CD, 'M', 'kk'), + (0x33CE, 'M', 'km'), + (0x33CF, 'M', 'kt'), + (0x33D0, 'M', 'lm'), + (0x33D1, 'M', 'ln'), + (0x33D2, 'M', 'log'), + (0x33D3, 'M', 'lx'), + (0x33D4, 'M', 'mb'), + (0x33D5, 'M', 'mil'), + (0x33D6, 'M', 'mol'), + (0x33D7, 'M', 'ph'), + (0x33D8, 'X'), + (0x33D9, 'M', 'ppm'), + (0x33DA, 'M', 'pr'), + (0x33DB, 'M', 'sr'), + (0x33DC, 'M', 'sv'), + (0x33DD, 'M', 'wb'), + ] + +def _seg_35() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x33DE, 'M', 'v∕m'), + (0x33DF, 'M', 'a∕m'), + (0x33E0, 'M', '1日'), + (0x33E1, 'M', '2日'), + (0x33E2, 'M', '3日'), + (0x33E3, 'M', '4日'), + (0x33E4, 'M', '5日'), + (0x33E5, 'M', '6日'), + (0x33E6, 'M', '7日'), + (0x33E7, 'M', '8日'), + (0x33E8, 'M', '9日'), + (0x33E9, 'M', '10日'), + (0x33EA, 'M', '11日'), + (0x33EB, 'M', '12日'), + (0x33EC, 'M', '13日'), + (0x33ED, 'M', '14日'), + (0x33EE, 'M', '15日'), + (0x33EF, 'M', '16日'), + (0x33F0, 'M', '17日'), + (0x33F1, 'M', '18日'), + (0x33F2, 'M', '19日'), + (0x33F3, 'M', '20日'), + (0x33F4, 'M', '21日'), + (0x33F5, 'M', '22日'), + (0x33F6, 'M', '23日'), + (0x33F7, 'M', '24日'), + (0x33F8, 'M', '25日'), + (0x33F9, 'M', '26日'), + (0x33FA, 'M', '27日'), + (0x33FB, 'M', '28日'), + (0x33FC, 'M', '29日'), + (0x33FD, 'M', '30日'), + (0x33FE, 'M', '31日'), + (0x33FF, 'M', 'gal'), + (0x3400, 'V'), + (0xA48D, 'X'), + (0xA490, 
'V'), + (0xA4C7, 'X'), + (0xA4D0, 'V'), + (0xA62C, 'X'), + (0xA640, 'M', 'ꙁ'), + (0xA641, 'V'), + (0xA642, 'M', 'ꙃ'), + (0xA643, 'V'), + (0xA644, 'M', 'ꙅ'), + (0xA645, 'V'), + (0xA646, 'M', 'ꙇ'), + (0xA647, 'V'), + (0xA648, 'M', 'ꙉ'), + (0xA649, 'V'), + (0xA64A, 'M', 'ꙋ'), + (0xA64B, 'V'), + (0xA64C, 'M', 'ꙍ'), + (0xA64D, 'V'), + (0xA64E, 'M', 'ꙏ'), + (0xA64F, 'V'), + (0xA650, 'M', 'ꙑ'), + (0xA651, 'V'), + (0xA652, 'M', 'ꙓ'), + (0xA653, 'V'), + (0xA654, 'M', 'ꙕ'), + (0xA655, 'V'), + (0xA656, 'M', 'ꙗ'), + (0xA657, 'V'), + (0xA658, 'M', 'ꙙ'), + (0xA659, 'V'), + (0xA65A, 'M', 'ꙛ'), + (0xA65B, 'V'), + (0xA65C, 'M', 'ꙝ'), + (0xA65D, 'V'), + (0xA65E, 'M', 'ꙟ'), + (0xA65F, 'V'), + (0xA660, 'M', 'ꙡ'), + (0xA661, 'V'), + (0xA662, 'M', 'ꙣ'), + (0xA663, 'V'), + (0xA664, 'M', 'ꙥ'), + (0xA665, 'V'), + (0xA666, 'M', 'ꙧ'), + (0xA667, 'V'), + (0xA668, 'M', 'ꙩ'), + (0xA669, 'V'), + (0xA66A, 'M', 'ꙫ'), + (0xA66B, 'V'), + (0xA66C, 'M', 'ꙭ'), + (0xA66D, 'V'), + (0xA680, 'M', 'ꚁ'), + (0xA681, 'V'), + (0xA682, 'M', 'ꚃ'), + (0xA683, 'V'), + (0xA684, 'M', 'ꚅ'), + (0xA685, 'V'), + (0xA686, 'M', 'ꚇ'), + (0xA687, 'V'), + (0xA688, 'M', 'ꚉ'), + (0xA689, 'V'), + (0xA68A, 'M', 'ꚋ'), + (0xA68B, 'V'), + (0xA68C, 'M', 'ꚍ'), + (0xA68D, 'V'), + ] + +def _seg_36() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xA68E, 'M', 'ꚏ'), + (0xA68F, 'V'), + (0xA690, 'M', 'ꚑ'), + (0xA691, 'V'), + (0xA692, 'M', 'ꚓ'), + (0xA693, 'V'), + (0xA694, 'M', 'ꚕ'), + (0xA695, 'V'), + (0xA696, 'M', 'ꚗ'), + (0xA697, 'V'), + (0xA698, 'M', 'ꚙ'), + (0xA699, 'V'), + (0xA69A, 'M', 'ꚛ'), + (0xA69B, 'V'), + (0xA69C, 'M', 'ъ'), + (0xA69D, 'M', 'ь'), + (0xA69E, 'V'), + (0xA6F8, 'X'), + (0xA700, 'V'), + (0xA722, 'M', 'ꜣ'), + (0xA723, 'V'), + (0xA724, 'M', 'ꜥ'), + (0xA725, 'V'), + (0xA726, 'M', 'ꜧ'), + (0xA727, 'V'), + (0xA728, 'M', 'ꜩ'), + (0xA729, 'V'), + (0xA72A, 'M', 'ꜫ'), + (0xA72B, 'V'), + (0xA72C, 'M', 'ꜭ'), + (0xA72D, 'V'), + (0xA72E, 'M', 'ꜯ'), + (0xA72F, 'V'), + (0xA732, 'M', 'ꜳ'), + (0xA733, 'V'), + 
(0xA734, 'M', 'ꜵ'), + (0xA735, 'V'), + (0xA736, 'M', 'ꜷ'), + (0xA737, 'V'), + (0xA738, 'M', 'ꜹ'), + (0xA739, 'V'), + (0xA73A, 'M', 'ꜻ'), + (0xA73B, 'V'), + (0xA73C, 'M', 'ꜽ'), + (0xA73D, 'V'), + (0xA73E, 'M', 'ꜿ'), + (0xA73F, 'V'), + (0xA740, 'M', 'ꝁ'), + (0xA741, 'V'), + (0xA742, 'M', 'ꝃ'), + (0xA743, 'V'), + (0xA744, 'M', 'ꝅ'), + (0xA745, 'V'), + (0xA746, 'M', 'ꝇ'), + (0xA747, 'V'), + (0xA748, 'M', 'ꝉ'), + (0xA749, 'V'), + (0xA74A, 'M', 'ꝋ'), + (0xA74B, 'V'), + (0xA74C, 'M', 'ꝍ'), + (0xA74D, 'V'), + (0xA74E, 'M', 'ꝏ'), + (0xA74F, 'V'), + (0xA750, 'M', 'ꝑ'), + (0xA751, 'V'), + (0xA752, 'M', 'ꝓ'), + (0xA753, 'V'), + (0xA754, 'M', 'ꝕ'), + (0xA755, 'V'), + (0xA756, 'M', 'ꝗ'), + (0xA757, 'V'), + (0xA758, 'M', 'ꝙ'), + (0xA759, 'V'), + (0xA75A, 'M', 'ꝛ'), + (0xA75B, 'V'), + (0xA75C, 'M', 'ꝝ'), + (0xA75D, 'V'), + (0xA75E, 'M', 'ꝟ'), + (0xA75F, 'V'), + (0xA760, 'M', 'ꝡ'), + (0xA761, 'V'), + (0xA762, 'M', 'ꝣ'), + (0xA763, 'V'), + (0xA764, 'M', 'ꝥ'), + (0xA765, 'V'), + (0xA766, 'M', 'ꝧ'), + (0xA767, 'V'), + (0xA768, 'M', 'ꝩ'), + (0xA769, 'V'), + (0xA76A, 'M', 'ꝫ'), + (0xA76B, 'V'), + (0xA76C, 'M', 'ꝭ'), + (0xA76D, 'V'), + (0xA76E, 'M', 'ꝯ'), + (0xA76F, 'V'), + (0xA770, 'M', 'ꝯ'), + (0xA771, 'V'), + (0xA779, 'M', 'ꝺ'), + (0xA77A, 'V'), + (0xA77B, 'M', 'ꝼ'), + ] + +def _seg_37() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xA77C, 'V'), + (0xA77D, 'M', 'ᵹ'), + (0xA77E, 'M', 'ꝿ'), + (0xA77F, 'V'), + (0xA780, 'M', 'ꞁ'), + (0xA781, 'V'), + (0xA782, 'M', 'ꞃ'), + (0xA783, 'V'), + (0xA784, 'M', 'ꞅ'), + (0xA785, 'V'), + (0xA786, 'M', 'ꞇ'), + (0xA787, 'V'), + (0xA78B, 'M', 'ꞌ'), + (0xA78C, 'V'), + (0xA78D, 'M', 'ɥ'), + (0xA78E, 'V'), + (0xA790, 'M', 'ꞑ'), + (0xA791, 'V'), + (0xA792, 'M', 'ꞓ'), + (0xA793, 'V'), + (0xA796, 'M', 'ꞗ'), + (0xA797, 'V'), + (0xA798, 'M', 'ꞙ'), + (0xA799, 'V'), + (0xA79A, 'M', 'ꞛ'), + (0xA79B, 'V'), + (0xA79C, 'M', 'ꞝ'), + (0xA79D, 'V'), + (0xA79E, 'M', 'ꞟ'), + (0xA79F, 'V'), + (0xA7A0, 'M', 'ꞡ'), + (0xA7A1, 'V'), + (0xA7A2, 'M', 
'ꞣ'), + (0xA7A3, 'V'), + (0xA7A4, 'M', 'ꞥ'), + (0xA7A5, 'V'), + (0xA7A6, 'M', 'ꞧ'), + (0xA7A7, 'V'), + (0xA7A8, 'M', 'ꞩ'), + (0xA7A9, 'V'), + (0xA7AA, 'M', 'ɦ'), + (0xA7AB, 'M', 'ɜ'), + (0xA7AC, 'M', 'ɡ'), + (0xA7AD, 'M', 'ɬ'), + (0xA7AE, 'M', 'ɪ'), + (0xA7AF, 'V'), + (0xA7B0, 'M', 'ʞ'), + (0xA7B1, 'M', 'ʇ'), + (0xA7B2, 'M', 'ʝ'), + (0xA7B3, 'M', 'ꭓ'), + (0xA7B4, 'M', 'ꞵ'), + (0xA7B5, 'V'), + (0xA7B6, 'M', 'ꞷ'), + (0xA7B7, 'V'), + (0xA7B8, 'M', 'ꞹ'), + (0xA7B9, 'V'), + (0xA7BA, 'M', 'ꞻ'), + (0xA7BB, 'V'), + (0xA7BC, 'M', 'ꞽ'), + (0xA7BD, 'V'), + (0xA7BE, 'M', 'ꞿ'), + (0xA7BF, 'V'), + (0xA7C0, 'M', 'ꟁ'), + (0xA7C1, 'V'), + (0xA7C2, 'M', 'ꟃ'), + (0xA7C3, 'V'), + (0xA7C4, 'M', 'ꞔ'), + (0xA7C5, 'M', 'ʂ'), + (0xA7C6, 'M', 'ᶎ'), + (0xA7C7, 'M', 'ꟈ'), + (0xA7C8, 'V'), + (0xA7C9, 'M', 'ꟊ'), + (0xA7CA, 'V'), + (0xA7CB, 'X'), + (0xA7D0, 'M', 'ꟑ'), + (0xA7D1, 'V'), + (0xA7D2, 'X'), + (0xA7D3, 'V'), + (0xA7D4, 'X'), + (0xA7D5, 'V'), + (0xA7D6, 'M', 'ꟗ'), + (0xA7D7, 'V'), + (0xA7D8, 'M', 'ꟙ'), + (0xA7D9, 'V'), + (0xA7DA, 'X'), + (0xA7F2, 'M', 'c'), + (0xA7F3, 'M', 'f'), + (0xA7F4, 'M', 'q'), + (0xA7F5, 'M', 'ꟶ'), + (0xA7F6, 'V'), + (0xA7F8, 'M', 'ħ'), + (0xA7F9, 'M', 'œ'), + (0xA7FA, 'V'), + (0xA82D, 'X'), + (0xA830, 'V'), + (0xA83A, 'X'), + (0xA840, 'V'), + (0xA878, 'X'), + (0xA880, 'V'), + (0xA8C6, 'X'), + ] + +def _seg_38() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xA8CE, 'V'), + (0xA8DA, 'X'), + (0xA8E0, 'V'), + (0xA954, 'X'), + (0xA95F, 'V'), + (0xA97D, 'X'), + (0xA980, 'V'), + (0xA9CE, 'X'), + (0xA9CF, 'V'), + (0xA9DA, 'X'), + (0xA9DE, 'V'), + (0xA9FF, 'X'), + (0xAA00, 'V'), + (0xAA37, 'X'), + (0xAA40, 'V'), + (0xAA4E, 'X'), + (0xAA50, 'V'), + (0xAA5A, 'X'), + (0xAA5C, 'V'), + (0xAAC3, 'X'), + (0xAADB, 'V'), + (0xAAF7, 'X'), + (0xAB01, 'V'), + (0xAB07, 'X'), + (0xAB09, 'V'), + (0xAB0F, 'X'), + (0xAB11, 'V'), + (0xAB17, 'X'), + (0xAB20, 'V'), + (0xAB27, 'X'), + (0xAB28, 'V'), + (0xAB2F, 'X'), + (0xAB30, 'V'), + (0xAB5C, 'M', 'ꜧ'), + (0xAB5D, 
'M', 'ꬷ'), + (0xAB5E, 'M', 'ɫ'), + (0xAB5F, 'M', 'ꭒ'), + (0xAB60, 'V'), + (0xAB69, 'M', 'ʍ'), + (0xAB6A, 'V'), + (0xAB6C, 'X'), + (0xAB70, 'M', 'Ꭰ'), + (0xAB71, 'M', 'Ꭱ'), + (0xAB72, 'M', 'Ꭲ'), + (0xAB73, 'M', 'Ꭳ'), + (0xAB74, 'M', 'Ꭴ'), + (0xAB75, 'M', 'Ꭵ'), + (0xAB76, 'M', 'Ꭶ'), + (0xAB77, 'M', 'Ꭷ'), + (0xAB78, 'M', 'Ꭸ'), + (0xAB79, 'M', 'Ꭹ'), + (0xAB7A, 'M', 'Ꭺ'), + (0xAB7B, 'M', 'Ꭻ'), + (0xAB7C, 'M', 'Ꭼ'), + (0xAB7D, 'M', 'Ꭽ'), + (0xAB7E, 'M', 'Ꭾ'), + (0xAB7F, 'M', 'Ꭿ'), + (0xAB80, 'M', 'Ꮀ'), + (0xAB81, 'M', 'Ꮁ'), + (0xAB82, 'M', 'Ꮂ'), + (0xAB83, 'M', 'Ꮃ'), + (0xAB84, 'M', 'Ꮄ'), + (0xAB85, 'M', 'Ꮅ'), + (0xAB86, 'M', 'Ꮆ'), + (0xAB87, 'M', 'Ꮇ'), + (0xAB88, 'M', 'Ꮈ'), + (0xAB89, 'M', 'Ꮉ'), + (0xAB8A, 'M', 'Ꮊ'), + (0xAB8B, 'M', 'Ꮋ'), + (0xAB8C, 'M', 'Ꮌ'), + (0xAB8D, 'M', 'Ꮍ'), + (0xAB8E, 'M', 'Ꮎ'), + (0xAB8F, 'M', 'Ꮏ'), + (0xAB90, 'M', 'Ꮐ'), + (0xAB91, 'M', 'Ꮑ'), + (0xAB92, 'M', 'Ꮒ'), + (0xAB93, 'M', 'Ꮓ'), + (0xAB94, 'M', 'Ꮔ'), + (0xAB95, 'M', 'Ꮕ'), + (0xAB96, 'M', 'Ꮖ'), + (0xAB97, 'M', 'Ꮗ'), + (0xAB98, 'M', 'Ꮘ'), + (0xAB99, 'M', 'Ꮙ'), + (0xAB9A, 'M', 'Ꮚ'), + (0xAB9B, 'M', 'Ꮛ'), + (0xAB9C, 'M', 'Ꮜ'), + (0xAB9D, 'M', 'Ꮝ'), + (0xAB9E, 'M', 'Ꮞ'), + (0xAB9F, 'M', 'Ꮟ'), + (0xABA0, 'M', 'Ꮠ'), + (0xABA1, 'M', 'Ꮡ'), + (0xABA2, 'M', 'Ꮢ'), + (0xABA3, 'M', 'Ꮣ'), + (0xABA4, 'M', 'Ꮤ'), + (0xABA5, 'M', 'Ꮥ'), + (0xABA6, 'M', 'Ꮦ'), + (0xABA7, 'M', 'Ꮧ'), + (0xABA8, 'M', 'Ꮨ'), + (0xABA9, 'M', 'Ꮩ'), + (0xABAA, 'M', 'Ꮪ'), + ] + +def _seg_39() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xABAB, 'M', 'Ꮫ'), + (0xABAC, 'M', 'Ꮬ'), + (0xABAD, 'M', 'Ꮭ'), + (0xABAE, 'M', 'Ꮮ'), + (0xABAF, 'M', 'Ꮯ'), + (0xABB0, 'M', 'Ꮰ'), + (0xABB1, 'M', 'Ꮱ'), + (0xABB2, 'M', 'Ꮲ'), + (0xABB3, 'M', 'Ꮳ'), + (0xABB4, 'M', 'Ꮴ'), + (0xABB5, 'M', 'Ꮵ'), + (0xABB6, 'M', 'Ꮶ'), + (0xABB7, 'M', 'Ꮷ'), + (0xABB8, 'M', 'Ꮸ'), + (0xABB9, 'M', 'Ꮹ'), + (0xABBA, 'M', 'Ꮺ'), + (0xABBB, 'M', 'Ꮻ'), + (0xABBC, 'M', 'Ꮼ'), + (0xABBD, 'M', 'Ꮽ'), + (0xABBE, 'M', 'Ꮾ'), + (0xABBF, 'M', 'Ꮿ'), + (0xABC0, 'V'), + 
(0xABEE, 'X'), + (0xABF0, 'V'), + (0xABFA, 'X'), + (0xAC00, 'V'), + (0xD7A4, 'X'), + (0xD7B0, 'V'), + (0xD7C7, 'X'), + (0xD7CB, 'V'), + (0xD7FC, 'X'), + (0xF900, 'M', '豈'), + (0xF901, 'M', '更'), + (0xF902, 'M', '車'), + (0xF903, 'M', '賈'), + (0xF904, 'M', '滑'), + (0xF905, 'M', '串'), + (0xF906, 'M', '句'), + (0xF907, 'M', '龜'), + (0xF909, 'M', '契'), + (0xF90A, 'M', '金'), + (0xF90B, 'M', '喇'), + (0xF90C, 'M', '奈'), + (0xF90D, 'M', '懶'), + (0xF90E, 'M', '癩'), + (0xF90F, 'M', '羅'), + (0xF910, 'M', '蘿'), + (0xF911, 'M', '螺'), + (0xF912, 'M', '裸'), + (0xF913, 'M', '邏'), + (0xF914, 'M', '樂'), + (0xF915, 'M', '洛'), + (0xF916, 'M', '烙'), + (0xF917, 'M', '珞'), + (0xF918, 'M', '落'), + (0xF919, 'M', '酪'), + (0xF91A, 'M', '駱'), + (0xF91B, 'M', '亂'), + (0xF91C, 'M', '卵'), + (0xF91D, 'M', '欄'), + (0xF91E, 'M', '爛'), + (0xF91F, 'M', '蘭'), + (0xF920, 'M', '鸞'), + (0xF921, 'M', '嵐'), + (0xF922, 'M', '濫'), + (0xF923, 'M', '藍'), + (0xF924, 'M', '襤'), + (0xF925, 'M', '拉'), + (0xF926, 'M', '臘'), + (0xF927, 'M', '蠟'), + (0xF928, 'M', '廊'), + (0xF929, 'M', '朗'), + (0xF92A, 'M', '浪'), + (0xF92B, 'M', '狼'), + (0xF92C, 'M', '郎'), + (0xF92D, 'M', '來'), + (0xF92E, 'M', '冷'), + (0xF92F, 'M', '勞'), + (0xF930, 'M', '擄'), + (0xF931, 'M', '櫓'), + (0xF932, 'M', '爐'), + (0xF933, 'M', '盧'), + (0xF934, 'M', '老'), + (0xF935, 'M', '蘆'), + (0xF936, 'M', '虜'), + (0xF937, 'M', '路'), + (0xF938, 'M', '露'), + (0xF939, 'M', '魯'), + (0xF93A, 'M', '鷺'), + (0xF93B, 'M', '碌'), + (0xF93C, 'M', '祿'), + (0xF93D, 'M', '綠'), + (0xF93E, 'M', '菉'), + (0xF93F, 'M', '錄'), + (0xF940, 'M', '鹿'), + (0xF941, 'M', '論'), + (0xF942, 'M', '壟'), + (0xF943, 'M', '弄'), + (0xF944, 'M', '籠'), + (0xF945, 'M', '聾'), + ] + +def _seg_40() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xF946, 'M', '牢'), + (0xF947, 'M', '磊'), + (0xF948, 'M', '賂'), + (0xF949, 'M', '雷'), + (0xF94A, 'M', '壘'), + (0xF94B, 'M', '屢'), + (0xF94C, 'M', '樓'), + (0xF94D, 'M', '淚'), + (0xF94E, 'M', '漏'), + (0xF94F, 'M', '累'), + (0xF950, 'M', '縷'), + 
(0xF951, 'M', '陋'), + (0xF952, 'M', '勒'), + (0xF953, 'M', '肋'), + (0xF954, 'M', '凜'), + (0xF955, 'M', '凌'), + (0xF956, 'M', '稜'), + (0xF957, 'M', '綾'), + (0xF958, 'M', '菱'), + (0xF959, 'M', '陵'), + (0xF95A, 'M', '讀'), + (0xF95B, 'M', '拏'), + (0xF95C, 'M', '樂'), + (0xF95D, 'M', '諾'), + (0xF95E, 'M', '丹'), + (0xF95F, 'M', '寧'), + (0xF960, 'M', '怒'), + (0xF961, 'M', '率'), + (0xF962, 'M', '異'), + (0xF963, 'M', '北'), + (0xF964, 'M', '磻'), + (0xF965, 'M', '便'), + (0xF966, 'M', '復'), + (0xF967, 'M', '不'), + (0xF968, 'M', '泌'), + (0xF969, 'M', '數'), + (0xF96A, 'M', '索'), + (0xF96B, 'M', '參'), + (0xF96C, 'M', '塞'), + (0xF96D, 'M', '省'), + (0xF96E, 'M', '葉'), + (0xF96F, 'M', '說'), + (0xF970, 'M', '殺'), + (0xF971, 'M', '辰'), + (0xF972, 'M', '沈'), + (0xF973, 'M', '拾'), + (0xF974, 'M', '若'), + (0xF975, 'M', '掠'), + (0xF976, 'M', '略'), + (0xF977, 'M', '亮'), + (0xF978, 'M', '兩'), + (0xF979, 'M', '凉'), + (0xF97A, 'M', '梁'), + (0xF97B, 'M', '糧'), + (0xF97C, 'M', '良'), + (0xF97D, 'M', '諒'), + (0xF97E, 'M', '量'), + (0xF97F, 'M', '勵'), + (0xF980, 'M', '呂'), + (0xF981, 'M', '女'), + (0xF982, 'M', '廬'), + (0xF983, 'M', '旅'), + (0xF984, 'M', '濾'), + (0xF985, 'M', '礪'), + (0xF986, 'M', '閭'), + (0xF987, 'M', '驪'), + (0xF988, 'M', '麗'), + (0xF989, 'M', '黎'), + (0xF98A, 'M', '力'), + (0xF98B, 'M', '曆'), + (0xF98C, 'M', '歷'), + (0xF98D, 'M', '轢'), + (0xF98E, 'M', '年'), + (0xF98F, 'M', '憐'), + (0xF990, 'M', '戀'), + (0xF991, 'M', '撚'), + (0xF992, 'M', '漣'), + (0xF993, 'M', '煉'), + (0xF994, 'M', '璉'), + (0xF995, 'M', '秊'), + (0xF996, 'M', '練'), + (0xF997, 'M', '聯'), + (0xF998, 'M', '輦'), + (0xF999, 'M', '蓮'), + (0xF99A, 'M', '連'), + (0xF99B, 'M', '鍊'), + (0xF99C, 'M', '列'), + (0xF99D, 'M', '劣'), + (0xF99E, 'M', '咽'), + (0xF99F, 'M', '烈'), + (0xF9A0, 'M', '裂'), + (0xF9A1, 'M', '說'), + (0xF9A2, 'M', '廉'), + (0xF9A3, 'M', '念'), + (0xF9A4, 'M', '捻'), + (0xF9A5, 'M', '殮'), + (0xF9A6, 'M', '簾'), + (0xF9A7, 'M', '獵'), + (0xF9A8, 'M', '令'), + (0xF9A9, 'M', '囹'), + ] + +def _seg_41() -> 
List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xF9AA, 'M', '寧'), + (0xF9AB, 'M', '嶺'), + (0xF9AC, 'M', '怜'), + (0xF9AD, 'M', '玲'), + (0xF9AE, 'M', '瑩'), + (0xF9AF, 'M', '羚'), + (0xF9B0, 'M', '聆'), + (0xF9B1, 'M', '鈴'), + (0xF9B2, 'M', '零'), + (0xF9B3, 'M', '靈'), + (0xF9B4, 'M', '領'), + (0xF9B5, 'M', '例'), + (0xF9B6, 'M', '禮'), + (0xF9B7, 'M', '醴'), + (0xF9B8, 'M', '隸'), + (0xF9B9, 'M', '惡'), + (0xF9BA, 'M', '了'), + (0xF9BB, 'M', '僚'), + (0xF9BC, 'M', '寮'), + (0xF9BD, 'M', '尿'), + (0xF9BE, 'M', '料'), + (0xF9BF, 'M', '樂'), + (0xF9C0, 'M', '燎'), + (0xF9C1, 'M', '療'), + (0xF9C2, 'M', '蓼'), + (0xF9C3, 'M', '遼'), + (0xF9C4, 'M', '龍'), + (0xF9C5, 'M', '暈'), + (0xF9C6, 'M', '阮'), + (0xF9C7, 'M', '劉'), + (0xF9C8, 'M', '杻'), + (0xF9C9, 'M', '柳'), + (0xF9CA, 'M', '流'), + (0xF9CB, 'M', '溜'), + (0xF9CC, 'M', '琉'), + (0xF9CD, 'M', '留'), + (0xF9CE, 'M', '硫'), + (0xF9CF, 'M', '紐'), + (0xF9D0, 'M', '類'), + (0xF9D1, 'M', '六'), + (0xF9D2, 'M', '戮'), + (0xF9D3, 'M', '陸'), + (0xF9D4, 'M', '倫'), + (0xF9D5, 'M', '崙'), + (0xF9D6, 'M', '淪'), + (0xF9D7, 'M', '輪'), + (0xF9D8, 'M', '律'), + (0xF9D9, 'M', '慄'), + (0xF9DA, 'M', '栗'), + (0xF9DB, 'M', '率'), + (0xF9DC, 'M', '隆'), + (0xF9DD, 'M', '利'), + (0xF9DE, 'M', '吏'), + (0xF9DF, 'M', '履'), + (0xF9E0, 'M', '易'), + (0xF9E1, 'M', '李'), + (0xF9E2, 'M', '梨'), + (0xF9E3, 'M', '泥'), + (0xF9E4, 'M', '理'), + (0xF9E5, 'M', '痢'), + (0xF9E6, 'M', '罹'), + (0xF9E7, 'M', '裏'), + (0xF9E8, 'M', '裡'), + (0xF9E9, 'M', '里'), + (0xF9EA, 'M', '離'), + (0xF9EB, 'M', '匿'), + (0xF9EC, 'M', '溺'), + (0xF9ED, 'M', '吝'), + (0xF9EE, 'M', '燐'), + (0xF9EF, 'M', '璘'), + (0xF9F0, 'M', '藺'), + (0xF9F1, 'M', '隣'), + (0xF9F2, 'M', '鱗'), + (0xF9F3, 'M', '麟'), + (0xF9F4, 'M', '林'), + (0xF9F5, 'M', '淋'), + (0xF9F6, 'M', '臨'), + (0xF9F7, 'M', '立'), + (0xF9F8, 'M', '笠'), + (0xF9F9, 'M', '粒'), + (0xF9FA, 'M', '狀'), + (0xF9FB, 'M', '炙'), + (0xF9FC, 'M', '識'), + (0xF9FD, 'M', '什'), + (0xF9FE, 'M', '茶'), + (0xF9FF, 'M', '刺'), + (0xFA00, 'M', '切'), + (0xFA01, 'M', '度'), 
+ (0xFA02, 'M', '拓'), + (0xFA03, 'M', '糖'), + (0xFA04, 'M', '宅'), + (0xFA05, 'M', '洞'), + (0xFA06, 'M', '暴'), + (0xFA07, 'M', '輻'), + (0xFA08, 'M', '行'), + (0xFA09, 'M', '降'), + (0xFA0A, 'M', '見'), + (0xFA0B, 'M', '廓'), + (0xFA0C, 'M', '兀'), + (0xFA0D, 'M', '嗀'), + ] + +def _seg_42() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFA0E, 'V'), + (0xFA10, 'M', '塚'), + (0xFA11, 'V'), + (0xFA12, 'M', '晴'), + (0xFA13, 'V'), + (0xFA15, 'M', '凞'), + (0xFA16, 'M', '猪'), + (0xFA17, 'M', '益'), + (0xFA18, 'M', '礼'), + (0xFA19, 'M', '神'), + (0xFA1A, 'M', '祥'), + (0xFA1B, 'M', '福'), + (0xFA1C, 'M', '靖'), + (0xFA1D, 'M', '精'), + (0xFA1E, 'M', '羽'), + (0xFA1F, 'V'), + (0xFA20, 'M', '蘒'), + (0xFA21, 'V'), + (0xFA22, 'M', '諸'), + (0xFA23, 'V'), + (0xFA25, 'M', '逸'), + (0xFA26, 'M', '都'), + (0xFA27, 'V'), + (0xFA2A, 'M', '飯'), + (0xFA2B, 'M', '飼'), + (0xFA2C, 'M', '館'), + (0xFA2D, 'M', '鶴'), + (0xFA2E, 'M', '郞'), + (0xFA2F, 'M', '隷'), + (0xFA30, 'M', '侮'), + (0xFA31, 'M', '僧'), + (0xFA32, 'M', '免'), + (0xFA33, 'M', '勉'), + (0xFA34, 'M', '勤'), + (0xFA35, 'M', '卑'), + (0xFA36, 'M', '喝'), + (0xFA37, 'M', '嘆'), + (0xFA38, 'M', '器'), + (0xFA39, 'M', '塀'), + (0xFA3A, 'M', '墨'), + (0xFA3B, 'M', '層'), + (0xFA3C, 'M', '屮'), + (0xFA3D, 'M', '悔'), + (0xFA3E, 'M', '慨'), + (0xFA3F, 'M', '憎'), + (0xFA40, 'M', '懲'), + (0xFA41, 'M', '敏'), + (0xFA42, 'M', '既'), + (0xFA43, 'M', '暑'), + (0xFA44, 'M', '梅'), + (0xFA45, 'M', '海'), + (0xFA46, 'M', '渚'), + (0xFA47, 'M', '漢'), + (0xFA48, 'M', '煮'), + (0xFA49, 'M', '爫'), + (0xFA4A, 'M', '琢'), + (0xFA4B, 'M', '碑'), + (0xFA4C, 'M', '社'), + (0xFA4D, 'M', '祉'), + (0xFA4E, 'M', '祈'), + (0xFA4F, 'M', '祐'), + (0xFA50, 'M', '祖'), + (0xFA51, 'M', '祝'), + (0xFA52, 'M', '禍'), + (0xFA53, 'M', '禎'), + (0xFA54, 'M', '穀'), + (0xFA55, 'M', '突'), + (0xFA56, 'M', '節'), + (0xFA57, 'M', '練'), + (0xFA58, 'M', '縉'), + (0xFA59, 'M', '繁'), + (0xFA5A, 'M', '署'), + (0xFA5B, 'M', '者'), + (0xFA5C, 'M', '臭'), + (0xFA5D, 'M', '艹'), + (0xFA5F, 'M', '著'), + (0xFA60, 
'M', '褐'), + (0xFA61, 'M', '視'), + (0xFA62, 'M', '謁'), + (0xFA63, 'M', '謹'), + (0xFA64, 'M', '賓'), + (0xFA65, 'M', '贈'), + (0xFA66, 'M', '辶'), + (0xFA67, 'M', '逸'), + (0xFA68, 'M', '難'), + (0xFA69, 'M', '響'), + (0xFA6A, 'M', '頻'), + (0xFA6B, 'M', '恵'), + (0xFA6C, 'M', '𤋮'), + (0xFA6D, 'M', '舘'), + (0xFA6E, 'X'), + (0xFA70, 'M', '並'), + (0xFA71, 'M', '况'), + (0xFA72, 'M', '全'), + (0xFA73, 'M', '侀'), + (0xFA74, 'M', '充'), + (0xFA75, 'M', '冀'), + (0xFA76, 'M', '勇'), + (0xFA77, 'M', '勺'), + (0xFA78, 'M', '喝'), + ] + +def _seg_43() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFA79, 'M', '啕'), + (0xFA7A, 'M', '喙'), + (0xFA7B, 'M', '嗢'), + (0xFA7C, 'M', '塚'), + (0xFA7D, 'M', '墳'), + (0xFA7E, 'M', '奄'), + (0xFA7F, 'M', '奔'), + (0xFA80, 'M', '婢'), + (0xFA81, 'M', '嬨'), + (0xFA82, 'M', '廒'), + (0xFA83, 'M', '廙'), + (0xFA84, 'M', '彩'), + (0xFA85, 'M', '徭'), + (0xFA86, 'M', '惘'), + (0xFA87, 'M', '慎'), + (0xFA88, 'M', '愈'), + (0xFA89, 'M', '憎'), + (0xFA8A, 'M', '慠'), + (0xFA8B, 'M', '懲'), + (0xFA8C, 'M', '戴'), + (0xFA8D, 'M', '揄'), + (0xFA8E, 'M', '搜'), + (0xFA8F, 'M', '摒'), + (0xFA90, 'M', '敖'), + (0xFA91, 'M', '晴'), + (0xFA92, 'M', '朗'), + (0xFA93, 'M', '望'), + (0xFA94, 'M', '杖'), + (0xFA95, 'M', '歹'), + (0xFA96, 'M', '殺'), + (0xFA97, 'M', '流'), + (0xFA98, 'M', '滛'), + (0xFA99, 'M', '滋'), + (0xFA9A, 'M', '漢'), + (0xFA9B, 'M', '瀞'), + (0xFA9C, 'M', '煮'), + (0xFA9D, 'M', '瞧'), + (0xFA9E, 'M', '爵'), + (0xFA9F, 'M', '犯'), + (0xFAA0, 'M', '猪'), + (0xFAA1, 'M', '瑱'), + (0xFAA2, 'M', '甆'), + (0xFAA3, 'M', '画'), + (0xFAA4, 'M', '瘝'), + (0xFAA5, 'M', '瘟'), + (0xFAA6, 'M', '益'), + (0xFAA7, 'M', '盛'), + (0xFAA8, 'M', '直'), + (0xFAA9, 'M', '睊'), + (0xFAAA, 'M', '着'), + (0xFAAB, 'M', '磌'), + (0xFAAC, 'M', '窱'), + (0xFAAD, 'M', '節'), + (0xFAAE, 'M', '类'), + (0xFAAF, 'M', '絛'), + (0xFAB0, 'M', '練'), + (0xFAB1, 'M', '缾'), + (0xFAB2, 'M', '者'), + (0xFAB3, 'M', '荒'), + (0xFAB4, 'M', '華'), + (0xFAB5, 'M', '蝹'), + (0xFAB6, 'M', '襁'), + (0xFAB7, 'M', '覆'), + (0xFAB8, 
'M', '視'), + (0xFAB9, 'M', '調'), + (0xFABA, 'M', '諸'), + (0xFABB, 'M', '請'), + (0xFABC, 'M', '謁'), + (0xFABD, 'M', '諾'), + (0xFABE, 'M', '諭'), + (0xFABF, 'M', '謹'), + (0xFAC0, 'M', '變'), + (0xFAC1, 'M', '贈'), + (0xFAC2, 'M', '輸'), + (0xFAC3, 'M', '遲'), + (0xFAC4, 'M', '醙'), + (0xFAC5, 'M', '鉶'), + (0xFAC6, 'M', '陼'), + (0xFAC7, 'M', '難'), + (0xFAC8, 'M', '靖'), + (0xFAC9, 'M', '韛'), + (0xFACA, 'M', '響'), + (0xFACB, 'M', '頋'), + (0xFACC, 'M', '頻'), + (0xFACD, 'M', '鬒'), + (0xFACE, 'M', '龜'), + (0xFACF, 'M', '𢡊'), + (0xFAD0, 'M', '𢡄'), + (0xFAD1, 'M', '𣏕'), + (0xFAD2, 'M', '㮝'), + (0xFAD3, 'M', '䀘'), + (0xFAD4, 'M', '䀹'), + (0xFAD5, 'M', '𥉉'), + (0xFAD6, 'M', '𥳐'), + (0xFAD7, 'M', '𧻓'), + (0xFAD8, 'M', '齃'), + (0xFAD9, 'M', '龎'), + (0xFADA, 'X'), + (0xFB00, 'M', 'ff'), + (0xFB01, 'M', 'fi'), + ] + +def _seg_44() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFB02, 'M', 'fl'), + (0xFB03, 'M', 'ffi'), + (0xFB04, 'M', 'ffl'), + (0xFB05, 'M', 'st'), + (0xFB07, 'X'), + (0xFB13, 'M', 'մն'), + (0xFB14, 'M', 'մե'), + (0xFB15, 'M', 'մի'), + (0xFB16, 'M', 'վն'), + (0xFB17, 'M', 'մխ'), + (0xFB18, 'X'), + (0xFB1D, 'M', 'יִ'), + (0xFB1E, 'V'), + (0xFB1F, 'M', 'ײַ'), + (0xFB20, 'M', 'ע'), + (0xFB21, 'M', 'א'), + (0xFB22, 'M', 'ד'), + (0xFB23, 'M', 'ה'), + (0xFB24, 'M', 'כ'), + (0xFB25, 'M', 'ל'), + (0xFB26, 'M', 'ם'), + (0xFB27, 'M', 'ר'), + (0xFB28, 'M', 'ת'), + (0xFB29, '3', '+'), + (0xFB2A, 'M', 'שׁ'), + (0xFB2B, 'M', 'שׂ'), + (0xFB2C, 'M', 'שּׁ'), + (0xFB2D, 'M', 'שּׂ'), + (0xFB2E, 'M', 'אַ'), + (0xFB2F, 'M', 'אָ'), + (0xFB30, 'M', 'אּ'), + (0xFB31, 'M', 'בּ'), + (0xFB32, 'M', 'גּ'), + (0xFB33, 'M', 'דּ'), + (0xFB34, 'M', 'הּ'), + (0xFB35, 'M', 'וּ'), + (0xFB36, 'M', 'זּ'), + (0xFB37, 'X'), + (0xFB38, 'M', 'טּ'), + (0xFB39, 'M', 'יּ'), + (0xFB3A, 'M', 'ךּ'), + (0xFB3B, 'M', 'כּ'), + (0xFB3C, 'M', 'לּ'), + (0xFB3D, 'X'), + (0xFB3E, 'M', 'מּ'), + (0xFB3F, 'X'), + (0xFB40, 'M', 'נּ'), + (0xFB41, 'M', 'סּ'), + (0xFB42, 'X'), + (0xFB43, 'M', 'ףּ'), + (0xFB44, 
'M', 'פּ'), + (0xFB45, 'X'), + (0xFB46, 'M', 'צּ'), + (0xFB47, 'M', 'קּ'), + (0xFB48, 'M', 'רּ'), + (0xFB49, 'M', 'שּ'), + (0xFB4A, 'M', 'תּ'), + (0xFB4B, 'M', 'וֹ'), + (0xFB4C, 'M', 'בֿ'), + (0xFB4D, 'M', 'כֿ'), + (0xFB4E, 'M', 'פֿ'), + (0xFB4F, 'M', 'אל'), + (0xFB50, 'M', 'ٱ'), + (0xFB52, 'M', 'ٻ'), + (0xFB56, 'M', 'پ'), + (0xFB5A, 'M', 'ڀ'), + (0xFB5E, 'M', 'ٺ'), + (0xFB62, 'M', 'ٿ'), + (0xFB66, 'M', 'ٹ'), + (0xFB6A, 'M', 'ڤ'), + (0xFB6E, 'M', 'ڦ'), + (0xFB72, 'M', 'ڄ'), + (0xFB76, 'M', 'ڃ'), + (0xFB7A, 'M', 'چ'), + (0xFB7E, 'M', 'ڇ'), + (0xFB82, 'M', 'ڍ'), + (0xFB84, 'M', 'ڌ'), + (0xFB86, 'M', 'ڎ'), + (0xFB88, 'M', 'ڈ'), + (0xFB8A, 'M', 'ژ'), + (0xFB8C, 'M', 'ڑ'), + (0xFB8E, 'M', 'ک'), + (0xFB92, 'M', 'گ'), + (0xFB96, 'M', 'ڳ'), + (0xFB9A, 'M', 'ڱ'), + (0xFB9E, 'M', 'ں'), + (0xFBA0, 'M', 'ڻ'), + (0xFBA4, 'M', 'ۀ'), + (0xFBA6, 'M', 'ہ'), + (0xFBAA, 'M', 'ھ'), + (0xFBAE, 'M', 'ے'), + (0xFBB0, 'M', 'ۓ'), + (0xFBB2, 'V'), + (0xFBC3, 'X'), + (0xFBD3, 'M', 'ڭ'), + (0xFBD7, 'M', 'ۇ'), + (0xFBD9, 'M', 'ۆ'), + (0xFBDB, 'M', 'ۈ'), + (0xFBDD, 'M', 'ۇٴ'), + (0xFBDE, 'M', 'ۋ'), + ] + +def _seg_45() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFBE0, 'M', 'ۅ'), + (0xFBE2, 'M', 'ۉ'), + (0xFBE4, 'M', 'ې'), + (0xFBE8, 'M', 'ى'), + (0xFBEA, 'M', 'ئا'), + (0xFBEC, 'M', 'ئە'), + (0xFBEE, 'M', 'ئو'), + (0xFBF0, 'M', 'ئۇ'), + (0xFBF2, 'M', 'ئۆ'), + (0xFBF4, 'M', 'ئۈ'), + (0xFBF6, 'M', 'ئې'), + (0xFBF9, 'M', 'ئى'), + (0xFBFC, 'M', 'ی'), + (0xFC00, 'M', 'ئج'), + (0xFC01, 'M', 'ئح'), + (0xFC02, 'M', 'ئم'), + (0xFC03, 'M', 'ئى'), + (0xFC04, 'M', 'ئي'), + (0xFC05, 'M', 'بج'), + (0xFC06, 'M', 'بح'), + (0xFC07, 'M', 'بخ'), + (0xFC08, 'M', 'بم'), + (0xFC09, 'M', 'بى'), + (0xFC0A, 'M', 'بي'), + (0xFC0B, 'M', 'تج'), + (0xFC0C, 'M', 'تح'), + (0xFC0D, 'M', 'تخ'), + (0xFC0E, 'M', 'تم'), + (0xFC0F, 'M', 'تى'), + (0xFC10, 'M', 'تي'), + (0xFC11, 'M', 'ثج'), + (0xFC12, 'M', 'ثم'), + (0xFC13, 'M', 'ثى'), + (0xFC14, 'M', 'ثي'), + (0xFC15, 'M', 'جح'), + (0xFC16, 'M', 'جم'), + 
(0xFC17, 'M', 'حج'), + (0xFC18, 'M', 'حم'), + (0xFC19, 'M', 'خج'), + (0xFC1A, 'M', 'خح'), + (0xFC1B, 'M', 'خم'), + (0xFC1C, 'M', 'سج'), + (0xFC1D, 'M', 'سح'), + (0xFC1E, 'M', 'سخ'), + (0xFC1F, 'M', 'سم'), + (0xFC20, 'M', 'صح'), + (0xFC21, 'M', 'صم'), + (0xFC22, 'M', 'ضج'), + (0xFC23, 'M', 'ضح'), + (0xFC24, 'M', 'ضخ'), + (0xFC25, 'M', 'ضم'), + (0xFC26, 'M', 'طح'), + (0xFC27, 'M', 'طم'), + (0xFC28, 'M', 'ظم'), + (0xFC29, 'M', 'عج'), + (0xFC2A, 'M', 'عم'), + (0xFC2B, 'M', 'غج'), + (0xFC2C, 'M', 'غم'), + (0xFC2D, 'M', 'فج'), + (0xFC2E, 'M', 'فح'), + (0xFC2F, 'M', 'فخ'), + (0xFC30, 'M', 'فم'), + (0xFC31, 'M', 'فى'), + (0xFC32, 'M', 'في'), + (0xFC33, 'M', 'قح'), + (0xFC34, 'M', 'قم'), + (0xFC35, 'M', 'قى'), + (0xFC36, 'M', 'قي'), + (0xFC37, 'M', 'كا'), + (0xFC38, 'M', 'كج'), + (0xFC39, 'M', 'كح'), + (0xFC3A, 'M', 'كخ'), + (0xFC3B, 'M', 'كل'), + (0xFC3C, 'M', 'كم'), + (0xFC3D, 'M', 'كى'), + (0xFC3E, 'M', 'كي'), + (0xFC3F, 'M', 'لج'), + (0xFC40, 'M', 'لح'), + (0xFC41, 'M', 'لخ'), + (0xFC42, 'M', 'لم'), + (0xFC43, 'M', 'لى'), + (0xFC44, 'M', 'لي'), + (0xFC45, 'M', 'مج'), + (0xFC46, 'M', 'مح'), + (0xFC47, 'M', 'مخ'), + (0xFC48, 'M', 'مم'), + (0xFC49, 'M', 'مى'), + (0xFC4A, 'M', 'مي'), + (0xFC4B, 'M', 'نج'), + (0xFC4C, 'M', 'نح'), + (0xFC4D, 'M', 'نخ'), + (0xFC4E, 'M', 'نم'), + (0xFC4F, 'M', 'نى'), + (0xFC50, 'M', 'ني'), + (0xFC51, 'M', 'هج'), + (0xFC52, 'M', 'هم'), + (0xFC53, 'M', 'هى'), + (0xFC54, 'M', 'هي'), + (0xFC55, 'M', 'يج'), + (0xFC56, 'M', 'يح'), + ] + +def _seg_46() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFC57, 'M', 'يخ'), + (0xFC58, 'M', 'يم'), + (0xFC59, 'M', 'يى'), + (0xFC5A, 'M', 'يي'), + (0xFC5B, 'M', 'ذٰ'), + (0xFC5C, 'M', 'رٰ'), + (0xFC5D, 'M', 'ىٰ'), + (0xFC5E, '3', ' ٌّ'), + (0xFC5F, '3', ' ٍّ'), + (0xFC60, '3', ' َّ'), + (0xFC61, '3', ' ُّ'), + (0xFC62, '3', ' ِّ'), + (0xFC63, '3', ' ّٰ'), + (0xFC64, 'M', 'ئر'), + (0xFC65, 'M', 'ئز'), + (0xFC66, 'M', 'ئم'), + (0xFC67, 'M', 'ئن'), + (0xFC68, 'M', 'ئى'), + (0xFC69, 'M', 'ئي'), 
+ (0xFC6A, 'M', 'بر'), + (0xFC6B, 'M', 'بز'), + (0xFC6C, 'M', 'بم'), + (0xFC6D, 'M', 'بن'), + (0xFC6E, 'M', 'بى'), + (0xFC6F, 'M', 'بي'), + (0xFC70, 'M', 'تر'), + (0xFC71, 'M', 'تز'), + (0xFC72, 'M', 'تم'), + (0xFC73, 'M', 'تن'), + (0xFC74, 'M', 'تى'), + (0xFC75, 'M', 'تي'), + (0xFC76, 'M', 'ثر'), + (0xFC77, 'M', 'ثز'), + (0xFC78, 'M', 'ثم'), + (0xFC79, 'M', 'ثن'), + (0xFC7A, 'M', 'ثى'), + (0xFC7B, 'M', 'ثي'), + (0xFC7C, 'M', 'فى'), + (0xFC7D, 'M', 'في'), + (0xFC7E, 'M', 'قى'), + (0xFC7F, 'M', 'قي'), + (0xFC80, 'M', 'كا'), + (0xFC81, 'M', 'كل'), + (0xFC82, 'M', 'كم'), + (0xFC83, 'M', 'كى'), + (0xFC84, 'M', 'كي'), + (0xFC85, 'M', 'لم'), + (0xFC86, 'M', 'لى'), + (0xFC87, 'M', 'لي'), + (0xFC88, 'M', 'ما'), + (0xFC89, 'M', 'مم'), + (0xFC8A, 'M', 'نر'), + (0xFC8B, 'M', 'نز'), + (0xFC8C, 'M', 'نم'), + (0xFC8D, 'M', 'نن'), + (0xFC8E, 'M', 'نى'), + (0xFC8F, 'M', 'ني'), + (0xFC90, 'M', 'ىٰ'), + (0xFC91, 'M', 'ير'), + (0xFC92, 'M', 'يز'), + (0xFC93, 'M', 'يم'), + (0xFC94, 'M', 'ين'), + (0xFC95, 'M', 'يى'), + (0xFC96, 'M', 'يي'), + (0xFC97, 'M', 'ئج'), + (0xFC98, 'M', 'ئح'), + (0xFC99, 'M', 'ئخ'), + (0xFC9A, 'M', 'ئم'), + (0xFC9B, 'M', 'ئه'), + (0xFC9C, 'M', 'بج'), + (0xFC9D, 'M', 'بح'), + (0xFC9E, 'M', 'بخ'), + (0xFC9F, 'M', 'بم'), + (0xFCA0, 'M', 'به'), + (0xFCA1, 'M', 'تج'), + (0xFCA2, 'M', 'تح'), + (0xFCA3, 'M', 'تخ'), + (0xFCA4, 'M', 'تم'), + (0xFCA5, 'M', 'ته'), + (0xFCA6, 'M', 'ثم'), + (0xFCA7, 'M', 'جح'), + (0xFCA8, 'M', 'جم'), + (0xFCA9, 'M', 'حج'), + (0xFCAA, 'M', 'حم'), + (0xFCAB, 'M', 'خج'), + (0xFCAC, 'M', 'خم'), + (0xFCAD, 'M', 'سج'), + (0xFCAE, 'M', 'سح'), + (0xFCAF, 'M', 'سخ'), + (0xFCB0, 'M', 'سم'), + (0xFCB1, 'M', 'صح'), + (0xFCB2, 'M', 'صخ'), + (0xFCB3, 'M', 'صم'), + (0xFCB4, 'M', 'ضج'), + (0xFCB5, 'M', 'ضح'), + (0xFCB6, 'M', 'ضخ'), + (0xFCB7, 'M', 'ضم'), + (0xFCB8, 'M', 'طح'), + (0xFCB9, 'M', 'ظم'), + (0xFCBA, 'M', 'عج'), + ] + +def _seg_47() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFCBB, 'M', 'عم'), + (0xFCBC, 'M', 'غج'), + 
(0xFCBD, 'M', 'غم'), + (0xFCBE, 'M', 'فج'), + (0xFCBF, 'M', 'فح'), + (0xFCC0, 'M', 'فخ'), + (0xFCC1, 'M', 'فم'), + (0xFCC2, 'M', 'قح'), + (0xFCC3, 'M', 'قم'), + (0xFCC4, 'M', 'كج'), + (0xFCC5, 'M', 'كح'), + (0xFCC6, 'M', 'كخ'), + (0xFCC7, 'M', 'كل'), + (0xFCC8, 'M', 'كم'), + (0xFCC9, 'M', 'لج'), + (0xFCCA, 'M', 'لح'), + (0xFCCB, 'M', 'لخ'), + (0xFCCC, 'M', 'لم'), + (0xFCCD, 'M', 'له'), + (0xFCCE, 'M', 'مج'), + (0xFCCF, 'M', 'مح'), + (0xFCD0, 'M', 'مخ'), + (0xFCD1, 'M', 'مم'), + (0xFCD2, 'M', 'نج'), + (0xFCD3, 'M', 'نح'), + (0xFCD4, 'M', 'نخ'), + (0xFCD5, 'M', 'نم'), + (0xFCD6, 'M', 'نه'), + (0xFCD7, 'M', 'هج'), + (0xFCD8, 'M', 'هم'), + (0xFCD9, 'M', 'هٰ'), + (0xFCDA, 'M', 'يج'), + (0xFCDB, 'M', 'يح'), + (0xFCDC, 'M', 'يخ'), + (0xFCDD, 'M', 'يم'), + (0xFCDE, 'M', 'يه'), + (0xFCDF, 'M', 'ئم'), + (0xFCE0, 'M', 'ئه'), + (0xFCE1, 'M', 'بم'), + (0xFCE2, 'M', 'به'), + (0xFCE3, 'M', 'تم'), + (0xFCE4, 'M', 'ته'), + (0xFCE5, 'M', 'ثم'), + (0xFCE6, 'M', 'ثه'), + (0xFCE7, 'M', 'سم'), + (0xFCE8, 'M', 'سه'), + (0xFCE9, 'M', 'شم'), + (0xFCEA, 'M', 'شه'), + (0xFCEB, 'M', 'كل'), + (0xFCEC, 'M', 'كم'), + (0xFCED, 'M', 'لم'), + (0xFCEE, 'M', 'نم'), + (0xFCEF, 'M', 'نه'), + (0xFCF0, 'M', 'يم'), + (0xFCF1, 'M', 'يه'), + (0xFCF2, 'M', 'ـَّ'), + (0xFCF3, 'M', 'ـُّ'), + (0xFCF4, 'M', 'ـِّ'), + (0xFCF5, 'M', 'طى'), + (0xFCF6, 'M', 'طي'), + (0xFCF7, 'M', 'عى'), + (0xFCF8, 'M', 'عي'), + (0xFCF9, 'M', 'غى'), + (0xFCFA, 'M', 'غي'), + (0xFCFB, 'M', 'سى'), + (0xFCFC, 'M', 'سي'), + (0xFCFD, 'M', 'شى'), + (0xFCFE, 'M', 'شي'), + (0xFCFF, 'M', 'حى'), + (0xFD00, 'M', 'حي'), + (0xFD01, 'M', 'جى'), + (0xFD02, 'M', 'جي'), + (0xFD03, 'M', 'خى'), + (0xFD04, 'M', 'خي'), + (0xFD05, 'M', 'صى'), + (0xFD06, 'M', 'صي'), + (0xFD07, 'M', 'ضى'), + (0xFD08, 'M', 'ضي'), + (0xFD09, 'M', 'شج'), + (0xFD0A, 'M', 'شح'), + (0xFD0B, 'M', 'شخ'), + (0xFD0C, 'M', 'شم'), + (0xFD0D, 'M', 'شر'), + (0xFD0E, 'M', 'سر'), + (0xFD0F, 'M', 'صر'), + (0xFD10, 'M', 'ضر'), + (0xFD11, 'M', 'طى'), + (0xFD12, 'M', 'طي'), + (0xFD13, 'M', 
'عى'), + (0xFD14, 'M', 'عي'), + (0xFD15, 'M', 'غى'), + (0xFD16, 'M', 'غي'), + (0xFD17, 'M', 'سى'), + (0xFD18, 'M', 'سي'), + (0xFD19, 'M', 'شى'), + (0xFD1A, 'M', 'شي'), + (0xFD1B, 'M', 'حى'), + (0xFD1C, 'M', 'حي'), + (0xFD1D, 'M', 'جى'), + (0xFD1E, 'M', 'جي'), + ] + +def _seg_48() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFD1F, 'M', 'خى'), + (0xFD20, 'M', 'خي'), + (0xFD21, 'M', 'صى'), + (0xFD22, 'M', 'صي'), + (0xFD23, 'M', 'ضى'), + (0xFD24, 'M', 'ضي'), + (0xFD25, 'M', 'شج'), + (0xFD26, 'M', 'شح'), + (0xFD27, 'M', 'شخ'), + (0xFD28, 'M', 'شم'), + (0xFD29, 'M', 'شر'), + (0xFD2A, 'M', 'سر'), + (0xFD2B, 'M', 'صر'), + (0xFD2C, 'M', 'ضر'), + (0xFD2D, 'M', 'شج'), + (0xFD2E, 'M', 'شح'), + (0xFD2F, 'M', 'شخ'), + (0xFD30, 'M', 'شم'), + (0xFD31, 'M', 'سه'), + (0xFD32, 'M', 'شه'), + (0xFD33, 'M', 'طم'), + (0xFD34, 'M', 'سج'), + (0xFD35, 'M', 'سح'), + (0xFD36, 'M', 'سخ'), + (0xFD37, 'M', 'شج'), + (0xFD38, 'M', 'شح'), + (0xFD39, 'M', 'شخ'), + (0xFD3A, 'M', 'طم'), + (0xFD3B, 'M', 'ظم'), + (0xFD3C, 'M', 'اً'), + (0xFD3E, 'V'), + (0xFD50, 'M', 'تجم'), + (0xFD51, 'M', 'تحج'), + (0xFD53, 'M', 'تحم'), + (0xFD54, 'M', 'تخم'), + (0xFD55, 'M', 'تمج'), + (0xFD56, 'M', 'تمح'), + (0xFD57, 'M', 'تمخ'), + (0xFD58, 'M', 'جمح'), + (0xFD5A, 'M', 'حمي'), + (0xFD5B, 'M', 'حمى'), + (0xFD5C, 'M', 'سحج'), + (0xFD5D, 'M', 'سجح'), + (0xFD5E, 'M', 'سجى'), + (0xFD5F, 'M', 'سمح'), + (0xFD61, 'M', 'سمج'), + (0xFD62, 'M', 'سمم'), + (0xFD64, 'M', 'صحح'), + (0xFD66, 'M', 'صمم'), + (0xFD67, 'M', 'شحم'), + (0xFD69, 'M', 'شجي'), + (0xFD6A, 'M', 'شمخ'), + (0xFD6C, 'M', 'شمم'), + (0xFD6E, 'M', 'ضحى'), + (0xFD6F, 'M', 'ضخم'), + (0xFD71, 'M', 'طمح'), + (0xFD73, 'M', 'طمم'), + (0xFD74, 'M', 'طمي'), + (0xFD75, 'M', 'عجم'), + (0xFD76, 'M', 'عمم'), + (0xFD78, 'M', 'عمى'), + (0xFD79, 'M', 'غمم'), + (0xFD7A, 'M', 'غمي'), + (0xFD7B, 'M', 'غمى'), + (0xFD7C, 'M', 'فخم'), + (0xFD7E, 'M', 'قمح'), + (0xFD7F, 'M', 'قمم'), + (0xFD80, 'M', 'لحم'), + (0xFD81, 'M', 'لحي'), + (0xFD82, 'M', 'لحى'), + 
(0xFD83, 'M', 'لجج'), + (0xFD85, 'M', 'لخم'), + (0xFD87, 'M', 'لمح'), + (0xFD89, 'M', 'محج'), + (0xFD8A, 'M', 'محم'), + (0xFD8B, 'M', 'محي'), + (0xFD8C, 'M', 'مجح'), + (0xFD8D, 'M', 'مجم'), + (0xFD8E, 'M', 'مخج'), + (0xFD8F, 'M', 'مخم'), + (0xFD90, 'X'), + (0xFD92, 'M', 'مجخ'), + (0xFD93, 'M', 'همج'), + (0xFD94, 'M', 'همم'), + (0xFD95, 'M', 'نحم'), + (0xFD96, 'M', 'نحى'), + (0xFD97, 'M', 'نجم'), + (0xFD99, 'M', 'نجى'), + (0xFD9A, 'M', 'نمي'), + (0xFD9B, 'M', 'نمى'), + (0xFD9C, 'M', 'يمم'), + (0xFD9E, 'M', 'بخي'), + (0xFD9F, 'M', 'تجي'), + (0xFDA0, 'M', 'تجى'), + (0xFDA1, 'M', 'تخي'), + (0xFDA2, 'M', 'تخى'), + (0xFDA3, 'M', 'تمي'), + (0xFDA4, 'M', 'تمى'), + (0xFDA5, 'M', 'جمي'), + (0xFDA6, 'M', 'جحى'), + ] + +def _seg_49() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFDA7, 'M', 'جمى'), + (0xFDA8, 'M', 'سخى'), + (0xFDA9, 'M', 'صحي'), + (0xFDAA, 'M', 'شحي'), + (0xFDAB, 'M', 'ضحي'), + (0xFDAC, 'M', 'لجي'), + (0xFDAD, 'M', 'لمي'), + (0xFDAE, 'M', 'يحي'), + (0xFDAF, 'M', 'يجي'), + (0xFDB0, 'M', 'يمي'), + (0xFDB1, 'M', 'ممي'), + (0xFDB2, 'M', 'قمي'), + (0xFDB3, 'M', 'نحي'), + (0xFDB4, 'M', 'قمح'), + (0xFDB5, 'M', 'لحم'), + (0xFDB6, 'M', 'عمي'), + (0xFDB7, 'M', 'كمي'), + (0xFDB8, 'M', 'نجح'), + (0xFDB9, 'M', 'مخي'), + (0xFDBA, 'M', 'لجم'), + (0xFDBB, 'M', 'كمم'), + (0xFDBC, 'M', 'لجم'), + (0xFDBD, 'M', 'نجح'), + (0xFDBE, 'M', 'جحي'), + (0xFDBF, 'M', 'حجي'), + (0xFDC0, 'M', 'مجي'), + (0xFDC1, 'M', 'فمي'), + (0xFDC2, 'M', 'بحي'), + (0xFDC3, 'M', 'كمم'), + (0xFDC4, 'M', 'عجم'), + (0xFDC5, 'M', 'صمم'), + (0xFDC6, 'M', 'سخي'), + (0xFDC7, 'M', 'نجي'), + (0xFDC8, 'X'), + (0xFDCF, 'V'), + (0xFDD0, 'X'), + (0xFDF0, 'M', 'صلے'), + (0xFDF1, 'M', 'قلے'), + (0xFDF2, 'M', 'الله'), + (0xFDF3, 'M', 'اكبر'), + (0xFDF4, 'M', 'محمد'), + (0xFDF5, 'M', 'صلعم'), + (0xFDF6, 'M', 'رسول'), + (0xFDF7, 'M', 'عليه'), + (0xFDF8, 'M', 'وسلم'), + (0xFDF9, 'M', 'صلى'), + (0xFDFA, '3', 'صلى الله عليه وسلم'), + (0xFDFB, '3', 'جل جلاله'), + (0xFDFC, 'M', 'ریال'), + (0xFDFD, 'V'), + 
(0xFE00, 'I'), + (0xFE10, '3', ','), + (0xFE11, 'M', '、'), + (0xFE12, 'X'), + (0xFE13, '3', ':'), + (0xFE14, '3', ';'), + (0xFE15, '3', '!'), + (0xFE16, '3', '?'), + (0xFE17, 'M', '〖'), + (0xFE18, 'M', '〗'), + (0xFE19, 'X'), + (0xFE20, 'V'), + (0xFE30, 'X'), + (0xFE31, 'M', '—'), + (0xFE32, 'M', '–'), + (0xFE33, '3', '_'), + (0xFE35, '3', '('), + (0xFE36, '3', ')'), + (0xFE37, '3', '{'), + (0xFE38, '3', '}'), + (0xFE39, 'M', '〔'), + (0xFE3A, 'M', '〕'), + (0xFE3B, 'M', '【'), + (0xFE3C, 'M', '】'), + (0xFE3D, 'M', '《'), + (0xFE3E, 'M', '》'), + (0xFE3F, 'M', '〈'), + (0xFE40, 'M', '〉'), + (0xFE41, 'M', '「'), + (0xFE42, 'M', '」'), + (0xFE43, 'M', '『'), + (0xFE44, 'M', '』'), + (0xFE45, 'V'), + (0xFE47, '3', '['), + (0xFE48, '3', ']'), + (0xFE49, '3', ' ̅'), + (0xFE4D, '3', '_'), + (0xFE50, '3', ','), + (0xFE51, 'M', '、'), + (0xFE52, 'X'), + (0xFE54, '3', ';'), + (0xFE55, '3', ':'), + (0xFE56, '3', '?'), + (0xFE57, '3', '!'), + (0xFE58, 'M', '—'), + (0xFE59, '3', '('), + (0xFE5A, '3', ')'), + (0xFE5B, '3', '{'), + (0xFE5C, '3', '}'), + (0xFE5D, 'M', '〔'), + ] + +def _seg_50() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFE5E, 'M', '〕'), + (0xFE5F, '3', '#'), + (0xFE60, '3', '&'), + (0xFE61, '3', '*'), + (0xFE62, '3', '+'), + (0xFE63, 'M', '-'), + (0xFE64, '3', '<'), + (0xFE65, '3', '>'), + (0xFE66, '3', '='), + (0xFE67, 'X'), + (0xFE68, '3', '\\'), + (0xFE69, '3', '$'), + (0xFE6A, '3', '%'), + (0xFE6B, '3', '@'), + (0xFE6C, 'X'), + (0xFE70, '3', ' ً'), + (0xFE71, 'M', 'ـً'), + (0xFE72, '3', ' ٌ'), + (0xFE73, 'V'), + (0xFE74, '3', ' ٍ'), + (0xFE75, 'X'), + (0xFE76, '3', ' َ'), + (0xFE77, 'M', 'ـَ'), + (0xFE78, '3', ' ُ'), + (0xFE79, 'M', 'ـُ'), + (0xFE7A, '3', ' ِ'), + (0xFE7B, 'M', 'ـِ'), + (0xFE7C, '3', ' ّ'), + (0xFE7D, 'M', 'ـّ'), + (0xFE7E, '3', ' ْ'), + (0xFE7F, 'M', 'ـْ'), + (0xFE80, 'M', 'ء'), + (0xFE81, 'M', 'آ'), + (0xFE83, 'M', 'أ'), + (0xFE85, 'M', 'ؤ'), + (0xFE87, 'M', 'إ'), + (0xFE89, 'M', 'ئ'), + (0xFE8D, 'M', 'ا'), + (0xFE8F, 'M', 
'ب'), + (0xFE93, 'M', 'ة'), + (0xFE95, 'M', 'ت'), + (0xFE99, 'M', 'ث'), + (0xFE9D, 'M', 'ج'), + (0xFEA1, 'M', 'ح'), + (0xFEA5, 'M', 'خ'), + (0xFEA9, 'M', 'د'), + (0xFEAB, 'M', 'ذ'), + (0xFEAD, 'M', 'ر'), + (0xFEAF, 'M', 'ز'), + (0xFEB1, 'M', 'س'), + (0xFEB5, 'M', 'ش'), + (0xFEB9, 'M', 'ص'), + (0xFEBD, 'M', 'ض'), + (0xFEC1, 'M', 'ط'), + (0xFEC5, 'M', 'ظ'), + (0xFEC9, 'M', 'ع'), + (0xFECD, 'M', 'غ'), + (0xFED1, 'M', 'ف'), + (0xFED5, 'M', 'ق'), + (0xFED9, 'M', 'ك'), + (0xFEDD, 'M', 'ل'), + (0xFEE1, 'M', 'م'), + (0xFEE5, 'M', 'ن'), + (0xFEE9, 'M', 'ه'), + (0xFEED, 'M', 'و'), + (0xFEEF, 'M', 'ى'), + (0xFEF1, 'M', 'ي'), + (0xFEF5, 'M', 'لآ'), + (0xFEF7, 'M', 'لأ'), + (0xFEF9, 'M', 'لإ'), + (0xFEFB, 'M', 'لا'), + (0xFEFD, 'X'), + (0xFEFF, 'I'), + (0xFF00, 'X'), + (0xFF01, '3', '!'), + (0xFF02, '3', '"'), + (0xFF03, '3', '#'), + (0xFF04, '3', '$'), + (0xFF05, '3', '%'), + (0xFF06, '3', '&'), + (0xFF07, '3', '\''), + (0xFF08, '3', '('), + (0xFF09, '3', ')'), + (0xFF0A, '3', '*'), + (0xFF0B, '3', '+'), + (0xFF0C, '3', ','), + (0xFF0D, 'M', '-'), + (0xFF0E, 'M', '.'), + (0xFF0F, '3', '/'), + (0xFF10, 'M', '0'), + (0xFF11, 'M', '1'), + (0xFF12, 'M', '2'), + (0xFF13, 'M', '3'), + (0xFF14, 'M', '4'), + (0xFF15, 'M', '5'), + (0xFF16, 'M', '6'), + (0xFF17, 'M', '7'), + (0xFF18, 'M', '8'), + (0xFF19, 'M', '9'), + (0xFF1A, '3', ':'), + ] + +def _seg_51() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFF1B, '3', ';'), + (0xFF1C, '3', '<'), + (0xFF1D, '3', '='), + (0xFF1E, '3', '>'), + (0xFF1F, '3', '?'), + (0xFF20, '3', '@'), + (0xFF21, 'M', 'a'), + (0xFF22, 'M', 'b'), + (0xFF23, 'M', 'c'), + (0xFF24, 'M', 'd'), + (0xFF25, 'M', 'e'), + (0xFF26, 'M', 'f'), + (0xFF27, 'M', 'g'), + (0xFF28, 'M', 'h'), + (0xFF29, 'M', 'i'), + (0xFF2A, 'M', 'j'), + (0xFF2B, 'M', 'k'), + (0xFF2C, 'M', 'l'), + (0xFF2D, 'M', 'm'), + (0xFF2E, 'M', 'n'), + (0xFF2F, 'M', 'o'), + (0xFF30, 'M', 'p'), + (0xFF31, 'M', 'q'), + (0xFF32, 'M', 'r'), + (0xFF33, 'M', 's'), + (0xFF34, 'M', 't'), + 
(0xFF35, 'M', 'u'), + (0xFF36, 'M', 'v'), + (0xFF37, 'M', 'w'), + (0xFF38, 'M', 'x'), + (0xFF39, 'M', 'y'), + (0xFF3A, 'M', 'z'), + (0xFF3B, '3', '['), + (0xFF3C, '3', '\\'), + (0xFF3D, '3', ']'), + (0xFF3E, '3', '^'), + (0xFF3F, '3', '_'), + (0xFF40, '3', '`'), + (0xFF41, 'M', 'a'), + (0xFF42, 'M', 'b'), + (0xFF43, 'M', 'c'), + (0xFF44, 'M', 'd'), + (0xFF45, 'M', 'e'), + (0xFF46, 'M', 'f'), + (0xFF47, 'M', 'g'), + (0xFF48, 'M', 'h'), + (0xFF49, 'M', 'i'), + (0xFF4A, 'M', 'j'), + (0xFF4B, 'M', 'k'), + (0xFF4C, 'M', 'l'), + (0xFF4D, 'M', 'm'), + (0xFF4E, 'M', 'n'), + (0xFF4F, 'M', 'o'), + (0xFF50, 'M', 'p'), + (0xFF51, 'M', 'q'), + (0xFF52, 'M', 'r'), + (0xFF53, 'M', 's'), + (0xFF54, 'M', 't'), + (0xFF55, 'M', 'u'), + (0xFF56, 'M', 'v'), + (0xFF57, 'M', 'w'), + (0xFF58, 'M', 'x'), + (0xFF59, 'M', 'y'), + (0xFF5A, 'M', 'z'), + (0xFF5B, '3', '{'), + (0xFF5C, '3', '|'), + (0xFF5D, '3', '}'), + (0xFF5E, '3', '~'), + (0xFF5F, 'M', '⦅'), + (0xFF60, 'M', '⦆'), + (0xFF61, 'M', '.'), + (0xFF62, 'M', '「'), + (0xFF63, 'M', '」'), + (0xFF64, 'M', '、'), + (0xFF65, 'M', '・'), + (0xFF66, 'M', 'ヲ'), + (0xFF67, 'M', 'ァ'), + (0xFF68, 'M', 'ィ'), + (0xFF69, 'M', 'ゥ'), + (0xFF6A, 'M', 'ェ'), + (0xFF6B, 'M', 'ォ'), + (0xFF6C, 'M', 'ャ'), + (0xFF6D, 'M', 'ュ'), + (0xFF6E, 'M', 'ョ'), + (0xFF6F, 'M', 'ッ'), + (0xFF70, 'M', 'ー'), + (0xFF71, 'M', 'ア'), + (0xFF72, 'M', 'イ'), + (0xFF73, 'M', 'ウ'), + (0xFF74, 'M', 'エ'), + (0xFF75, 'M', 'オ'), + (0xFF76, 'M', 'カ'), + (0xFF77, 'M', 'キ'), + (0xFF78, 'M', 'ク'), + (0xFF79, 'M', 'ケ'), + (0xFF7A, 'M', 'コ'), + (0xFF7B, 'M', 'サ'), + (0xFF7C, 'M', 'シ'), + (0xFF7D, 'M', 'ス'), + (0xFF7E, 'M', 'セ'), + ] + +def _seg_52() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFF7F, 'M', 'ソ'), + (0xFF80, 'M', 'タ'), + (0xFF81, 'M', 'チ'), + (0xFF82, 'M', 'ツ'), + (0xFF83, 'M', 'テ'), + (0xFF84, 'M', 'ト'), + (0xFF85, 'M', 'ナ'), + (0xFF86, 'M', 'ニ'), + (0xFF87, 'M', 'ヌ'), + (0xFF88, 'M', 'ネ'), + (0xFF89, 'M', 'ノ'), + (0xFF8A, 'M', 'ハ'), + (0xFF8B, 'M', 'ヒ'), 
+ (0xFF8C, 'M', 'フ'), + (0xFF8D, 'M', 'ヘ'), + (0xFF8E, 'M', 'ホ'), + (0xFF8F, 'M', 'マ'), + (0xFF90, 'M', 'ミ'), + (0xFF91, 'M', 'ム'), + (0xFF92, 'M', 'メ'), + (0xFF93, 'M', 'モ'), + (0xFF94, 'M', 'ヤ'), + (0xFF95, 'M', 'ユ'), + (0xFF96, 'M', 'ヨ'), + (0xFF97, 'M', 'ラ'), + (0xFF98, 'M', 'リ'), + (0xFF99, 'M', 'ル'), + (0xFF9A, 'M', 'レ'), + (0xFF9B, 'M', 'ロ'), + (0xFF9C, 'M', 'ワ'), + (0xFF9D, 'M', 'ン'), + (0xFF9E, 'M', '゙'), + (0xFF9F, 'M', '゚'), + (0xFFA0, 'X'), + (0xFFA1, 'M', 'ᄀ'), + (0xFFA2, 'M', 'ᄁ'), + (0xFFA3, 'M', 'ᆪ'), + (0xFFA4, 'M', 'ᄂ'), + (0xFFA5, 'M', 'ᆬ'), + (0xFFA6, 'M', 'ᆭ'), + (0xFFA7, 'M', 'ᄃ'), + (0xFFA8, 'M', 'ᄄ'), + (0xFFA9, 'M', 'ᄅ'), + (0xFFAA, 'M', 'ᆰ'), + (0xFFAB, 'M', 'ᆱ'), + (0xFFAC, 'M', 'ᆲ'), + (0xFFAD, 'M', 'ᆳ'), + (0xFFAE, 'M', 'ᆴ'), + (0xFFAF, 'M', 'ᆵ'), + (0xFFB0, 'M', 'ᄚ'), + (0xFFB1, 'M', 'ᄆ'), + (0xFFB2, 'M', 'ᄇ'), + (0xFFB3, 'M', 'ᄈ'), + (0xFFB4, 'M', 'ᄡ'), + (0xFFB5, 'M', 'ᄉ'), + (0xFFB6, 'M', 'ᄊ'), + (0xFFB7, 'M', 'ᄋ'), + (0xFFB8, 'M', 'ᄌ'), + (0xFFB9, 'M', 'ᄍ'), + (0xFFBA, 'M', 'ᄎ'), + (0xFFBB, 'M', 'ᄏ'), + (0xFFBC, 'M', 'ᄐ'), + (0xFFBD, 'M', 'ᄑ'), + (0xFFBE, 'M', 'ᄒ'), + (0xFFBF, 'X'), + (0xFFC2, 'M', 'ᅡ'), + (0xFFC3, 'M', 'ᅢ'), + (0xFFC4, 'M', 'ᅣ'), + (0xFFC5, 'M', 'ᅤ'), + (0xFFC6, 'M', 'ᅥ'), + (0xFFC7, 'M', 'ᅦ'), + (0xFFC8, 'X'), + (0xFFCA, 'M', 'ᅧ'), + (0xFFCB, 'M', 'ᅨ'), + (0xFFCC, 'M', 'ᅩ'), + (0xFFCD, 'M', 'ᅪ'), + (0xFFCE, 'M', 'ᅫ'), + (0xFFCF, 'M', 'ᅬ'), + (0xFFD0, 'X'), + (0xFFD2, 'M', 'ᅭ'), + (0xFFD3, 'M', 'ᅮ'), + (0xFFD4, 'M', 'ᅯ'), + (0xFFD5, 'M', 'ᅰ'), + (0xFFD6, 'M', 'ᅱ'), + (0xFFD7, 'M', 'ᅲ'), + (0xFFD8, 'X'), + (0xFFDA, 'M', 'ᅳ'), + (0xFFDB, 'M', 'ᅴ'), + (0xFFDC, 'M', 'ᅵ'), + (0xFFDD, 'X'), + (0xFFE0, 'M', '¢'), + (0xFFE1, 'M', '£'), + (0xFFE2, 'M', '¬'), + (0xFFE3, '3', ' ̄'), + (0xFFE4, 'M', '¦'), + (0xFFE5, 'M', '¥'), + (0xFFE6, 'M', '₩'), + (0xFFE7, 'X'), + (0xFFE8, 'M', '│'), + (0xFFE9, 'M', '←'), + ] + +def _seg_53() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFFEA, 'M', '↑'), + (0xFFEB, 
'M', '→'), + (0xFFEC, 'M', '↓'), + (0xFFED, 'M', '■'), + (0xFFEE, 'M', '○'), + (0xFFEF, 'X'), + (0x10000, 'V'), + (0x1000C, 'X'), + (0x1000D, 'V'), + (0x10027, 'X'), + (0x10028, 'V'), + (0x1003B, 'X'), + (0x1003C, 'V'), + (0x1003E, 'X'), + (0x1003F, 'V'), + (0x1004E, 'X'), + (0x10050, 'V'), + (0x1005E, 'X'), + (0x10080, 'V'), + (0x100FB, 'X'), + (0x10100, 'V'), + (0x10103, 'X'), + (0x10107, 'V'), + (0x10134, 'X'), + (0x10137, 'V'), + (0x1018F, 'X'), + (0x10190, 'V'), + (0x1019D, 'X'), + (0x101A0, 'V'), + (0x101A1, 'X'), + (0x101D0, 'V'), + (0x101FE, 'X'), + (0x10280, 'V'), + (0x1029D, 'X'), + (0x102A0, 'V'), + (0x102D1, 'X'), + (0x102E0, 'V'), + (0x102FC, 'X'), + (0x10300, 'V'), + (0x10324, 'X'), + (0x1032D, 'V'), + (0x1034B, 'X'), + (0x10350, 'V'), + (0x1037B, 'X'), + (0x10380, 'V'), + (0x1039E, 'X'), + (0x1039F, 'V'), + (0x103C4, 'X'), + (0x103C8, 'V'), + (0x103D6, 'X'), + (0x10400, 'M', '𐐨'), + (0x10401, 'M', '𐐩'), + (0x10402, 'M', '𐐪'), + (0x10403, 'M', '𐐫'), + (0x10404, 'M', '𐐬'), + (0x10405, 'M', '𐐭'), + (0x10406, 'M', '𐐮'), + (0x10407, 'M', '𐐯'), + (0x10408, 'M', '𐐰'), + (0x10409, 'M', '𐐱'), + (0x1040A, 'M', '𐐲'), + (0x1040B, 'M', '𐐳'), + (0x1040C, 'M', '𐐴'), + (0x1040D, 'M', '𐐵'), + (0x1040E, 'M', '𐐶'), + (0x1040F, 'M', '𐐷'), + (0x10410, 'M', '𐐸'), + (0x10411, 'M', '𐐹'), + (0x10412, 'M', '𐐺'), + (0x10413, 'M', '𐐻'), + (0x10414, 'M', '𐐼'), + (0x10415, 'M', '𐐽'), + (0x10416, 'M', '𐐾'), + (0x10417, 'M', '𐐿'), + (0x10418, 'M', '𐑀'), + (0x10419, 'M', '𐑁'), + (0x1041A, 'M', '𐑂'), + (0x1041B, 'M', '𐑃'), + (0x1041C, 'M', '𐑄'), + (0x1041D, 'M', '𐑅'), + (0x1041E, 'M', '𐑆'), + (0x1041F, 'M', '𐑇'), + (0x10420, 'M', '𐑈'), + (0x10421, 'M', '𐑉'), + (0x10422, 'M', '𐑊'), + (0x10423, 'M', '𐑋'), + (0x10424, 'M', '𐑌'), + (0x10425, 'M', '𐑍'), + (0x10426, 'M', '𐑎'), + (0x10427, 'M', '𐑏'), + (0x10428, 'V'), + (0x1049E, 'X'), + (0x104A0, 'V'), + (0x104AA, 'X'), + (0x104B0, 'M', '𐓘'), + (0x104B1, 'M', '𐓙'), + (0x104B2, 'M', '𐓚'), + (0x104B3, 'M', '𐓛'), + (0x104B4, 'M', '𐓜'), + 
(0x104B5, 'M', '𐓝'), + ] + +def _seg_54() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x104B6, 'M', '𐓞'), + (0x104B7, 'M', '𐓟'), + (0x104B8, 'M', '𐓠'), + (0x104B9, 'M', '𐓡'), + (0x104BA, 'M', '𐓢'), + (0x104BB, 'M', '𐓣'), + (0x104BC, 'M', '𐓤'), + (0x104BD, 'M', '𐓥'), + (0x104BE, 'M', '𐓦'), + (0x104BF, 'M', '𐓧'), + (0x104C0, 'M', '𐓨'), + (0x104C1, 'M', '𐓩'), + (0x104C2, 'M', '𐓪'), + (0x104C3, 'M', '𐓫'), + (0x104C4, 'M', '𐓬'), + (0x104C5, 'M', '𐓭'), + (0x104C6, 'M', '𐓮'), + (0x104C7, 'M', '𐓯'), + (0x104C8, 'M', '𐓰'), + (0x104C9, 'M', '𐓱'), + (0x104CA, 'M', '𐓲'), + (0x104CB, 'M', '𐓳'), + (0x104CC, 'M', '𐓴'), + (0x104CD, 'M', '𐓵'), + (0x104CE, 'M', '𐓶'), + (0x104CF, 'M', '𐓷'), + (0x104D0, 'M', '𐓸'), + (0x104D1, 'M', '𐓹'), + (0x104D2, 'M', '𐓺'), + (0x104D3, 'M', '𐓻'), + (0x104D4, 'X'), + (0x104D8, 'V'), + (0x104FC, 'X'), + (0x10500, 'V'), + (0x10528, 'X'), + (0x10530, 'V'), + (0x10564, 'X'), + (0x1056F, 'V'), + (0x10570, 'M', '𐖗'), + (0x10571, 'M', '𐖘'), + (0x10572, 'M', '𐖙'), + (0x10573, 'M', '𐖚'), + (0x10574, 'M', '𐖛'), + (0x10575, 'M', '𐖜'), + (0x10576, 'M', '𐖝'), + (0x10577, 'M', '𐖞'), + (0x10578, 'M', '𐖟'), + (0x10579, 'M', '𐖠'), + (0x1057A, 'M', '𐖡'), + (0x1057B, 'X'), + (0x1057C, 'M', '𐖣'), + (0x1057D, 'M', '𐖤'), + (0x1057E, 'M', '𐖥'), + (0x1057F, 'M', '𐖦'), + (0x10580, 'M', '𐖧'), + (0x10581, 'M', '𐖨'), + (0x10582, 'M', '𐖩'), + (0x10583, 'M', '𐖪'), + (0x10584, 'M', '𐖫'), + (0x10585, 'M', '𐖬'), + (0x10586, 'M', '𐖭'), + (0x10587, 'M', '𐖮'), + (0x10588, 'M', '𐖯'), + (0x10589, 'M', '𐖰'), + (0x1058A, 'M', '𐖱'), + (0x1058B, 'X'), + (0x1058C, 'M', '𐖳'), + (0x1058D, 'M', '𐖴'), + (0x1058E, 'M', '𐖵'), + (0x1058F, 'M', '𐖶'), + (0x10590, 'M', '𐖷'), + (0x10591, 'M', '𐖸'), + (0x10592, 'M', '𐖹'), + (0x10593, 'X'), + (0x10594, 'M', '𐖻'), + (0x10595, 'M', '𐖼'), + (0x10596, 'X'), + (0x10597, 'V'), + (0x105A2, 'X'), + (0x105A3, 'V'), + (0x105B2, 'X'), + (0x105B3, 'V'), + (0x105BA, 'X'), + (0x105BB, 'V'), + (0x105BD, 'X'), + (0x10600, 'V'), + (0x10737, 'X'), 
+ (0x10740, 'V'), + (0x10756, 'X'), + (0x10760, 'V'), + (0x10768, 'X'), + (0x10780, 'V'), + (0x10781, 'M', 'ː'), + (0x10782, 'M', 'ˑ'), + (0x10783, 'M', 'æ'), + (0x10784, 'M', 'ʙ'), + (0x10785, 'M', 'ɓ'), + (0x10786, 'X'), + (0x10787, 'M', 'ʣ'), + (0x10788, 'M', 'ꭦ'), + ] + +def _seg_55() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x10789, 'M', 'ʥ'), + (0x1078A, 'M', 'ʤ'), + (0x1078B, 'M', 'ɖ'), + (0x1078C, 'M', 'ɗ'), + (0x1078D, 'M', 'ᶑ'), + (0x1078E, 'M', 'ɘ'), + (0x1078F, 'M', 'ɞ'), + (0x10790, 'M', 'ʩ'), + (0x10791, 'M', 'ɤ'), + (0x10792, 'M', 'ɢ'), + (0x10793, 'M', 'ɠ'), + (0x10794, 'M', 'ʛ'), + (0x10795, 'M', 'ħ'), + (0x10796, 'M', 'ʜ'), + (0x10797, 'M', 'ɧ'), + (0x10798, 'M', 'ʄ'), + (0x10799, 'M', 'ʪ'), + (0x1079A, 'M', 'ʫ'), + (0x1079B, 'M', 'ɬ'), + (0x1079C, 'M', '𝼄'), + (0x1079D, 'M', 'ꞎ'), + (0x1079E, 'M', 'ɮ'), + (0x1079F, 'M', '𝼅'), + (0x107A0, 'M', 'ʎ'), + (0x107A1, 'M', '𝼆'), + (0x107A2, 'M', 'ø'), + (0x107A3, 'M', 'ɶ'), + (0x107A4, 'M', 'ɷ'), + (0x107A5, 'M', 'q'), + (0x107A6, 'M', 'ɺ'), + (0x107A7, 'M', '𝼈'), + (0x107A8, 'M', 'ɽ'), + (0x107A9, 'M', 'ɾ'), + (0x107AA, 'M', 'ʀ'), + (0x107AB, 'M', 'ʨ'), + (0x107AC, 'M', 'ʦ'), + (0x107AD, 'M', 'ꭧ'), + (0x107AE, 'M', 'ʧ'), + (0x107AF, 'M', 'ʈ'), + (0x107B0, 'M', 'ⱱ'), + (0x107B1, 'X'), + (0x107B2, 'M', 'ʏ'), + (0x107B3, 'M', 'ʡ'), + (0x107B4, 'M', 'ʢ'), + (0x107B5, 'M', 'ʘ'), + (0x107B6, 'M', 'ǀ'), + (0x107B7, 'M', 'ǁ'), + (0x107B8, 'M', 'ǂ'), + (0x107B9, 'M', '𝼊'), + (0x107BA, 'M', '𝼞'), + (0x107BB, 'X'), + (0x10800, 'V'), + (0x10806, 'X'), + (0x10808, 'V'), + (0x10809, 'X'), + (0x1080A, 'V'), + (0x10836, 'X'), + (0x10837, 'V'), + (0x10839, 'X'), + (0x1083C, 'V'), + (0x1083D, 'X'), + (0x1083F, 'V'), + (0x10856, 'X'), + (0x10857, 'V'), + (0x1089F, 'X'), + (0x108A7, 'V'), + (0x108B0, 'X'), + (0x108E0, 'V'), + (0x108F3, 'X'), + (0x108F4, 'V'), + (0x108F6, 'X'), + (0x108FB, 'V'), + (0x1091C, 'X'), + (0x1091F, 'V'), + (0x1093A, 'X'), + (0x1093F, 'V'), + (0x10940, 'X'), + (0x10980, 
'V'), + (0x109B8, 'X'), + (0x109BC, 'V'), + (0x109D0, 'X'), + (0x109D2, 'V'), + (0x10A04, 'X'), + (0x10A05, 'V'), + (0x10A07, 'X'), + (0x10A0C, 'V'), + (0x10A14, 'X'), + (0x10A15, 'V'), + (0x10A18, 'X'), + (0x10A19, 'V'), + (0x10A36, 'X'), + (0x10A38, 'V'), + (0x10A3B, 'X'), + (0x10A3F, 'V'), + (0x10A49, 'X'), + (0x10A50, 'V'), + (0x10A59, 'X'), + (0x10A60, 'V'), + (0x10AA0, 'X'), + (0x10AC0, 'V'), + ] + +def _seg_56() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x10AE7, 'X'), + (0x10AEB, 'V'), + (0x10AF7, 'X'), + (0x10B00, 'V'), + (0x10B36, 'X'), + (0x10B39, 'V'), + (0x10B56, 'X'), + (0x10B58, 'V'), + (0x10B73, 'X'), + (0x10B78, 'V'), + (0x10B92, 'X'), + (0x10B99, 'V'), + (0x10B9D, 'X'), + (0x10BA9, 'V'), + (0x10BB0, 'X'), + (0x10C00, 'V'), + (0x10C49, 'X'), + (0x10C80, 'M', '𐳀'), + (0x10C81, 'M', '𐳁'), + (0x10C82, 'M', '𐳂'), + (0x10C83, 'M', '𐳃'), + (0x10C84, 'M', '𐳄'), + (0x10C85, 'M', '𐳅'), + (0x10C86, 'M', '𐳆'), + (0x10C87, 'M', '𐳇'), + (0x10C88, 'M', '𐳈'), + (0x10C89, 'M', '𐳉'), + (0x10C8A, 'M', '𐳊'), + (0x10C8B, 'M', '𐳋'), + (0x10C8C, 'M', '𐳌'), + (0x10C8D, 'M', '𐳍'), + (0x10C8E, 'M', '𐳎'), + (0x10C8F, 'M', '𐳏'), + (0x10C90, 'M', '𐳐'), + (0x10C91, 'M', '𐳑'), + (0x10C92, 'M', '𐳒'), + (0x10C93, 'M', '𐳓'), + (0x10C94, 'M', '𐳔'), + (0x10C95, 'M', '𐳕'), + (0x10C96, 'M', '𐳖'), + (0x10C97, 'M', '𐳗'), + (0x10C98, 'M', '𐳘'), + (0x10C99, 'M', '𐳙'), + (0x10C9A, 'M', '𐳚'), + (0x10C9B, 'M', '𐳛'), + (0x10C9C, 'M', '𐳜'), + (0x10C9D, 'M', '𐳝'), + (0x10C9E, 'M', '𐳞'), + (0x10C9F, 'M', '𐳟'), + (0x10CA0, 'M', '𐳠'), + (0x10CA1, 'M', '𐳡'), + (0x10CA2, 'M', '𐳢'), + (0x10CA3, 'M', '𐳣'), + (0x10CA4, 'M', '𐳤'), + (0x10CA5, 'M', '𐳥'), + (0x10CA6, 'M', '𐳦'), + (0x10CA7, 'M', '𐳧'), + (0x10CA8, 'M', '𐳨'), + (0x10CA9, 'M', '𐳩'), + (0x10CAA, 'M', '𐳪'), + (0x10CAB, 'M', '𐳫'), + (0x10CAC, 'M', '𐳬'), + (0x10CAD, 'M', '𐳭'), + (0x10CAE, 'M', '𐳮'), + (0x10CAF, 'M', '𐳯'), + (0x10CB0, 'M', '𐳰'), + (0x10CB1, 'M', '𐳱'), + (0x10CB2, 'M', '𐳲'), + (0x10CB3, 'X'), + (0x10CC0, 
'V'), + (0x10CF3, 'X'), + (0x10CFA, 'V'), + (0x10D28, 'X'), + (0x10D30, 'V'), + (0x10D3A, 'X'), + (0x10E60, 'V'), + (0x10E7F, 'X'), + (0x10E80, 'V'), + (0x10EAA, 'X'), + (0x10EAB, 'V'), + (0x10EAE, 'X'), + (0x10EB0, 'V'), + (0x10EB2, 'X'), + (0x10F00, 'V'), + (0x10F28, 'X'), + (0x10F30, 'V'), + (0x10F5A, 'X'), + (0x10F70, 'V'), + (0x10F8A, 'X'), + (0x10FB0, 'V'), + (0x10FCC, 'X'), + (0x10FE0, 'V'), + (0x10FF7, 'X'), + (0x11000, 'V'), + (0x1104E, 'X'), + (0x11052, 'V'), + (0x11076, 'X'), + (0x1107F, 'V'), + (0x110BD, 'X'), + (0x110BE, 'V'), + ] + +def _seg_57() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x110C3, 'X'), + (0x110D0, 'V'), + (0x110E9, 'X'), + (0x110F0, 'V'), + (0x110FA, 'X'), + (0x11100, 'V'), + (0x11135, 'X'), + (0x11136, 'V'), + (0x11148, 'X'), + (0x11150, 'V'), + (0x11177, 'X'), + (0x11180, 'V'), + (0x111E0, 'X'), + (0x111E1, 'V'), + (0x111F5, 'X'), + (0x11200, 'V'), + (0x11212, 'X'), + (0x11213, 'V'), + (0x1123F, 'X'), + (0x11280, 'V'), + (0x11287, 'X'), + (0x11288, 'V'), + (0x11289, 'X'), + (0x1128A, 'V'), + (0x1128E, 'X'), + (0x1128F, 'V'), + (0x1129E, 'X'), + (0x1129F, 'V'), + (0x112AA, 'X'), + (0x112B0, 'V'), + (0x112EB, 'X'), + (0x112F0, 'V'), + (0x112FA, 'X'), + (0x11300, 'V'), + (0x11304, 'X'), + (0x11305, 'V'), + (0x1130D, 'X'), + (0x1130F, 'V'), + (0x11311, 'X'), + (0x11313, 'V'), + (0x11329, 'X'), + (0x1132A, 'V'), + (0x11331, 'X'), + (0x11332, 'V'), + (0x11334, 'X'), + (0x11335, 'V'), + (0x1133A, 'X'), + (0x1133B, 'V'), + (0x11345, 'X'), + (0x11347, 'V'), + (0x11349, 'X'), + (0x1134B, 'V'), + (0x1134E, 'X'), + (0x11350, 'V'), + (0x11351, 'X'), + (0x11357, 'V'), + (0x11358, 'X'), + (0x1135D, 'V'), + (0x11364, 'X'), + (0x11366, 'V'), + (0x1136D, 'X'), + (0x11370, 'V'), + (0x11375, 'X'), + (0x11400, 'V'), + (0x1145C, 'X'), + (0x1145D, 'V'), + (0x11462, 'X'), + (0x11480, 'V'), + (0x114C8, 'X'), + (0x114D0, 'V'), + (0x114DA, 'X'), + (0x11580, 'V'), + (0x115B6, 'X'), + (0x115B8, 'V'), + (0x115DE, 'X'), + (0x11600, 
'V'), + (0x11645, 'X'), + (0x11650, 'V'), + (0x1165A, 'X'), + (0x11660, 'V'), + (0x1166D, 'X'), + (0x11680, 'V'), + (0x116BA, 'X'), + (0x116C0, 'V'), + (0x116CA, 'X'), + (0x11700, 'V'), + (0x1171B, 'X'), + (0x1171D, 'V'), + (0x1172C, 'X'), + (0x11730, 'V'), + (0x11747, 'X'), + (0x11800, 'V'), + (0x1183C, 'X'), + (0x118A0, 'M', '𑣀'), + (0x118A1, 'M', '𑣁'), + (0x118A2, 'M', '𑣂'), + (0x118A3, 'M', '𑣃'), + (0x118A4, 'M', '𑣄'), + (0x118A5, 'M', '𑣅'), + (0x118A6, 'M', '𑣆'), + ] + +def _seg_58() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x118A7, 'M', '𑣇'), + (0x118A8, 'M', '𑣈'), + (0x118A9, 'M', '𑣉'), + (0x118AA, 'M', '𑣊'), + (0x118AB, 'M', '𑣋'), + (0x118AC, 'M', '𑣌'), + (0x118AD, 'M', '𑣍'), + (0x118AE, 'M', '𑣎'), + (0x118AF, 'M', '𑣏'), + (0x118B0, 'M', '𑣐'), + (0x118B1, 'M', '𑣑'), + (0x118B2, 'M', '𑣒'), + (0x118B3, 'M', '𑣓'), + (0x118B4, 'M', '𑣔'), + (0x118B5, 'M', '𑣕'), + (0x118B6, 'M', '𑣖'), + (0x118B7, 'M', '𑣗'), + (0x118B8, 'M', '𑣘'), + (0x118B9, 'M', '𑣙'), + (0x118BA, 'M', '𑣚'), + (0x118BB, 'M', '𑣛'), + (0x118BC, 'M', '𑣜'), + (0x118BD, 'M', '𑣝'), + (0x118BE, 'M', '𑣞'), + (0x118BF, 'M', '𑣟'), + (0x118C0, 'V'), + (0x118F3, 'X'), + (0x118FF, 'V'), + (0x11907, 'X'), + (0x11909, 'V'), + (0x1190A, 'X'), + (0x1190C, 'V'), + (0x11914, 'X'), + (0x11915, 'V'), + (0x11917, 'X'), + (0x11918, 'V'), + (0x11936, 'X'), + (0x11937, 'V'), + (0x11939, 'X'), + (0x1193B, 'V'), + (0x11947, 'X'), + (0x11950, 'V'), + (0x1195A, 'X'), + (0x119A0, 'V'), + (0x119A8, 'X'), + (0x119AA, 'V'), + (0x119D8, 'X'), + (0x119DA, 'V'), + (0x119E5, 'X'), + (0x11A00, 'V'), + (0x11A48, 'X'), + (0x11A50, 'V'), + (0x11AA3, 'X'), + (0x11AB0, 'V'), + (0x11AF9, 'X'), + (0x11C00, 'V'), + (0x11C09, 'X'), + (0x11C0A, 'V'), + (0x11C37, 'X'), + (0x11C38, 'V'), + (0x11C46, 'X'), + (0x11C50, 'V'), + (0x11C6D, 'X'), + (0x11C70, 'V'), + (0x11C90, 'X'), + (0x11C92, 'V'), + (0x11CA8, 'X'), + (0x11CA9, 'V'), + (0x11CB7, 'X'), + (0x11D00, 'V'), + (0x11D07, 'X'), + (0x11D08, 'V'), + (0x11D0A, 'X'), 
+ (0x11D0B, 'V'), + (0x11D37, 'X'), + (0x11D3A, 'V'), + (0x11D3B, 'X'), + (0x11D3C, 'V'), + (0x11D3E, 'X'), + (0x11D3F, 'V'), + (0x11D48, 'X'), + (0x11D50, 'V'), + (0x11D5A, 'X'), + (0x11D60, 'V'), + (0x11D66, 'X'), + (0x11D67, 'V'), + (0x11D69, 'X'), + (0x11D6A, 'V'), + (0x11D8F, 'X'), + (0x11D90, 'V'), + (0x11D92, 'X'), + (0x11D93, 'V'), + (0x11D99, 'X'), + (0x11DA0, 'V'), + (0x11DAA, 'X'), + (0x11EE0, 'V'), + (0x11EF9, 'X'), + (0x11FB0, 'V'), + (0x11FB1, 'X'), + (0x11FC0, 'V'), + ] + +def _seg_59() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x11FF2, 'X'), + (0x11FFF, 'V'), + (0x1239A, 'X'), + (0x12400, 'V'), + (0x1246F, 'X'), + (0x12470, 'V'), + (0x12475, 'X'), + (0x12480, 'V'), + (0x12544, 'X'), + (0x12F90, 'V'), + (0x12FF3, 'X'), + (0x13000, 'V'), + (0x1342F, 'X'), + (0x14400, 'V'), + (0x14647, 'X'), + (0x16800, 'V'), + (0x16A39, 'X'), + (0x16A40, 'V'), + (0x16A5F, 'X'), + (0x16A60, 'V'), + (0x16A6A, 'X'), + (0x16A6E, 'V'), + (0x16ABF, 'X'), + (0x16AC0, 'V'), + (0x16ACA, 'X'), + (0x16AD0, 'V'), + (0x16AEE, 'X'), + (0x16AF0, 'V'), + (0x16AF6, 'X'), + (0x16B00, 'V'), + (0x16B46, 'X'), + (0x16B50, 'V'), + (0x16B5A, 'X'), + (0x16B5B, 'V'), + (0x16B62, 'X'), + (0x16B63, 'V'), + (0x16B78, 'X'), + (0x16B7D, 'V'), + (0x16B90, 'X'), + (0x16E40, 'M', '𖹠'), + (0x16E41, 'M', '𖹡'), + (0x16E42, 'M', '𖹢'), + (0x16E43, 'M', '𖹣'), + (0x16E44, 'M', '𖹤'), + (0x16E45, 'M', '𖹥'), + (0x16E46, 'M', '𖹦'), + (0x16E47, 'M', '𖹧'), + (0x16E48, 'M', '𖹨'), + (0x16E49, 'M', '𖹩'), + (0x16E4A, 'M', '𖹪'), + (0x16E4B, 'M', '𖹫'), + (0x16E4C, 'M', '𖹬'), + (0x16E4D, 'M', '𖹭'), + (0x16E4E, 'M', '𖹮'), + (0x16E4F, 'M', '𖹯'), + (0x16E50, 'M', '𖹰'), + (0x16E51, 'M', '𖹱'), + (0x16E52, 'M', '𖹲'), + (0x16E53, 'M', '𖹳'), + (0x16E54, 'M', '𖹴'), + (0x16E55, 'M', '𖹵'), + (0x16E56, 'M', '𖹶'), + (0x16E57, 'M', '𖹷'), + (0x16E58, 'M', '𖹸'), + (0x16E59, 'M', '𖹹'), + (0x16E5A, 'M', '𖹺'), + (0x16E5B, 'M', '𖹻'), + (0x16E5C, 'M', '𖹼'), + (0x16E5D, 'M', '𖹽'), + (0x16E5E, 'M', '𖹾'), + (0x16E5F, 
'M', '𖹿'), + (0x16E60, 'V'), + (0x16E9B, 'X'), + (0x16F00, 'V'), + (0x16F4B, 'X'), + (0x16F4F, 'V'), + (0x16F88, 'X'), + (0x16F8F, 'V'), + (0x16FA0, 'X'), + (0x16FE0, 'V'), + (0x16FE5, 'X'), + (0x16FF0, 'V'), + (0x16FF2, 'X'), + (0x17000, 'V'), + (0x187F8, 'X'), + (0x18800, 'V'), + (0x18CD6, 'X'), + (0x18D00, 'V'), + (0x18D09, 'X'), + (0x1AFF0, 'V'), + (0x1AFF4, 'X'), + (0x1AFF5, 'V'), + (0x1AFFC, 'X'), + (0x1AFFD, 'V'), + (0x1AFFF, 'X'), + (0x1B000, 'V'), + (0x1B123, 'X'), + (0x1B150, 'V'), + (0x1B153, 'X'), + (0x1B164, 'V'), + ] + +def _seg_60() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1B168, 'X'), + (0x1B170, 'V'), + (0x1B2FC, 'X'), + (0x1BC00, 'V'), + (0x1BC6B, 'X'), + (0x1BC70, 'V'), + (0x1BC7D, 'X'), + (0x1BC80, 'V'), + (0x1BC89, 'X'), + (0x1BC90, 'V'), + (0x1BC9A, 'X'), + (0x1BC9C, 'V'), + (0x1BCA0, 'I'), + (0x1BCA4, 'X'), + (0x1CF00, 'V'), + (0x1CF2E, 'X'), + (0x1CF30, 'V'), + (0x1CF47, 'X'), + (0x1CF50, 'V'), + (0x1CFC4, 'X'), + (0x1D000, 'V'), + (0x1D0F6, 'X'), + (0x1D100, 'V'), + (0x1D127, 'X'), + (0x1D129, 'V'), + (0x1D15E, 'M', '𝅗𝅥'), + (0x1D15F, 'M', '𝅘𝅥'), + (0x1D160, 'M', '𝅘𝅥𝅮'), + (0x1D161, 'M', '𝅘𝅥𝅯'), + (0x1D162, 'M', '𝅘𝅥𝅰'), + (0x1D163, 'M', '𝅘𝅥𝅱'), + (0x1D164, 'M', '𝅘𝅥𝅲'), + (0x1D165, 'V'), + (0x1D173, 'X'), + (0x1D17B, 'V'), + (0x1D1BB, 'M', '𝆹𝅥'), + (0x1D1BC, 'M', '𝆺𝅥'), + (0x1D1BD, 'M', '𝆹𝅥𝅮'), + (0x1D1BE, 'M', '𝆺𝅥𝅮'), + (0x1D1BF, 'M', '𝆹𝅥𝅯'), + (0x1D1C0, 'M', '𝆺𝅥𝅯'), + (0x1D1C1, 'V'), + (0x1D1EB, 'X'), + (0x1D200, 'V'), + (0x1D246, 'X'), + (0x1D2E0, 'V'), + (0x1D2F4, 'X'), + (0x1D300, 'V'), + (0x1D357, 'X'), + (0x1D360, 'V'), + (0x1D379, 'X'), + (0x1D400, 'M', 'a'), + (0x1D401, 'M', 'b'), + (0x1D402, 'M', 'c'), + (0x1D403, 'M', 'd'), + (0x1D404, 'M', 'e'), + (0x1D405, 'M', 'f'), + (0x1D406, 'M', 'g'), + (0x1D407, 'M', 'h'), + (0x1D408, 'M', 'i'), + (0x1D409, 'M', 'j'), + (0x1D40A, 'M', 'k'), + (0x1D40B, 'M', 'l'), + (0x1D40C, 'M', 'm'), + (0x1D40D, 'M', 'n'), + (0x1D40E, 'M', 'o'), + (0x1D40F, 'M', 'p'), + 
(0x1D410, 'M', 'q'), + (0x1D411, 'M', 'r'), + (0x1D412, 'M', 's'), + (0x1D413, 'M', 't'), + (0x1D414, 'M', 'u'), + (0x1D415, 'M', 'v'), + (0x1D416, 'M', 'w'), + (0x1D417, 'M', 'x'), + (0x1D418, 'M', 'y'), + (0x1D419, 'M', 'z'), + (0x1D41A, 'M', 'a'), + (0x1D41B, 'M', 'b'), + (0x1D41C, 'M', 'c'), + (0x1D41D, 'M', 'd'), + (0x1D41E, 'M', 'e'), + (0x1D41F, 'M', 'f'), + (0x1D420, 'M', 'g'), + (0x1D421, 'M', 'h'), + (0x1D422, 'M', 'i'), + (0x1D423, 'M', 'j'), + (0x1D424, 'M', 'k'), + (0x1D425, 'M', 'l'), + (0x1D426, 'M', 'm'), + (0x1D427, 'M', 'n'), + (0x1D428, 'M', 'o'), + (0x1D429, 'M', 'p'), + (0x1D42A, 'M', 'q'), + (0x1D42B, 'M', 'r'), + (0x1D42C, 'M', 's'), + (0x1D42D, 'M', 't'), + (0x1D42E, 'M', 'u'), + (0x1D42F, 'M', 'v'), + (0x1D430, 'M', 'w'), + ] + +def _seg_61() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D431, 'M', 'x'), + (0x1D432, 'M', 'y'), + (0x1D433, 'M', 'z'), + (0x1D434, 'M', 'a'), + (0x1D435, 'M', 'b'), + (0x1D436, 'M', 'c'), + (0x1D437, 'M', 'd'), + (0x1D438, 'M', 'e'), + (0x1D439, 'M', 'f'), + (0x1D43A, 'M', 'g'), + (0x1D43B, 'M', 'h'), + (0x1D43C, 'M', 'i'), + (0x1D43D, 'M', 'j'), + (0x1D43E, 'M', 'k'), + (0x1D43F, 'M', 'l'), + (0x1D440, 'M', 'm'), + (0x1D441, 'M', 'n'), + (0x1D442, 'M', 'o'), + (0x1D443, 'M', 'p'), + (0x1D444, 'M', 'q'), + (0x1D445, 'M', 'r'), + (0x1D446, 'M', 's'), + (0x1D447, 'M', 't'), + (0x1D448, 'M', 'u'), + (0x1D449, 'M', 'v'), + (0x1D44A, 'M', 'w'), + (0x1D44B, 'M', 'x'), + (0x1D44C, 'M', 'y'), + (0x1D44D, 'M', 'z'), + (0x1D44E, 'M', 'a'), + (0x1D44F, 'M', 'b'), + (0x1D450, 'M', 'c'), + (0x1D451, 'M', 'd'), + (0x1D452, 'M', 'e'), + (0x1D453, 'M', 'f'), + (0x1D454, 'M', 'g'), + (0x1D455, 'X'), + (0x1D456, 'M', 'i'), + (0x1D457, 'M', 'j'), + (0x1D458, 'M', 'k'), + (0x1D459, 'M', 'l'), + (0x1D45A, 'M', 'm'), + (0x1D45B, 'M', 'n'), + (0x1D45C, 'M', 'o'), + (0x1D45D, 'M', 'p'), + (0x1D45E, 'M', 'q'), + (0x1D45F, 'M', 'r'), + (0x1D460, 'M', 's'), + (0x1D461, 'M', 't'), + (0x1D462, 'M', 'u'), + 
(0x1D463, 'M', 'v'), + (0x1D464, 'M', 'w'), + (0x1D465, 'M', 'x'), + (0x1D466, 'M', 'y'), + (0x1D467, 'M', 'z'), + (0x1D468, 'M', 'a'), + (0x1D469, 'M', 'b'), + (0x1D46A, 'M', 'c'), + (0x1D46B, 'M', 'd'), + (0x1D46C, 'M', 'e'), + (0x1D46D, 'M', 'f'), + (0x1D46E, 'M', 'g'), + (0x1D46F, 'M', 'h'), + (0x1D470, 'M', 'i'), + (0x1D471, 'M', 'j'), + (0x1D472, 'M', 'k'), + (0x1D473, 'M', 'l'), + (0x1D474, 'M', 'm'), + (0x1D475, 'M', 'n'), + (0x1D476, 'M', 'o'), + (0x1D477, 'M', 'p'), + (0x1D478, 'M', 'q'), + (0x1D479, 'M', 'r'), + (0x1D47A, 'M', 's'), + (0x1D47B, 'M', 't'), + (0x1D47C, 'M', 'u'), + (0x1D47D, 'M', 'v'), + (0x1D47E, 'M', 'w'), + (0x1D47F, 'M', 'x'), + (0x1D480, 'M', 'y'), + (0x1D481, 'M', 'z'), + (0x1D482, 'M', 'a'), + (0x1D483, 'M', 'b'), + (0x1D484, 'M', 'c'), + (0x1D485, 'M', 'd'), + (0x1D486, 'M', 'e'), + (0x1D487, 'M', 'f'), + (0x1D488, 'M', 'g'), + (0x1D489, 'M', 'h'), + (0x1D48A, 'M', 'i'), + (0x1D48B, 'M', 'j'), + (0x1D48C, 'M', 'k'), + (0x1D48D, 'M', 'l'), + (0x1D48E, 'M', 'm'), + (0x1D48F, 'M', 'n'), + (0x1D490, 'M', 'o'), + (0x1D491, 'M', 'p'), + (0x1D492, 'M', 'q'), + (0x1D493, 'M', 'r'), + (0x1D494, 'M', 's'), + ] + +def _seg_62() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D495, 'M', 't'), + (0x1D496, 'M', 'u'), + (0x1D497, 'M', 'v'), + (0x1D498, 'M', 'w'), + (0x1D499, 'M', 'x'), + (0x1D49A, 'M', 'y'), + (0x1D49B, 'M', 'z'), + (0x1D49C, 'M', 'a'), + (0x1D49D, 'X'), + (0x1D49E, 'M', 'c'), + (0x1D49F, 'M', 'd'), + (0x1D4A0, 'X'), + (0x1D4A2, 'M', 'g'), + (0x1D4A3, 'X'), + (0x1D4A5, 'M', 'j'), + (0x1D4A6, 'M', 'k'), + (0x1D4A7, 'X'), + (0x1D4A9, 'M', 'n'), + (0x1D4AA, 'M', 'o'), + (0x1D4AB, 'M', 'p'), + (0x1D4AC, 'M', 'q'), + (0x1D4AD, 'X'), + (0x1D4AE, 'M', 's'), + (0x1D4AF, 'M', 't'), + (0x1D4B0, 'M', 'u'), + (0x1D4B1, 'M', 'v'), + (0x1D4B2, 'M', 'w'), + (0x1D4B3, 'M', 'x'), + (0x1D4B4, 'M', 'y'), + (0x1D4B5, 'M', 'z'), + (0x1D4B6, 'M', 'a'), + (0x1D4B7, 'M', 'b'), + (0x1D4B8, 'M', 'c'), + (0x1D4B9, 'M', 'd'), + 
(0x1D4BA, 'X'), + (0x1D4BB, 'M', 'f'), + (0x1D4BC, 'X'), + (0x1D4BD, 'M', 'h'), + (0x1D4BE, 'M', 'i'), + (0x1D4BF, 'M', 'j'), + (0x1D4C0, 'M', 'k'), + (0x1D4C1, 'M', 'l'), + (0x1D4C2, 'M', 'm'), + (0x1D4C3, 'M', 'n'), + (0x1D4C4, 'X'), + (0x1D4C5, 'M', 'p'), + (0x1D4C6, 'M', 'q'), + (0x1D4C7, 'M', 'r'), + (0x1D4C8, 'M', 's'), + (0x1D4C9, 'M', 't'), + (0x1D4CA, 'M', 'u'), + (0x1D4CB, 'M', 'v'), + (0x1D4CC, 'M', 'w'), + (0x1D4CD, 'M', 'x'), + (0x1D4CE, 'M', 'y'), + (0x1D4CF, 'M', 'z'), + (0x1D4D0, 'M', 'a'), + (0x1D4D1, 'M', 'b'), + (0x1D4D2, 'M', 'c'), + (0x1D4D3, 'M', 'd'), + (0x1D4D4, 'M', 'e'), + (0x1D4D5, 'M', 'f'), + (0x1D4D6, 'M', 'g'), + (0x1D4D7, 'M', 'h'), + (0x1D4D8, 'M', 'i'), + (0x1D4D9, 'M', 'j'), + (0x1D4DA, 'M', 'k'), + (0x1D4DB, 'M', 'l'), + (0x1D4DC, 'M', 'm'), + (0x1D4DD, 'M', 'n'), + (0x1D4DE, 'M', 'o'), + (0x1D4DF, 'M', 'p'), + (0x1D4E0, 'M', 'q'), + (0x1D4E1, 'M', 'r'), + (0x1D4E2, 'M', 's'), + (0x1D4E3, 'M', 't'), + (0x1D4E4, 'M', 'u'), + (0x1D4E5, 'M', 'v'), + (0x1D4E6, 'M', 'w'), + (0x1D4E7, 'M', 'x'), + (0x1D4E8, 'M', 'y'), + (0x1D4E9, 'M', 'z'), + (0x1D4EA, 'M', 'a'), + (0x1D4EB, 'M', 'b'), + (0x1D4EC, 'M', 'c'), + (0x1D4ED, 'M', 'd'), + (0x1D4EE, 'M', 'e'), + (0x1D4EF, 'M', 'f'), + (0x1D4F0, 'M', 'g'), + (0x1D4F1, 'M', 'h'), + (0x1D4F2, 'M', 'i'), + (0x1D4F3, 'M', 'j'), + (0x1D4F4, 'M', 'k'), + (0x1D4F5, 'M', 'l'), + (0x1D4F6, 'M', 'm'), + (0x1D4F7, 'M', 'n'), + (0x1D4F8, 'M', 'o'), + (0x1D4F9, 'M', 'p'), + (0x1D4FA, 'M', 'q'), + (0x1D4FB, 'M', 'r'), + ] + +def _seg_63() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D4FC, 'M', 's'), + (0x1D4FD, 'M', 't'), + (0x1D4FE, 'M', 'u'), + (0x1D4FF, 'M', 'v'), + (0x1D500, 'M', 'w'), + (0x1D501, 'M', 'x'), + (0x1D502, 'M', 'y'), + (0x1D503, 'M', 'z'), + (0x1D504, 'M', 'a'), + (0x1D505, 'M', 'b'), + (0x1D506, 'X'), + (0x1D507, 'M', 'd'), + (0x1D508, 'M', 'e'), + (0x1D509, 'M', 'f'), + (0x1D50A, 'M', 'g'), + (0x1D50B, 'X'), + (0x1D50D, 'M', 'j'), + (0x1D50E, 'M', 'k'), + 
(0x1D50F, 'M', 'l'), + (0x1D510, 'M', 'm'), + (0x1D511, 'M', 'n'), + (0x1D512, 'M', 'o'), + (0x1D513, 'M', 'p'), + (0x1D514, 'M', 'q'), + (0x1D515, 'X'), + (0x1D516, 'M', 's'), + (0x1D517, 'M', 't'), + (0x1D518, 'M', 'u'), + (0x1D519, 'M', 'v'), + (0x1D51A, 'M', 'w'), + (0x1D51B, 'M', 'x'), + (0x1D51C, 'M', 'y'), + (0x1D51D, 'X'), + (0x1D51E, 'M', 'a'), + (0x1D51F, 'M', 'b'), + (0x1D520, 'M', 'c'), + (0x1D521, 'M', 'd'), + (0x1D522, 'M', 'e'), + (0x1D523, 'M', 'f'), + (0x1D524, 'M', 'g'), + (0x1D525, 'M', 'h'), + (0x1D526, 'M', 'i'), + (0x1D527, 'M', 'j'), + (0x1D528, 'M', 'k'), + (0x1D529, 'M', 'l'), + (0x1D52A, 'M', 'm'), + (0x1D52B, 'M', 'n'), + (0x1D52C, 'M', 'o'), + (0x1D52D, 'M', 'p'), + (0x1D52E, 'M', 'q'), + (0x1D52F, 'M', 'r'), + (0x1D530, 'M', 's'), + (0x1D531, 'M', 't'), + (0x1D532, 'M', 'u'), + (0x1D533, 'M', 'v'), + (0x1D534, 'M', 'w'), + (0x1D535, 'M', 'x'), + (0x1D536, 'M', 'y'), + (0x1D537, 'M', 'z'), + (0x1D538, 'M', 'a'), + (0x1D539, 'M', 'b'), + (0x1D53A, 'X'), + (0x1D53B, 'M', 'd'), + (0x1D53C, 'M', 'e'), + (0x1D53D, 'M', 'f'), + (0x1D53E, 'M', 'g'), + (0x1D53F, 'X'), + (0x1D540, 'M', 'i'), + (0x1D541, 'M', 'j'), + (0x1D542, 'M', 'k'), + (0x1D543, 'M', 'l'), + (0x1D544, 'M', 'm'), + (0x1D545, 'X'), + (0x1D546, 'M', 'o'), + (0x1D547, 'X'), + (0x1D54A, 'M', 's'), + (0x1D54B, 'M', 't'), + (0x1D54C, 'M', 'u'), + (0x1D54D, 'M', 'v'), + (0x1D54E, 'M', 'w'), + (0x1D54F, 'M', 'x'), + (0x1D550, 'M', 'y'), + (0x1D551, 'X'), + (0x1D552, 'M', 'a'), + (0x1D553, 'M', 'b'), + (0x1D554, 'M', 'c'), + (0x1D555, 'M', 'd'), + (0x1D556, 'M', 'e'), + (0x1D557, 'M', 'f'), + (0x1D558, 'M', 'g'), + (0x1D559, 'M', 'h'), + (0x1D55A, 'M', 'i'), + (0x1D55B, 'M', 'j'), + (0x1D55C, 'M', 'k'), + (0x1D55D, 'M', 'l'), + (0x1D55E, 'M', 'm'), + (0x1D55F, 'M', 'n'), + (0x1D560, 'M', 'o'), + (0x1D561, 'M', 'p'), + (0x1D562, 'M', 'q'), + ] + +def _seg_64() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D563, 'M', 'r'), + (0x1D564, 'M', 's'), + (0x1D565, 'M', 
't'), + (0x1D566, 'M', 'u'), + (0x1D567, 'M', 'v'), + (0x1D568, 'M', 'w'), + (0x1D569, 'M', 'x'), + (0x1D56A, 'M', 'y'), + (0x1D56B, 'M', 'z'), + (0x1D56C, 'M', 'a'), + (0x1D56D, 'M', 'b'), + (0x1D56E, 'M', 'c'), + (0x1D56F, 'M', 'd'), + (0x1D570, 'M', 'e'), + (0x1D571, 'M', 'f'), + (0x1D572, 'M', 'g'), + (0x1D573, 'M', 'h'), + (0x1D574, 'M', 'i'), + (0x1D575, 'M', 'j'), + (0x1D576, 'M', 'k'), + (0x1D577, 'M', 'l'), + (0x1D578, 'M', 'm'), + (0x1D579, 'M', 'n'), + (0x1D57A, 'M', 'o'), + (0x1D57B, 'M', 'p'), + (0x1D57C, 'M', 'q'), + (0x1D57D, 'M', 'r'), + (0x1D57E, 'M', 's'), + (0x1D57F, 'M', 't'), + (0x1D580, 'M', 'u'), + (0x1D581, 'M', 'v'), + (0x1D582, 'M', 'w'), + (0x1D583, 'M', 'x'), + (0x1D584, 'M', 'y'), + (0x1D585, 'M', 'z'), + (0x1D586, 'M', 'a'), + (0x1D587, 'M', 'b'), + (0x1D588, 'M', 'c'), + (0x1D589, 'M', 'd'), + (0x1D58A, 'M', 'e'), + (0x1D58B, 'M', 'f'), + (0x1D58C, 'M', 'g'), + (0x1D58D, 'M', 'h'), + (0x1D58E, 'M', 'i'), + (0x1D58F, 'M', 'j'), + (0x1D590, 'M', 'k'), + (0x1D591, 'M', 'l'), + (0x1D592, 'M', 'm'), + (0x1D593, 'M', 'n'), + (0x1D594, 'M', 'o'), + (0x1D595, 'M', 'p'), + (0x1D596, 'M', 'q'), + (0x1D597, 'M', 'r'), + (0x1D598, 'M', 's'), + (0x1D599, 'M', 't'), + (0x1D59A, 'M', 'u'), + (0x1D59B, 'M', 'v'), + (0x1D59C, 'M', 'w'), + (0x1D59D, 'M', 'x'), + (0x1D59E, 'M', 'y'), + (0x1D59F, 'M', 'z'), + (0x1D5A0, 'M', 'a'), + (0x1D5A1, 'M', 'b'), + (0x1D5A2, 'M', 'c'), + (0x1D5A3, 'M', 'd'), + (0x1D5A4, 'M', 'e'), + (0x1D5A5, 'M', 'f'), + (0x1D5A6, 'M', 'g'), + (0x1D5A7, 'M', 'h'), + (0x1D5A8, 'M', 'i'), + (0x1D5A9, 'M', 'j'), + (0x1D5AA, 'M', 'k'), + (0x1D5AB, 'M', 'l'), + (0x1D5AC, 'M', 'm'), + (0x1D5AD, 'M', 'n'), + (0x1D5AE, 'M', 'o'), + (0x1D5AF, 'M', 'p'), + (0x1D5B0, 'M', 'q'), + (0x1D5B1, 'M', 'r'), + (0x1D5B2, 'M', 's'), + (0x1D5B3, 'M', 't'), + (0x1D5B4, 'M', 'u'), + (0x1D5B5, 'M', 'v'), + (0x1D5B6, 'M', 'w'), + (0x1D5B7, 'M', 'x'), + (0x1D5B8, 'M', 'y'), + (0x1D5B9, 'M', 'z'), + (0x1D5BA, 'M', 'a'), + (0x1D5BB, 'M', 'b'), + (0x1D5BC, 
'M', 'c'), + (0x1D5BD, 'M', 'd'), + (0x1D5BE, 'M', 'e'), + (0x1D5BF, 'M', 'f'), + (0x1D5C0, 'M', 'g'), + (0x1D5C1, 'M', 'h'), + (0x1D5C2, 'M', 'i'), + (0x1D5C3, 'M', 'j'), + (0x1D5C4, 'M', 'k'), + (0x1D5C5, 'M', 'l'), + (0x1D5C6, 'M', 'm'), + ] + +def _seg_65() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D5C7, 'M', 'n'), + (0x1D5C8, 'M', 'o'), + (0x1D5C9, 'M', 'p'), + (0x1D5CA, 'M', 'q'), + (0x1D5CB, 'M', 'r'), + (0x1D5CC, 'M', 's'), + (0x1D5CD, 'M', 't'), + (0x1D5CE, 'M', 'u'), + (0x1D5CF, 'M', 'v'), + (0x1D5D0, 'M', 'w'), + (0x1D5D1, 'M', 'x'), + (0x1D5D2, 'M', 'y'), + (0x1D5D3, 'M', 'z'), + (0x1D5D4, 'M', 'a'), + (0x1D5D5, 'M', 'b'), + (0x1D5D6, 'M', 'c'), + (0x1D5D7, 'M', 'd'), + (0x1D5D8, 'M', 'e'), + (0x1D5D9, 'M', 'f'), + (0x1D5DA, 'M', 'g'), + (0x1D5DB, 'M', 'h'), + (0x1D5DC, 'M', 'i'), + (0x1D5DD, 'M', 'j'), + (0x1D5DE, 'M', 'k'), + (0x1D5DF, 'M', 'l'), + (0x1D5E0, 'M', 'm'), + (0x1D5E1, 'M', 'n'), + (0x1D5E2, 'M', 'o'), + (0x1D5E3, 'M', 'p'), + (0x1D5E4, 'M', 'q'), + (0x1D5E5, 'M', 'r'), + (0x1D5E6, 'M', 's'), + (0x1D5E7, 'M', 't'), + (0x1D5E8, 'M', 'u'), + (0x1D5E9, 'M', 'v'), + (0x1D5EA, 'M', 'w'), + (0x1D5EB, 'M', 'x'), + (0x1D5EC, 'M', 'y'), + (0x1D5ED, 'M', 'z'), + (0x1D5EE, 'M', 'a'), + (0x1D5EF, 'M', 'b'), + (0x1D5F0, 'M', 'c'), + (0x1D5F1, 'M', 'd'), + (0x1D5F2, 'M', 'e'), + (0x1D5F3, 'M', 'f'), + (0x1D5F4, 'M', 'g'), + (0x1D5F5, 'M', 'h'), + (0x1D5F6, 'M', 'i'), + (0x1D5F7, 'M', 'j'), + (0x1D5F8, 'M', 'k'), + (0x1D5F9, 'M', 'l'), + (0x1D5FA, 'M', 'm'), + (0x1D5FB, 'M', 'n'), + (0x1D5FC, 'M', 'o'), + (0x1D5FD, 'M', 'p'), + (0x1D5FE, 'M', 'q'), + (0x1D5FF, 'M', 'r'), + (0x1D600, 'M', 's'), + (0x1D601, 'M', 't'), + (0x1D602, 'M', 'u'), + (0x1D603, 'M', 'v'), + (0x1D604, 'M', 'w'), + (0x1D605, 'M', 'x'), + (0x1D606, 'M', 'y'), + (0x1D607, 'M', 'z'), + (0x1D608, 'M', 'a'), + (0x1D609, 'M', 'b'), + (0x1D60A, 'M', 'c'), + (0x1D60B, 'M', 'd'), + (0x1D60C, 'M', 'e'), + (0x1D60D, 'M', 'f'), + (0x1D60E, 'M', 'g'), + (0x1D60F, 
'M', 'h'), + (0x1D610, 'M', 'i'), + (0x1D611, 'M', 'j'), + (0x1D612, 'M', 'k'), + (0x1D613, 'M', 'l'), + (0x1D614, 'M', 'm'), + (0x1D615, 'M', 'n'), + (0x1D616, 'M', 'o'), + (0x1D617, 'M', 'p'), + (0x1D618, 'M', 'q'), + (0x1D619, 'M', 'r'), + (0x1D61A, 'M', 's'), + (0x1D61B, 'M', 't'), + (0x1D61C, 'M', 'u'), + (0x1D61D, 'M', 'v'), + (0x1D61E, 'M', 'w'), + (0x1D61F, 'M', 'x'), + (0x1D620, 'M', 'y'), + (0x1D621, 'M', 'z'), + (0x1D622, 'M', 'a'), + (0x1D623, 'M', 'b'), + (0x1D624, 'M', 'c'), + (0x1D625, 'M', 'd'), + (0x1D626, 'M', 'e'), + (0x1D627, 'M', 'f'), + (0x1D628, 'M', 'g'), + (0x1D629, 'M', 'h'), + (0x1D62A, 'M', 'i'), + ] + +def _seg_66() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D62B, 'M', 'j'), + (0x1D62C, 'M', 'k'), + (0x1D62D, 'M', 'l'), + (0x1D62E, 'M', 'm'), + (0x1D62F, 'M', 'n'), + (0x1D630, 'M', 'o'), + (0x1D631, 'M', 'p'), + (0x1D632, 'M', 'q'), + (0x1D633, 'M', 'r'), + (0x1D634, 'M', 's'), + (0x1D635, 'M', 't'), + (0x1D636, 'M', 'u'), + (0x1D637, 'M', 'v'), + (0x1D638, 'M', 'w'), + (0x1D639, 'M', 'x'), + (0x1D63A, 'M', 'y'), + (0x1D63B, 'M', 'z'), + (0x1D63C, 'M', 'a'), + (0x1D63D, 'M', 'b'), + (0x1D63E, 'M', 'c'), + (0x1D63F, 'M', 'd'), + (0x1D640, 'M', 'e'), + (0x1D641, 'M', 'f'), + (0x1D642, 'M', 'g'), + (0x1D643, 'M', 'h'), + (0x1D644, 'M', 'i'), + (0x1D645, 'M', 'j'), + (0x1D646, 'M', 'k'), + (0x1D647, 'M', 'l'), + (0x1D648, 'M', 'm'), + (0x1D649, 'M', 'n'), + (0x1D64A, 'M', 'o'), + (0x1D64B, 'M', 'p'), + (0x1D64C, 'M', 'q'), + (0x1D64D, 'M', 'r'), + (0x1D64E, 'M', 's'), + (0x1D64F, 'M', 't'), + (0x1D650, 'M', 'u'), + (0x1D651, 'M', 'v'), + (0x1D652, 'M', 'w'), + (0x1D653, 'M', 'x'), + (0x1D654, 'M', 'y'), + (0x1D655, 'M', 'z'), + (0x1D656, 'M', 'a'), + (0x1D657, 'M', 'b'), + (0x1D658, 'M', 'c'), + (0x1D659, 'M', 'd'), + (0x1D65A, 'M', 'e'), + (0x1D65B, 'M', 'f'), + (0x1D65C, 'M', 'g'), + (0x1D65D, 'M', 'h'), + (0x1D65E, 'M', 'i'), + (0x1D65F, 'M', 'j'), + (0x1D660, 'M', 'k'), + (0x1D661, 'M', 'l'), + (0x1D662, 
'M', 'm'), + (0x1D663, 'M', 'n'), + (0x1D664, 'M', 'o'), + (0x1D665, 'M', 'p'), + (0x1D666, 'M', 'q'), + (0x1D667, 'M', 'r'), + (0x1D668, 'M', 's'), + (0x1D669, 'M', 't'), + (0x1D66A, 'M', 'u'), + (0x1D66B, 'M', 'v'), + (0x1D66C, 'M', 'w'), + (0x1D66D, 'M', 'x'), + (0x1D66E, 'M', 'y'), + (0x1D66F, 'M', 'z'), + (0x1D670, 'M', 'a'), + (0x1D671, 'M', 'b'), + (0x1D672, 'M', 'c'), + (0x1D673, 'M', 'd'), + (0x1D674, 'M', 'e'), + (0x1D675, 'M', 'f'), + (0x1D676, 'M', 'g'), + (0x1D677, 'M', 'h'), + (0x1D678, 'M', 'i'), + (0x1D679, 'M', 'j'), + (0x1D67A, 'M', 'k'), + (0x1D67B, 'M', 'l'), + (0x1D67C, 'M', 'm'), + (0x1D67D, 'M', 'n'), + (0x1D67E, 'M', 'o'), + (0x1D67F, 'M', 'p'), + (0x1D680, 'M', 'q'), + (0x1D681, 'M', 'r'), + (0x1D682, 'M', 's'), + (0x1D683, 'M', 't'), + (0x1D684, 'M', 'u'), + (0x1D685, 'M', 'v'), + (0x1D686, 'M', 'w'), + (0x1D687, 'M', 'x'), + (0x1D688, 'M', 'y'), + (0x1D689, 'M', 'z'), + (0x1D68A, 'M', 'a'), + (0x1D68B, 'M', 'b'), + (0x1D68C, 'M', 'c'), + (0x1D68D, 'M', 'd'), + (0x1D68E, 'M', 'e'), + ] + +def _seg_67() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D68F, 'M', 'f'), + (0x1D690, 'M', 'g'), + (0x1D691, 'M', 'h'), + (0x1D692, 'M', 'i'), + (0x1D693, 'M', 'j'), + (0x1D694, 'M', 'k'), + (0x1D695, 'M', 'l'), + (0x1D696, 'M', 'm'), + (0x1D697, 'M', 'n'), + (0x1D698, 'M', 'o'), + (0x1D699, 'M', 'p'), + (0x1D69A, 'M', 'q'), + (0x1D69B, 'M', 'r'), + (0x1D69C, 'M', 's'), + (0x1D69D, 'M', 't'), + (0x1D69E, 'M', 'u'), + (0x1D69F, 'M', 'v'), + (0x1D6A0, 'M', 'w'), + (0x1D6A1, 'M', 'x'), + (0x1D6A2, 'M', 'y'), + (0x1D6A3, 'M', 'z'), + (0x1D6A4, 'M', 'ı'), + (0x1D6A5, 'M', 'ȷ'), + (0x1D6A6, 'X'), + (0x1D6A8, 'M', 'α'), + (0x1D6A9, 'M', 'β'), + (0x1D6AA, 'M', 'γ'), + (0x1D6AB, 'M', 'δ'), + (0x1D6AC, 'M', 'ε'), + (0x1D6AD, 'M', 'ζ'), + (0x1D6AE, 'M', 'η'), + (0x1D6AF, 'M', 'θ'), + (0x1D6B0, 'M', 'ι'), + (0x1D6B1, 'M', 'κ'), + (0x1D6B2, 'M', 'λ'), + (0x1D6B3, 'M', 'μ'), + (0x1D6B4, 'M', 'ν'), + (0x1D6B5, 'M', 'ξ'), + (0x1D6B6, 'M', 
'ο'), + (0x1D6B7, 'M', 'π'), + (0x1D6B8, 'M', 'ρ'), + (0x1D6B9, 'M', 'θ'), + (0x1D6BA, 'M', 'σ'), + (0x1D6BB, 'M', 'τ'), + (0x1D6BC, 'M', 'υ'), + (0x1D6BD, 'M', 'φ'), + (0x1D6BE, 'M', 'χ'), + (0x1D6BF, 'M', 'ψ'), + (0x1D6C0, 'M', 'ω'), + (0x1D6C1, 'M', '∇'), + (0x1D6C2, 'M', 'α'), + (0x1D6C3, 'M', 'β'), + (0x1D6C4, 'M', 'γ'), + (0x1D6C5, 'M', 'δ'), + (0x1D6C6, 'M', 'ε'), + (0x1D6C7, 'M', 'ζ'), + (0x1D6C8, 'M', 'η'), + (0x1D6C9, 'M', 'θ'), + (0x1D6CA, 'M', 'ι'), + (0x1D6CB, 'M', 'κ'), + (0x1D6CC, 'M', 'λ'), + (0x1D6CD, 'M', 'μ'), + (0x1D6CE, 'M', 'ν'), + (0x1D6CF, 'M', 'ξ'), + (0x1D6D0, 'M', 'ο'), + (0x1D6D1, 'M', 'π'), + (0x1D6D2, 'M', 'ρ'), + (0x1D6D3, 'M', 'σ'), + (0x1D6D5, 'M', 'τ'), + (0x1D6D6, 'M', 'υ'), + (0x1D6D7, 'M', 'φ'), + (0x1D6D8, 'M', 'χ'), + (0x1D6D9, 'M', 'ψ'), + (0x1D6DA, 'M', 'ω'), + (0x1D6DB, 'M', '∂'), + (0x1D6DC, 'M', 'ε'), + (0x1D6DD, 'M', 'θ'), + (0x1D6DE, 'M', 'κ'), + (0x1D6DF, 'M', 'φ'), + (0x1D6E0, 'M', 'ρ'), + (0x1D6E1, 'M', 'π'), + (0x1D6E2, 'M', 'α'), + (0x1D6E3, 'M', 'β'), + (0x1D6E4, 'M', 'γ'), + (0x1D6E5, 'M', 'δ'), + (0x1D6E6, 'M', 'ε'), + (0x1D6E7, 'M', 'ζ'), + (0x1D6E8, 'M', 'η'), + (0x1D6E9, 'M', 'θ'), + (0x1D6EA, 'M', 'ι'), + (0x1D6EB, 'M', 'κ'), + (0x1D6EC, 'M', 'λ'), + (0x1D6ED, 'M', 'μ'), + (0x1D6EE, 'M', 'ν'), + (0x1D6EF, 'M', 'ξ'), + (0x1D6F0, 'M', 'ο'), + (0x1D6F1, 'M', 'π'), + (0x1D6F2, 'M', 'ρ'), + (0x1D6F3, 'M', 'θ'), + (0x1D6F4, 'M', 'σ'), + ] + +def _seg_68() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D6F5, 'M', 'τ'), + (0x1D6F6, 'M', 'υ'), + (0x1D6F7, 'M', 'φ'), + (0x1D6F8, 'M', 'χ'), + (0x1D6F9, 'M', 'ψ'), + (0x1D6FA, 'M', 'ω'), + (0x1D6FB, 'M', '∇'), + (0x1D6FC, 'M', 'α'), + (0x1D6FD, 'M', 'β'), + (0x1D6FE, 'M', 'γ'), + (0x1D6FF, 'M', 'δ'), + (0x1D700, 'M', 'ε'), + (0x1D701, 'M', 'ζ'), + (0x1D702, 'M', 'η'), + (0x1D703, 'M', 'θ'), + (0x1D704, 'M', 'ι'), + (0x1D705, 'M', 'κ'), + (0x1D706, 'M', 'λ'), + (0x1D707, 'M', 'μ'), + (0x1D708, 'M', 'ν'), + (0x1D709, 'M', 'ξ'), + (0x1D70A, 'M', 
'ο'), + (0x1D70B, 'M', 'π'), + (0x1D70C, 'M', 'ρ'), + (0x1D70D, 'M', 'σ'), + (0x1D70F, 'M', 'τ'), + (0x1D710, 'M', 'υ'), + (0x1D711, 'M', 'φ'), + (0x1D712, 'M', 'χ'), + (0x1D713, 'M', 'ψ'), + (0x1D714, 'M', 'ω'), + (0x1D715, 'M', '∂'), + (0x1D716, 'M', 'ε'), + (0x1D717, 'M', 'θ'), + (0x1D718, 'M', 'κ'), + (0x1D719, 'M', 'φ'), + (0x1D71A, 'M', 'ρ'), + (0x1D71B, 'M', 'π'), + (0x1D71C, 'M', 'α'), + (0x1D71D, 'M', 'β'), + (0x1D71E, 'M', 'γ'), + (0x1D71F, 'M', 'δ'), + (0x1D720, 'M', 'ε'), + (0x1D721, 'M', 'ζ'), + (0x1D722, 'M', 'η'), + (0x1D723, 'M', 'θ'), + (0x1D724, 'M', 'ι'), + (0x1D725, 'M', 'κ'), + (0x1D726, 'M', 'λ'), + (0x1D727, 'M', 'μ'), + (0x1D728, 'M', 'ν'), + (0x1D729, 'M', 'ξ'), + (0x1D72A, 'M', 'ο'), + (0x1D72B, 'M', 'π'), + (0x1D72C, 'M', 'ρ'), + (0x1D72D, 'M', 'θ'), + (0x1D72E, 'M', 'σ'), + (0x1D72F, 'M', 'τ'), + (0x1D730, 'M', 'υ'), + (0x1D731, 'M', 'φ'), + (0x1D732, 'M', 'χ'), + (0x1D733, 'M', 'ψ'), + (0x1D734, 'M', 'ω'), + (0x1D735, 'M', '∇'), + (0x1D736, 'M', 'α'), + (0x1D737, 'M', 'β'), + (0x1D738, 'M', 'γ'), + (0x1D739, 'M', 'δ'), + (0x1D73A, 'M', 'ε'), + (0x1D73B, 'M', 'ζ'), + (0x1D73C, 'M', 'η'), + (0x1D73D, 'M', 'θ'), + (0x1D73E, 'M', 'ι'), + (0x1D73F, 'M', 'κ'), + (0x1D740, 'M', 'λ'), + (0x1D741, 'M', 'μ'), + (0x1D742, 'M', 'ν'), + (0x1D743, 'M', 'ξ'), + (0x1D744, 'M', 'ο'), + (0x1D745, 'M', 'π'), + (0x1D746, 'M', 'ρ'), + (0x1D747, 'M', 'σ'), + (0x1D749, 'M', 'τ'), + (0x1D74A, 'M', 'υ'), + (0x1D74B, 'M', 'φ'), + (0x1D74C, 'M', 'χ'), + (0x1D74D, 'M', 'ψ'), + (0x1D74E, 'M', 'ω'), + (0x1D74F, 'M', '∂'), + (0x1D750, 'M', 'ε'), + (0x1D751, 'M', 'θ'), + (0x1D752, 'M', 'κ'), + (0x1D753, 'M', 'φ'), + (0x1D754, 'M', 'ρ'), + (0x1D755, 'M', 'π'), + (0x1D756, 'M', 'α'), + (0x1D757, 'M', 'β'), + (0x1D758, 'M', 'γ'), + (0x1D759, 'M', 'δ'), + (0x1D75A, 'M', 'ε'), + ] + +def _seg_69() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D75B, 'M', 'ζ'), + (0x1D75C, 'M', 'η'), + (0x1D75D, 'M', 'θ'), + (0x1D75E, 'M', 'ι'), + (0x1D75F, 'M', 
'κ'), + (0x1D760, 'M', 'λ'), + (0x1D761, 'M', 'μ'), + (0x1D762, 'M', 'ν'), + (0x1D763, 'M', 'ξ'), + (0x1D764, 'M', 'ο'), + (0x1D765, 'M', 'π'), + (0x1D766, 'M', 'ρ'), + (0x1D767, 'M', 'θ'), + (0x1D768, 'M', 'σ'), + (0x1D769, 'M', 'τ'), + (0x1D76A, 'M', 'υ'), + (0x1D76B, 'M', 'φ'), + (0x1D76C, 'M', 'χ'), + (0x1D76D, 'M', 'ψ'), + (0x1D76E, 'M', 'ω'), + (0x1D76F, 'M', '∇'), + (0x1D770, 'M', 'α'), + (0x1D771, 'M', 'β'), + (0x1D772, 'M', 'γ'), + (0x1D773, 'M', 'δ'), + (0x1D774, 'M', 'ε'), + (0x1D775, 'M', 'ζ'), + (0x1D776, 'M', 'η'), + (0x1D777, 'M', 'θ'), + (0x1D778, 'M', 'ι'), + (0x1D779, 'M', 'κ'), + (0x1D77A, 'M', 'λ'), + (0x1D77B, 'M', 'μ'), + (0x1D77C, 'M', 'ν'), + (0x1D77D, 'M', 'ξ'), + (0x1D77E, 'M', 'ο'), + (0x1D77F, 'M', 'π'), + (0x1D780, 'M', 'ρ'), + (0x1D781, 'M', 'σ'), + (0x1D783, 'M', 'τ'), + (0x1D784, 'M', 'υ'), + (0x1D785, 'M', 'φ'), + (0x1D786, 'M', 'χ'), + (0x1D787, 'M', 'ψ'), + (0x1D788, 'M', 'ω'), + (0x1D789, 'M', '∂'), + (0x1D78A, 'M', 'ε'), + (0x1D78B, 'M', 'θ'), + (0x1D78C, 'M', 'κ'), + (0x1D78D, 'M', 'φ'), + (0x1D78E, 'M', 'ρ'), + (0x1D78F, 'M', 'π'), + (0x1D790, 'M', 'α'), + (0x1D791, 'M', 'β'), + (0x1D792, 'M', 'γ'), + (0x1D793, 'M', 'δ'), + (0x1D794, 'M', 'ε'), + (0x1D795, 'M', 'ζ'), + (0x1D796, 'M', 'η'), + (0x1D797, 'M', 'θ'), + (0x1D798, 'M', 'ι'), + (0x1D799, 'M', 'κ'), + (0x1D79A, 'M', 'λ'), + (0x1D79B, 'M', 'μ'), + (0x1D79C, 'M', 'ν'), + (0x1D79D, 'M', 'ξ'), + (0x1D79E, 'M', 'ο'), + (0x1D79F, 'M', 'π'), + (0x1D7A0, 'M', 'ρ'), + (0x1D7A1, 'M', 'θ'), + (0x1D7A2, 'M', 'σ'), + (0x1D7A3, 'M', 'τ'), + (0x1D7A4, 'M', 'υ'), + (0x1D7A5, 'M', 'φ'), + (0x1D7A6, 'M', 'χ'), + (0x1D7A7, 'M', 'ψ'), + (0x1D7A8, 'M', 'ω'), + (0x1D7A9, 'M', '∇'), + (0x1D7AA, 'M', 'α'), + (0x1D7AB, 'M', 'β'), + (0x1D7AC, 'M', 'γ'), + (0x1D7AD, 'M', 'δ'), + (0x1D7AE, 'M', 'ε'), + (0x1D7AF, 'M', 'ζ'), + (0x1D7B0, 'M', 'η'), + (0x1D7B1, 'M', 'θ'), + (0x1D7B2, 'M', 'ι'), + (0x1D7B3, 'M', 'κ'), + (0x1D7B4, 'M', 'λ'), + (0x1D7B5, 'M', 'μ'), + (0x1D7B6, 'M', 'ν'), + (0x1D7B7, 
'M', 'ξ'), + (0x1D7B8, 'M', 'ο'), + (0x1D7B9, 'M', 'π'), + (0x1D7BA, 'M', 'ρ'), + (0x1D7BB, 'M', 'σ'), + (0x1D7BD, 'M', 'τ'), + (0x1D7BE, 'M', 'υ'), + (0x1D7BF, 'M', 'φ'), + (0x1D7C0, 'M', 'χ'), + ] + +def _seg_70() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D7C1, 'M', 'ψ'), + (0x1D7C2, 'M', 'ω'), + (0x1D7C3, 'M', '∂'), + (0x1D7C4, 'M', 'ε'), + (0x1D7C5, 'M', 'θ'), + (0x1D7C6, 'M', 'κ'), + (0x1D7C7, 'M', 'φ'), + (0x1D7C8, 'M', 'ρ'), + (0x1D7C9, 'M', 'π'), + (0x1D7CA, 'M', 'ϝ'), + (0x1D7CC, 'X'), + (0x1D7CE, 'M', '0'), + (0x1D7CF, 'M', '1'), + (0x1D7D0, 'M', '2'), + (0x1D7D1, 'M', '3'), + (0x1D7D2, 'M', '4'), + (0x1D7D3, 'M', '5'), + (0x1D7D4, 'M', '6'), + (0x1D7D5, 'M', '7'), + (0x1D7D6, 'M', '8'), + (0x1D7D7, 'M', '9'), + (0x1D7D8, 'M', '0'), + (0x1D7D9, 'M', '1'), + (0x1D7DA, 'M', '2'), + (0x1D7DB, 'M', '3'), + (0x1D7DC, 'M', '4'), + (0x1D7DD, 'M', '5'), + (0x1D7DE, 'M', '6'), + (0x1D7DF, 'M', '7'), + (0x1D7E0, 'M', '8'), + (0x1D7E1, 'M', '9'), + (0x1D7E2, 'M', '0'), + (0x1D7E3, 'M', '1'), + (0x1D7E4, 'M', '2'), + (0x1D7E5, 'M', '3'), + (0x1D7E6, 'M', '4'), + (0x1D7E7, 'M', '5'), + (0x1D7E8, 'M', '6'), + (0x1D7E9, 'M', '7'), + (0x1D7EA, 'M', '8'), + (0x1D7EB, 'M', '9'), + (0x1D7EC, 'M', '0'), + (0x1D7ED, 'M', '1'), + (0x1D7EE, 'M', '2'), + (0x1D7EF, 'M', '3'), + (0x1D7F0, 'M', '4'), + (0x1D7F1, 'M', '5'), + (0x1D7F2, 'M', '6'), + (0x1D7F3, 'M', '7'), + (0x1D7F4, 'M', '8'), + (0x1D7F5, 'M', '9'), + (0x1D7F6, 'M', '0'), + (0x1D7F7, 'M', '1'), + (0x1D7F8, 'M', '2'), + (0x1D7F9, 'M', '3'), + (0x1D7FA, 'M', '4'), + (0x1D7FB, 'M', '5'), + (0x1D7FC, 'M', '6'), + (0x1D7FD, 'M', '7'), + (0x1D7FE, 'M', '8'), + (0x1D7FF, 'M', '9'), + (0x1D800, 'V'), + (0x1DA8C, 'X'), + (0x1DA9B, 'V'), + (0x1DAA0, 'X'), + (0x1DAA1, 'V'), + (0x1DAB0, 'X'), + (0x1DF00, 'V'), + (0x1DF1F, 'X'), + (0x1E000, 'V'), + (0x1E007, 'X'), + (0x1E008, 'V'), + (0x1E019, 'X'), + (0x1E01B, 'V'), + (0x1E022, 'X'), + (0x1E023, 'V'), + (0x1E025, 'X'), + (0x1E026, 'V'), + (0x1E02B, 
'X'), + (0x1E100, 'V'), + (0x1E12D, 'X'), + (0x1E130, 'V'), + (0x1E13E, 'X'), + (0x1E140, 'V'), + (0x1E14A, 'X'), + (0x1E14E, 'V'), + (0x1E150, 'X'), + (0x1E290, 'V'), + (0x1E2AF, 'X'), + (0x1E2C0, 'V'), + (0x1E2FA, 'X'), + (0x1E2FF, 'V'), + (0x1E300, 'X'), + (0x1E7E0, 'V'), + (0x1E7E7, 'X'), + (0x1E7E8, 'V'), + (0x1E7EC, 'X'), + (0x1E7ED, 'V'), + (0x1E7EF, 'X'), + (0x1E7F0, 'V'), + ] + +def _seg_71() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1E7FF, 'X'), + (0x1E800, 'V'), + (0x1E8C5, 'X'), + (0x1E8C7, 'V'), + (0x1E8D7, 'X'), + (0x1E900, 'M', '𞤢'), + (0x1E901, 'M', '𞤣'), + (0x1E902, 'M', '𞤤'), + (0x1E903, 'M', '𞤥'), + (0x1E904, 'M', '𞤦'), + (0x1E905, 'M', '𞤧'), + (0x1E906, 'M', '𞤨'), + (0x1E907, 'M', '𞤩'), + (0x1E908, 'M', '𞤪'), + (0x1E909, 'M', '𞤫'), + (0x1E90A, 'M', '𞤬'), + (0x1E90B, 'M', '𞤭'), + (0x1E90C, 'M', '𞤮'), + (0x1E90D, 'M', '𞤯'), + (0x1E90E, 'M', '𞤰'), + (0x1E90F, 'M', '𞤱'), + (0x1E910, 'M', '𞤲'), + (0x1E911, 'M', '𞤳'), + (0x1E912, 'M', '𞤴'), + (0x1E913, 'M', '𞤵'), + (0x1E914, 'M', '𞤶'), + (0x1E915, 'M', '𞤷'), + (0x1E916, 'M', '𞤸'), + (0x1E917, 'M', '𞤹'), + (0x1E918, 'M', '𞤺'), + (0x1E919, 'M', '𞤻'), + (0x1E91A, 'M', '𞤼'), + (0x1E91B, 'M', '𞤽'), + (0x1E91C, 'M', '𞤾'), + (0x1E91D, 'M', '𞤿'), + (0x1E91E, 'M', '𞥀'), + (0x1E91F, 'M', '𞥁'), + (0x1E920, 'M', '𞥂'), + (0x1E921, 'M', '𞥃'), + (0x1E922, 'V'), + (0x1E94C, 'X'), + (0x1E950, 'V'), + (0x1E95A, 'X'), + (0x1E95E, 'V'), + (0x1E960, 'X'), + (0x1EC71, 'V'), + (0x1ECB5, 'X'), + (0x1ED01, 'V'), + (0x1ED3E, 'X'), + (0x1EE00, 'M', 'ا'), + (0x1EE01, 'M', 'ب'), + (0x1EE02, 'M', 'ج'), + (0x1EE03, 'M', 'د'), + (0x1EE04, 'X'), + (0x1EE05, 'M', 'و'), + (0x1EE06, 'M', 'ز'), + (0x1EE07, 'M', 'ح'), + (0x1EE08, 'M', 'ط'), + (0x1EE09, 'M', 'ي'), + (0x1EE0A, 'M', 'ك'), + (0x1EE0B, 'M', 'ل'), + (0x1EE0C, 'M', 'م'), + (0x1EE0D, 'M', 'ن'), + (0x1EE0E, 'M', 'س'), + (0x1EE0F, 'M', 'ع'), + (0x1EE10, 'M', 'ف'), + (0x1EE11, 'M', 'ص'), + (0x1EE12, 'M', 'ق'), + (0x1EE13, 'M', 'ر'), + (0x1EE14, 'M', 
'ش'), + (0x1EE15, 'M', 'ت'), + (0x1EE16, 'M', 'ث'), + (0x1EE17, 'M', 'خ'), + (0x1EE18, 'M', 'ذ'), + (0x1EE19, 'M', 'ض'), + (0x1EE1A, 'M', 'ظ'), + (0x1EE1B, 'M', 'غ'), + (0x1EE1C, 'M', 'ٮ'), + (0x1EE1D, 'M', 'ں'), + (0x1EE1E, 'M', 'ڡ'), + (0x1EE1F, 'M', 'ٯ'), + (0x1EE20, 'X'), + (0x1EE21, 'M', 'ب'), + (0x1EE22, 'M', 'ج'), + (0x1EE23, 'X'), + (0x1EE24, 'M', 'ه'), + (0x1EE25, 'X'), + (0x1EE27, 'M', 'ح'), + (0x1EE28, 'X'), + (0x1EE29, 'M', 'ي'), + (0x1EE2A, 'M', 'ك'), + (0x1EE2B, 'M', 'ل'), + (0x1EE2C, 'M', 'م'), + (0x1EE2D, 'M', 'ن'), + (0x1EE2E, 'M', 'س'), + (0x1EE2F, 'M', 'ع'), + (0x1EE30, 'M', 'ف'), + (0x1EE31, 'M', 'ص'), + (0x1EE32, 'M', 'ق'), + (0x1EE33, 'X'), + ] + +def _seg_72() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1EE34, 'M', 'ش'), + (0x1EE35, 'M', 'ت'), + (0x1EE36, 'M', 'ث'), + (0x1EE37, 'M', 'خ'), + (0x1EE38, 'X'), + (0x1EE39, 'M', 'ض'), + (0x1EE3A, 'X'), + (0x1EE3B, 'M', 'غ'), + (0x1EE3C, 'X'), + (0x1EE42, 'M', 'ج'), + (0x1EE43, 'X'), + (0x1EE47, 'M', 'ح'), + (0x1EE48, 'X'), + (0x1EE49, 'M', 'ي'), + (0x1EE4A, 'X'), + (0x1EE4B, 'M', 'ل'), + (0x1EE4C, 'X'), + (0x1EE4D, 'M', 'ن'), + (0x1EE4E, 'M', 'س'), + (0x1EE4F, 'M', 'ع'), + (0x1EE50, 'X'), + (0x1EE51, 'M', 'ص'), + (0x1EE52, 'M', 'ق'), + (0x1EE53, 'X'), + (0x1EE54, 'M', 'ش'), + (0x1EE55, 'X'), + (0x1EE57, 'M', 'خ'), + (0x1EE58, 'X'), + (0x1EE59, 'M', 'ض'), + (0x1EE5A, 'X'), + (0x1EE5B, 'M', 'غ'), + (0x1EE5C, 'X'), + (0x1EE5D, 'M', 'ں'), + (0x1EE5E, 'X'), + (0x1EE5F, 'M', 'ٯ'), + (0x1EE60, 'X'), + (0x1EE61, 'M', 'ب'), + (0x1EE62, 'M', 'ج'), + (0x1EE63, 'X'), + (0x1EE64, 'M', 'ه'), + (0x1EE65, 'X'), + (0x1EE67, 'M', 'ح'), + (0x1EE68, 'M', 'ط'), + (0x1EE69, 'M', 'ي'), + (0x1EE6A, 'M', 'ك'), + (0x1EE6B, 'X'), + (0x1EE6C, 'M', 'م'), + (0x1EE6D, 'M', 'ن'), + (0x1EE6E, 'M', 'س'), + (0x1EE6F, 'M', 'ع'), + (0x1EE70, 'M', 'ف'), + (0x1EE71, 'M', 'ص'), + (0x1EE72, 'M', 'ق'), + (0x1EE73, 'X'), + (0x1EE74, 'M', 'ش'), + (0x1EE75, 'M', 'ت'), + (0x1EE76, 'M', 'ث'), + (0x1EE77, 'M', 'خ'), + 
(0x1EE78, 'X'), + (0x1EE79, 'M', 'ض'), + (0x1EE7A, 'M', 'ظ'), + (0x1EE7B, 'M', 'غ'), + (0x1EE7C, 'M', 'ٮ'), + (0x1EE7D, 'X'), + (0x1EE7E, 'M', 'ڡ'), + (0x1EE7F, 'X'), + (0x1EE80, 'M', 'ا'), + (0x1EE81, 'M', 'ب'), + (0x1EE82, 'M', 'ج'), + (0x1EE83, 'M', 'د'), + (0x1EE84, 'M', 'ه'), + (0x1EE85, 'M', 'و'), + (0x1EE86, 'M', 'ز'), + (0x1EE87, 'M', 'ح'), + (0x1EE88, 'M', 'ط'), + (0x1EE89, 'M', 'ي'), + (0x1EE8A, 'X'), + (0x1EE8B, 'M', 'ل'), + (0x1EE8C, 'M', 'م'), + (0x1EE8D, 'M', 'ن'), + (0x1EE8E, 'M', 'س'), + (0x1EE8F, 'M', 'ع'), + (0x1EE90, 'M', 'ف'), + (0x1EE91, 'M', 'ص'), + (0x1EE92, 'M', 'ق'), + (0x1EE93, 'M', 'ر'), + (0x1EE94, 'M', 'ش'), + (0x1EE95, 'M', 'ت'), + (0x1EE96, 'M', 'ث'), + (0x1EE97, 'M', 'خ'), + (0x1EE98, 'M', 'ذ'), + (0x1EE99, 'M', 'ض'), + (0x1EE9A, 'M', 'ظ'), + (0x1EE9B, 'M', 'غ'), + (0x1EE9C, 'X'), + (0x1EEA1, 'M', 'ب'), + (0x1EEA2, 'M', 'ج'), + (0x1EEA3, 'M', 'د'), + (0x1EEA4, 'X'), + (0x1EEA5, 'M', 'و'), + ] + +def _seg_73() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1EEA6, 'M', 'ز'), + (0x1EEA7, 'M', 'ح'), + (0x1EEA8, 'M', 'ط'), + (0x1EEA9, 'M', 'ي'), + (0x1EEAA, 'X'), + (0x1EEAB, 'M', 'ل'), + (0x1EEAC, 'M', 'م'), + (0x1EEAD, 'M', 'ن'), + (0x1EEAE, 'M', 'س'), + (0x1EEAF, 'M', 'ع'), + (0x1EEB0, 'M', 'ف'), + (0x1EEB1, 'M', 'ص'), + (0x1EEB2, 'M', 'ق'), + (0x1EEB3, 'M', 'ر'), + (0x1EEB4, 'M', 'ش'), + (0x1EEB5, 'M', 'ت'), + (0x1EEB6, 'M', 'ث'), + (0x1EEB7, 'M', 'خ'), + (0x1EEB8, 'M', 'ذ'), + (0x1EEB9, 'M', 'ض'), + (0x1EEBA, 'M', 'ظ'), + (0x1EEBB, 'M', 'غ'), + (0x1EEBC, 'X'), + (0x1EEF0, 'V'), + (0x1EEF2, 'X'), + (0x1F000, 'V'), + (0x1F02C, 'X'), + (0x1F030, 'V'), + (0x1F094, 'X'), + (0x1F0A0, 'V'), + (0x1F0AF, 'X'), + (0x1F0B1, 'V'), + (0x1F0C0, 'X'), + (0x1F0C1, 'V'), + (0x1F0D0, 'X'), + (0x1F0D1, 'V'), + (0x1F0F6, 'X'), + (0x1F101, '3', '0,'), + (0x1F102, '3', '1,'), + (0x1F103, '3', '2,'), + (0x1F104, '3', '3,'), + (0x1F105, '3', '4,'), + (0x1F106, '3', '5,'), + (0x1F107, '3', '6,'), + (0x1F108, '3', '7,'), + (0x1F109, 
'3', '8,'), + (0x1F10A, '3', '9,'), + (0x1F10B, 'V'), + (0x1F110, '3', '(a)'), + (0x1F111, '3', '(b)'), + (0x1F112, '3', '(c)'), + (0x1F113, '3', '(d)'), + (0x1F114, '3', '(e)'), + (0x1F115, '3', '(f)'), + (0x1F116, '3', '(g)'), + (0x1F117, '3', '(h)'), + (0x1F118, '3', '(i)'), + (0x1F119, '3', '(j)'), + (0x1F11A, '3', '(k)'), + (0x1F11B, '3', '(l)'), + (0x1F11C, '3', '(m)'), + (0x1F11D, '3', '(n)'), + (0x1F11E, '3', '(o)'), + (0x1F11F, '3', '(p)'), + (0x1F120, '3', '(q)'), + (0x1F121, '3', '(r)'), + (0x1F122, '3', '(s)'), + (0x1F123, '3', '(t)'), + (0x1F124, '3', '(u)'), + (0x1F125, '3', '(v)'), + (0x1F126, '3', '(w)'), + (0x1F127, '3', '(x)'), + (0x1F128, '3', '(y)'), + (0x1F129, '3', '(z)'), + (0x1F12A, 'M', '〔s〕'), + (0x1F12B, 'M', 'c'), + (0x1F12C, 'M', 'r'), + (0x1F12D, 'M', 'cd'), + (0x1F12E, 'M', 'wz'), + (0x1F12F, 'V'), + (0x1F130, 'M', 'a'), + (0x1F131, 'M', 'b'), + (0x1F132, 'M', 'c'), + (0x1F133, 'M', 'd'), + (0x1F134, 'M', 'e'), + (0x1F135, 'M', 'f'), + (0x1F136, 'M', 'g'), + (0x1F137, 'M', 'h'), + (0x1F138, 'M', 'i'), + (0x1F139, 'M', 'j'), + (0x1F13A, 'M', 'k'), + (0x1F13B, 'M', 'l'), + (0x1F13C, 'M', 'm'), + (0x1F13D, 'M', 'n'), + (0x1F13E, 'M', 'o'), + (0x1F13F, 'M', 'p'), + (0x1F140, 'M', 'q'), + (0x1F141, 'M', 'r'), + (0x1F142, 'M', 's'), + (0x1F143, 'M', 't'), + ] + +def _seg_74() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1F144, 'M', 'u'), + (0x1F145, 'M', 'v'), + (0x1F146, 'M', 'w'), + (0x1F147, 'M', 'x'), + (0x1F148, 'M', 'y'), + (0x1F149, 'M', 'z'), + (0x1F14A, 'M', 'hv'), + (0x1F14B, 'M', 'mv'), + (0x1F14C, 'M', 'sd'), + (0x1F14D, 'M', 'ss'), + (0x1F14E, 'M', 'ppv'), + (0x1F14F, 'M', 'wc'), + (0x1F150, 'V'), + (0x1F16A, 'M', 'mc'), + (0x1F16B, 'M', 'md'), + (0x1F16C, 'M', 'mr'), + (0x1F16D, 'V'), + (0x1F190, 'M', 'dj'), + (0x1F191, 'V'), + (0x1F1AE, 'X'), + (0x1F1E6, 'V'), + (0x1F200, 'M', 'ほか'), + (0x1F201, 'M', 'ココ'), + (0x1F202, 'M', 'サ'), + (0x1F203, 'X'), + (0x1F210, 'M', '手'), + (0x1F211, 'M', '字'), + 
(0x1F212, 'M', '双'), + (0x1F213, 'M', 'デ'), + (0x1F214, 'M', '二'), + (0x1F215, 'M', '多'), + (0x1F216, 'M', '解'), + (0x1F217, 'M', '天'), + (0x1F218, 'M', '交'), + (0x1F219, 'M', '映'), + (0x1F21A, 'M', '無'), + (0x1F21B, 'M', '料'), + (0x1F21C, 'M', '前'), + (0x1F21D, 'M', '後'), + (0x1F21E, 'M', '再'), + (0x1F21F, 'M', '新'), + (0x1F220, 'M', '初'), + (0x1F221, 'M', '終'), + (0x1F222, 'M', '生'), + (0x1F223, 'M', '販'), + (0x1F224, 'M', '声'), + (0x1F225, 'M', '吹'), + (0x1F226, 'M', '演'), + (0x1F227, 'M', '投'), + (0x1F228, 'M', '捕'), + (0x1F229, 'M', '一'), + (0x1F22A, 'M', '三'), + (0x1F22B, 'M', '遊'), + (0x1F22C, 'M', '左'), + (0x1F22D, 'M', '中'), + (0x1F22E, 'M', '右'), + (0x1F22F, 'M', '指'), + (0x1F230, 'M', '走'), + (0x1F231, 'M', '打'), + (0x1F232, 'M', '禁'), + (0x1F233, 'M', '空'), + (0x1F234, 'M', '合'), + (0x1F235, 'M', '満'), + (0x1F236, 'M', '有'), + (0x1F237, 'M', '月'), + (0x1F238, 'M', '申'), + (0x1F239, 'M', '割'), + (0x1F23A, 'M', '営'), + (0x1F23B, 'M', '配'), + (0x1F23C, 'X'), + (0x1F240, 'M', '〔本〕'), + (0x1F241, 'M', '〔三〕'), + (0x1F242, 'M', '〔二〕'), + (0x1F243, 'M', '〔安〕'), + (0x1F244, 'M', '〔点〕'), + (0x1F245, 'M', '〔打〕'), + (0x1F246, 'M', '〔盗〕'), + (0x1F247, 'M', '〔勝〕'), + (0x1F248, 'M', '〔敗〕'), + (0x1F249, 'X'), + (0x1F250, 'M', '得'), + (0x1F251, 'M', '可'), + (0x1F252, 'X'), + (0x1F260, 'V'), + (0x1F266, 'X'), + (0x1F300, 'V'), + (0x1F6D8, 'X'), + (0x1F6DD, 'V'), + (0x1F6ED, 'X'), + (0x1F6F0, 'V'), + (0x1F6FD, 'X'), + (0x1F700, 'V'), + (0x1F774, 'X'), + (0x1F780, 'V'), + (0x1F7D9, 'X'), + (0x1F7E0, 'V'), + (0x1F7EC, 'X'), + (0x1F7F0, 'V'), + (0x1F7F1, 'X'), + (0x1F800, 'V'), + ] + +def _seg_75() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1F80C, 'X'), + (0x1F810, 'V'), + (0x1F848, 'X'), + (0x1F850, 'V'), + (0x1F85A, 'X'), + (0x1F860, 'V'), + (0x1F888, 'X'), + (0x1F890, 'V'), + (0x1F8AE, 'X'), + (0x1F8B0, 'V'), + (0x1F8B2, 'X'), + (0x1F900, 'V'), + (0x1FA54, 'X'), + (0x1FA60, 'V'), + (0x1FA6E, 'X'), + (0x1FA70, 'V'), + (0x1FA75, 'X'), + (0x1FA78, 
'V'), + (0x1FA7D, 'X'), + (0x1FA80, 'V'), + (0x1FA87, 'X'), + (0x1FA90, 'V'), + (0x1FAAD, 'X'), + (0x1FAB0, 'V'), + (0x1FABB, 'X'), + (0x1FAC0, 'V'), + (0x1FAC6, 'X'), + (0x1FAD0, 'V'), + (0x1FADA, 'X'), + (0x1FAE0, 'V'), + (0x1FAE8, 'X'), + (0x1FAF0, 'V'), + (0x1FAF7, 'X'), + (0x1FB00, 'V'), + (0x1FB93, 'X'), + (0x1FB94, 'V'), + (0x1FBCB, 'X'), + (0x1FBF0, 'M', '0'), + (0x1FBF1, 'M', '1'), + (0x1FBF2, 'M', '2'), + (0x1FBF3, 'M', '3'), + (0x1FBF4, 'M', '4'), + (0x1FBF5, 'M', '5'), + (0x1FBF6, 'M', '6'), + (0x1FBF7, 'M', '7'), + (0x1FBF8, 'M', '8'), + (0x1FBF9, 'M', '9'), + (0x1FBFA, 'X'), + (0x20000, 'V'), + (0x2A6E0, 'X'), + (0x2A700, 'V'), + (0x2B739, 'X'), + (0x2B740, 'V'), + (0x2B81E, 'X'), + (0x2B820, 'V'), + (0x2CEA2, 'X'), + (0x2CEB0, 'V'), + (0x2EBE1, 'X'), + (0x2F800, 'M', '丽'), + (0x2F801, 'M', '丸'), + (0x2F802, 'M', '乁'), + (0x2F803, 'M', '𠄢'), + (0x2F804, 'M', '你'), + (0x2F805, 'M', '侮'), + (0x2F806, 'M', '侻'), + (0x2F807, 'M', '倂'), + (0x2F808, 'M', '偺'), + (0x2F809, 'M', '備'), + (0x2F80A, 'M', '僧'), + (0x2F80B, 'M', '像'), + (0x2F80C, 'M', '㒞'), + (0x2F80D, 'M', '𠘺'), + (0x2F80E, 'M', '免'), + (0x2F80F, 'M', '兔'), + (0x2F810, 'M', '兤'), + (0x2F811, 'M', '具'), + (0x2F812, 'M', '𠔜'), + (0x2F813, 'M', '㒹'), + (0x2F814, 'M', '內'), + (0x2F815, 'M', '再'), + (0x2F816, 'M', '𠕋'), + (0x2F817, 'M', '冗'), + (0x2F818, 'M', '冤'), + (0x2F819, 'M', '仌'), + (0x2F81A, 'M', '冬'), + (0x2F81B, 'M', '况'), + (0x2F81C, 'M', '𩇟'), + (0x2F81D, 'M', '凵'), + (0x2F81E, 'M', '刃'), + (0x2F81F, 'M', '㓟'), + (0x2F820, 'M', '刻'), + (0x2F821, 'M', '剆'), + (0x2F822, 'M', '割'), + (0x2F823, 'M', '剷'), + (0x2F824, 'M', '㔕'), + (0x2F825, 'M', '勇'), + (0x2F826, 'M', '勉'), + (0x2F827, 'M', '勤'), + (0x2F828, 'M', '勺'), + (0x2F829, 'M', '包'), + ] + +def _seg_76() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F82A, 'M', '匆'), + (0x2F82B, 'M', '北'), + (0x2F82C, 'M', '卉'), + (0x2F82D, 'M', '卑'), + (0x2F82E, 'M', '博'), + (0x2F82F, 'M', '即'), + (0x2F830, 'M', '卽'), + 
(0x2F831, 'M', '卿'), + (0x2F834, 'M', '𠨬'), + (0x2F835, 'M', '灰'), + (0x2F836, 'M', '及'), + (0x2F837, 'M', '叟'), + (0x2F838, 'M', '𠭣'), + (0x2F839, 'M', '叫'), + (0x2F83A, 'M', '叱'), + (0x2F83B, 'M', '吆'), + (0x2F83C, 'M', '咞'), + (0x2F83D, 'M', '吸'), + (0x2F83E, 'M', '呈'), + (0x2F83F, 'M', '周'), + (0x2F840, 'M', '咢'), + (0x2F841, 'M', '哶'), + (0x2F842, 'M', '唐'), + (0x2F843, 'M', '啓'), + (0x2F844, 'M', '啣'), + (0x2F845, 'M', '善'), + (0x2F847, 'M', '喙'), + (0x2F848, 'M', '喫'), + (0x2F849, 'M', '喳'), + (0x2F84A, 'M', '嗂'), + (0x2F84B, 'M', '圖'), + (0x2F84C, 'M', '嘆'), + (0x2F84D, 'M', '圗'), + (0x2F84E, 'M', '噑'), + (0x2F84F, 'M', '噴'), + (0x2F850, 'M', '切'), + (0x2F851, 'M', '壮'), + (0x2F852, 'M', '城'), + (0x2F853, 'M', '埴'), + (0x2F854, 'M', '堍'), + (0x2F855, 'M', '型'), + (0x2F856, 'M', '堲'), + (0x2F857, 'M', '報'), + (0x2F858, 'M', '墬'), + (0x2F859, 'M', '𡓤'), + (0x2F85A, 'M', '売'), + (0x2F85B, 'M', '壷'), + (0x2F85C, 'M', '夆'), + (0x2F85D, 'M', '多'), + (0x2F85E, 'M', '夢'), + (0x2F85F, 'M', '奢'), + (0x2F860, 'M', '𡚨'), + (0x2F861, 'M', '𡛪'), + (0x2F862, 'M', '姬'), + (0x2F863, 'M', '娛'), + (0x2F864, 'M', '娧'), + (0x2F865, 'M', '姘'), + (0x2F866, 'M', '婦'), + (0x2F867, 'M', '㛮'), + (0x2F868, 'X'), + (0x2F869, 'M', '嬈'), + (0x2F86A, 'M', '嬾'), + (0x2F86C, 'M', '𡧈'), + (0x2F86D, 'M', '寃'), + (0x2F86E, 'M', '寘'), + (0x2F86F, 'M', '寧'), + (0x2F870, 'M', '寳'), + (0x2F871, 'M', '𡬘'), + (0x2F872, 'M', '寿'), + (0x2F873, 'M', '将'), + (0x2F874, 'X'), + (0x2F875, 'M', '尢'), + (0x2F876, 'M', '㞁'), + (0x2F877, 'M', '屠'), + (0x2F878, 'M', '屮'), + (0x2F879, 'M', '峀'), + (0x2F87A, 'M', '岍'), + (0x2F87B, 'M', '𡷤'), + (0x2F87C, 'M', '嵃'), + (0x2F87D, 'M', '𡷦'), + (0x2F87E, 'M', '嵮'), + (0x2F87F, 'M', '嵫'), + (0x2F880, 'M', '嵼'), + (0x2F881, 'M', '巡'), + (0x2F882, 'M', '巢'), + (0x2F883, 'M', '㠯'), + (0x2F884, 'M', '巽'), + (0x2F885, 'M', '帨'), + (0x2F886, 'M', '帽'), + (0x2F887, 'M', '幩'), + (0x2F888, 'M', '㡢'), + (0x2F889, 'M', '𢆃'), + (0x2F88A, 'M', '㡼'), + (0x2F88B, 'M', '庰'), + 
(0x2F88C, 'M', '庳'), + (0x2F88D, 'M', '庶'), + (0x2F88E, 'M', '廊'), + (0x2F88F, 'M', '𪎒'), + (0x2F890, 'M', '廾'), + (0x2F891, 'M', '𢌱'), + ] + +def _seg_77() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F893, 'M', '舁'), + (0x2F894, 'M', '弢'), + (0x2F896, 'M', '㣇'), + (0x2F897, 'M', '𣊸'), + (0x2F898, 'M', '𦇚'), + (0x2F899, 'M', '形'), + (0x2F89A, 'M', '彫'), + (0x2F89B, 'M', '㣣'), + (0x2F89C, 'M', '徚'), + (0x2F89D, 'M', '忍'), + (0x2F89E, 'M', '志'), + (0x2F89F, 'M', '忹'), + (0x2F8A0, 'M', '悁'), + (0x2F8A1, 'M', '㤺'), + (0x2F8A2, 'M', '㤜'), + (0x2F8A3, 'M', '悔'), + (0x2F8A4, 'M', '𢛔'), + (0x2F8A5, 'M', '惇'), + (0x2F8A6, 'M', '慈'), + (0x2F8A7, 'M', '慌'), + (0x2F8A8, 'M', '慎'), + (0x2F8A9, 'M', '慌'), + (0x2F8AA, 'M', '慺'), + (0x2F8AB, 'M', '憎'), + (0x2F8AC, 'M', '憲'), + (0x2F8AD, 'M', '憤'), + (0x2F8AE, 'M', '憯'), + (0x2F8AF, 'M', '懞'), + (0x2F8B0, 'M', '懲'), + (0x2F8B1, 'M', '懶'), + (0x2F8B2, 'M', '成'), + (0x2F8B3, 'M', '戛'), + (0x2F8B4, 'M', '扝'), + (0x2F8B5, 'M', '抱'), + (0x2F8B6, 'M', '拔'), + (0x2F8B7, 'M', '捐'), + (0x2F8B8, 'M', '𢬌'), + (0x2F8B9, 'M', '挽'), + (0x2F8BA, 'M', '拼'), + (0x2F8BB, 'M', '捨'), + (0x2F8BC, 'M', '掃'), + (0x2F8BD, 'M', '揤'), + (0x2F8BE, 'M', '𢯱'), + (0x2F8BF, 'M', '搢'), + (0x2F8C0, 'M', '揅'), + (0x2F8C1, 'M', '掩'), + (0x2F8C2, 'M', '㨮'), + (0x2F8C3, 'M', '摩'), + (0x2F8C4, 'M', '摾'), + (0x2F8C5, 'M', '撝'), + (0x2F8C6, 'M', '摷'), + (0x2F8C7, 'M', '㩬'), + (0x2F8C8, 'M', '敏'), + (0x2F8C9, 'M', '敬'), + (0x2F8CA, 'M', '𣀊'), + (0x2F8CB, 'M', '旣'), + (0x2F8CC, 'M', '書'), + (0x2F8CD, 'M', '晉'), + (0x2F8CE, 'M', '㬙'), + (0x2F8CF, 'M', '暑'), + (0x2F8D0, 'M', '㬈'), + (0x2F8D1, 'M', '㫤'), + (0x2F8D2, 'M', '冒'), + (0x2F8D3, 'M', '冕'), + (0x2F8D4, 'M', '最'), + (0x2F8D5, 'M', '暜'), + (0x2F8D6, 'M', '肭'), + (0x2F8D7, 'M', '䏙'), + (0x2F8D8, 'M', '朗'), + (0x2F8D9, 'M', '望'), + (0x2F8DA, 'M', '朡'), + (0x2F8DB, 'M', '杞'), + (0x2F8DC, 'M', '杓'), + (0x2F8DD, 'M', '𣏃'), + (0x2F8DE, 'M', '㭉'), + (0x2F8DF, 'M', '柺'), + (0x2F8E0, 'M', '枅'), + 
(0x2F8E1, 'M', '桒'), + (0x2F8E2, 'M', '梅'), + (0x2F8E3, 'M', '𣑭'), + (0x2F8E4, 'M', '梎'), + (0x2F8E5, 'M', '栟'), + (0x2F8E6, 'M', '椔'), + (0x2F8E7, 'M', '㮝'), + (0x2F8E8, 'M', '楂'), + (0x2F8E9, 'M', '榣'), + (0x2F8EA, 'M', '槪'), + (0x2F8EB, 'M', '檨'), + (0x2F8EC, 'M', '𣚣'), + (0x2F8ED, 'M', '櫛'), + (0x2F8EE, 'M', '㰘'), + (0x2F8EF, 'M', '次'), + (0x2F8F0, 'M', '𣢧'), + (0x2F8F1, 'M', '歔'), + (0x2F8F2, 'M', '㱎'), + (0x2F8F3, 'M', '歲'), + (0x2F8F4, 'M', '殟'), + (0x2F8F5, 'M', '殺'), + (0x2F8F6, 'M', '殻'), + (0x2F8F7, 'M', '𣪍'), + ] + +def _seg_78() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F8F8, 'M', '𡴋'), + (0x2F8F9, 'M', '𣫺'), + (0x2F8FA, 'M', '汎'), + (0x2F8FB, 'M', '𣲼'), + (0x2F8FC, 'M', '沿'), + (0x2F8FD, 'M', '泍'), + (0x2F8FE, 'M', '汧'), + (0x2F8FF, 'M', '洖'), + (0x2F900, 'M', '派'), + (0x2F901, 'M', '海'), + (0x2F902, 'M', '流'), + (0x2F903, 'M', '浩'), + (0x2F904, 'M', '浸'), + (0x2F905, 'M', '涅'), + (0x2F906, 'M', '𣴞'), + (0x2F907, 'M', '洴'), + (0x2F908, 'M', '港'), + (0x2F909, 'M', '湮'), + (0x2F90A, 'M', '㴳'), + (0x2F90B, 'M', '滋'), + (0x2F90C, 'M', '滇'), + (0x2F90D, 'M', '𣻑'), + (0x2F90E, 'M', '淹'), + (0x2F90F, 'M', '潮'), + (0x2F910, 'M', '𣽞'), + (0x2F911, 'M', '𣾎'), + (0x2F912, 'M', '濆'), + (0x2F913, 'M', '瀹'), + (0x2F914, 'M', '瀞'), + (0x2F915, 'M', '瀛'), + (0x2F916, 'M', '㶖'), + (0x2F917, 'M', '灊'), + (0x2F918, 'M', '災'), + (0x2F919, 'M', '灷'), + (0x2F91A, 'M', '炭'), + (0x2F91B, 'M', '𠔥'), + (0x2F91C, 'M', '煅'), + (0x2F91D, 'M', '𤉣'), + (0x2F91E, 'M', '熜'), + (0x2F91F, 'X'), + (0x2F920, 'M', '爨'), + (0x2F921, 'M', '爵'), + (0x2F922, 'M', '牐'), + (0x2F923, 'M', '𤘈'), + (0x2F924, 'M', '犀'), + (0x2F925, 'M', '犕'), + (0x2F926, 'M', '𤜵'), + (0x2F927, 'M', '𤠔'), + (0x2F928, 'M', '獺'), + (0x2F929, 'M', '王'), + (0x2F92A, 'M', '㺬'), + (0x2F92B, 'M', '玥'), + (0x2F92C, 'M', '㺸'), + (0x2F92E, 'M', '瑇'), + (0x2F92F, 'M', '瑜'), + (0x2F930, 'M', '瑱'), + (0x2F931, 'M', '璅'), + (0x2F932, 'M', '瓊'), + (0x2F933, 'M', '㼛'), + (0x2F934, 'M', '甤'), + 
(0x2F935, 'M', '𤰶'), + (0x2F936, 'M', '甾'), + (0x2F937, 'M', '𤲒'), + (0x2F938, 'M', '異'), + (0x2F939, 'M', '𢆟'), + (0x2F93A, 'M', '瘐'), + (0x2F93B, 'M', '𤾡'), + (0x2F93C, 'M', '𤾸'), + (0x2F93D, 'M', '𥁄'), + (0x2F93E, 'M', '㿼'), + (0x2F93F, 'M', '䀈'), + (0x2F940, 'M', '直'), + (0x2F941, 'M', '𥃳'), + (0x2F942, 'M', '𥃲'), + (0x2F943, 'M', '𥄙'), + (0x2F944, 'M', '𥄳'), + (0x2F945, 'M', '眞'), + (0x2F946, 'M', '真'), + (0x2F948, 'M', '睊'), + (0x2F949, 'M', '䀹'), + (0x2F94A, 'M', '瞋'), + (0x2F94B, 'M', '䁆'), + (0x2F94C, 'M', '䂖'), + (0x2F94D, 'M', '𥐝'), + (0x2F94E, 'M', '硎'), + (0x2F94F, 'M', '碌'), + (0x2F950, 'M', '磌'), + (0x2F951, 'M', '䃣'), + (0x2F952, 'M', '𥘦'), + (0x2F953, 'M', '祖'), + (0x2F954, 'M', '𥚚'), + (0x2F955, 'M', '𥛅'), + (0x2F956, 'M', '福'), + (0x2F957, 'M', '秫'), + (0x2F958, 'M', '䄯'), + (0x2F959, 'M', '穀'), + (0x2F95A, 'M', '穊'), + (0x2F95B, 'M', '穏'), + (0x2F95C, 'M', '𥥼'), + (0x2F95D, 'M', '𥪧'), + ] + +def _seg_79() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F95F, 'X'), + (0x2F960, 'M', '䈂'), + (0x2F961, 'M', '𥮫'), + (0x2F962, 'M', '篆'), + (0x2F963, 'M', '築'), + (0x2F964, 'M', '䈧'), + (0x2F965, 'M', '𥲀'), + (0x2F966, 'M', '糒'), + (0x2F967, 'M', '䊠'), + (0x2F968, 'M', '糨'), + (0x2F969, 'M', '糣'), + (0x2F96A, 'M', '紀'), + (0x2F96B, 'M', '𥾆'), + (0x2F96C, 'M', '絣'), + (0x2F96D, 'M', '䌁'), + (0x2F96E, 'M', '緇'), + (0x2F96F, 'M', '縂'), + (0x2F970, 'M', '繅'), + (0x2F971, 'M', '䌴'), + (0x2F972, 'M', '𦈨'), + (0x2F973, 'M', '𦉇'), + (0x2F974, 'M', '䍙'), + (0x2F975, 'M', '𦋙'), + (0x2F976, 'M', '罺'), + (0x2F977, 'M', '𦌾'), + (0x2F978, 'M', '羕'), + (0x2F979, 'M', '翺'), + (0x2F97A, 'M', '者'), + (0x2F97B, 'M', '𦓚'), + (0x2F97C, 'M', '𦔣'), + (0x2F97D, 'M', '聠'), + (0x2F97E, 'M', '𦖨'), + (0x2F97F, 'M', '聰'), + (0x2F980, 'M', '𣍟'), + (0x2F981, 'M', '䏕'), + (0x2F982, 'M', '育'), + (0x2F983, 'M', '脃'), + (0x2F984, 'M', '䐋'), + (0x2F985, 'M', '脾'), + (0x2F986, 'M', '媵'), + (0x2F987, 'M', '𦞧'), + (0x2F988, 'M', '𦞵'), + (0x2F989, 'M', '𣎓'), + 
(0x2F98A, 'M', '𣎜'), + (0x2F98B, 'M', '舁'), + (0x2F98C, 'M', '舄'), + (0x2F98D, 'M', '辞'), + (0x2F98E, 'M', '䑫'), + (0x2F98F, 'M', '芑'), + (0x2F990, 'M', '芋'), + (0x2F991, 'M', '芝'), + (0x2F992, 'M', '劳'), + (0x2F993, 'M', '花'), + (0x2F994, 'M', '芳'), + (0x2F995, 'M', '芽'), + (0x2F996, 'M', '苦'), + (0x2F997, 'M', '𦬼'), + (0x2F998, 'M', '若'), + (0x2F999, 'M', '茝'), + (0x2F99A, 'M', '荣'), + (0x2F99B, 'M', '莭'), + (0x2F99C, 'M', '茣'), + (0x2F99D, 'M', '莽'), + (0x2F99E, 'M', '菧'), + (0x2F99F, 'M', '著'), + (0x2F9A0, 'M', '荓'), + (0x2F9A1, 'M', '菊'), + (0x2F9A2, 'M', '菌'), + (0x2F9A3, 'M', '菜'), + (0x2F9A4, 'M', '𦰶'), + (0x2F9A5, 'M', '𦵫'), + (0x2F9A6, 'M', '𦳕'), + (0x2F9A7, 'M', '䔫'), + (0x2F9A8, 'M', '蓱'), + (0x2F9A9, 'M', '蓳'), + (0x2F9AA, 'M', '蔖'), + (0x2F9AB, 'M', '𧏊'), + (0x2F9AC, 'M', '蕤'), + (0x2F9AD, 'M', '𦼬'), + (0x2F9AE, 'M', '䕝'), + (0x2F9AF, 'M', '䕡'), + (0x2F9B0, 'M', '𦾱'), + (0x2F9B1, 'M', '𧃒'), + (0x2F9B2, 'M', '䕫'), + (0x2F9B3, 'M', '虐'), + (0x2F9B4, 'M', '虜'), + (0x2F9B5, 'M', '虧'), + (0x2F9B6, 'M', '虩'), + (0x2F9B7, 'M', '蚩'), + (0x2F9B8, 'M', '蚈'), + (0x2F9B9, 'M', '蜎'), + (0x2F9BA, 'M', '蛢'), + (0x2F9BB, 'M', '蝹'), + (0x2F9BC, 'M', '蜨'), + (0x2F9BD, 'M', '蝫'), + (0x2F9BE, 'M', '螆'), + (0x2F9BF, 'X'), + (0x2F9C0, 'M', '蟡'), + (0x2F9C1, 'M', '蠁'), + (0x2F9C2, 'M', '䗹'), + ] + +def _seg_80() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F9C3, 'M', '衠'), + (0x2F9C4, 'M', '衣'), + (0x2F9C5, 'M', '𧙧'), + (0x2F9C6, 'M', '裗'), + (0x2F9C7, 'M', '裞'), + (0x2F9C8, 'M', '䘵'), + (0x2F9C9, 'M', '裺'), + (0x2F9CA, 'M', '㒻'), + (0x2F9CB, 'M', '𧢮'), + (0x2F9CC, 'M', '𧥦'), + (0x2F9CD, 'M', '䚾'), + (0x2F9CE, 'M', '䛇'), + (0x2F9CF, 'M', '誠'), + (0x2F9D0, 'M', '諭'), + (0x2F9D1, 'M', '變'), + (0x2F9D2, 'M', '豕'), + (0x2F9D3, 'M', '𧲨'), + (0x2F9D4, 'M', '貫'), + (0x2F9D5, 'M', '賁'), + (0x2F9D6, 'M', '贛'), + (0x2F9D7, 'M', '起'), + (0x2F9D8, 'M', '𧼯'), + (0x2F9D9, 'M', '𠠄'), + (0x2F9DA, 'M', '跋'), + (0x2F9DB, 'M', '趼'), + (0x2F9DC, 'M', '跰'), + 
(0x2F9DD, 'M', '𠣞'), + (0x2F9DE, 'M', '軔'), + (0x2F9DF, 'M', '輸'), + (0x2F9E0, 'M', '𨗒'), + (0x2F9E1, 'M', '𨗭'), + (0x2F9E2, 'M', '邔'), + (0x2F9E3, 'M', '郱'), + (0x2F9E4, 'M', '鄑'), + (0x2F9E5, 'M', '𨜮'), + (0x2F9E6, 'M', '鄛'), + (0x2F9E7, 'M', '鈸'), + (0x2F9E8, 'M', '鋗'), + (0x2F9E9, 'M', '鋘'), + (0x2F9EA, 'M', '鉼'), + (0x2F9EB, 'M', '鏹'), + (0x2F9EC, 'M', '鐕'), + (0x2F9ED, 'M', '𨯺'), + (0x2F9EE, 'M', '開'), + (0x2F9EF, 'M', '䦕'), + (0x2F9F0, 'M', '閷'), + (0x2F9F1, 'M', '𨵷'), + (0x2F9F2, 'M', '䧦'), + (0x2F9F3, 'M', '雃'), + (0x2F9F4, 'M', '嶲'), + (0x2F9F5, 'M', '霣'), + (0x2F9F6, 'M', '𩅅'), + (0x2F9F7, 'M', '𩈚'), + (0x2F9F8, 'M', '䩮'), + (0x2F9F9, 'M', '䩶'), + (0x2F9FA, 'M', '韠'), + (0x2F9FB, 'M', '𩐊'), + (0x2F9FC, 'M', '䪲'), + (0x2F9FD, 'M', '𩒖'), + (0x2F9FE, 'M', '頋'), + (0x2FA00, 'M', '頩'), + (0x2FA01, 'M', '𩖶'), + (0x2FA02, 'M', '飢'), + (0x2FA03, 'M', '䬳'), + (0x2FA04, 'M', '餩'), + (0x2FA05, 'M', '馧'), + (0x2FA06, 'M', '駂'), + (0x2FA07, 'M', '駾'), + (0x2FA08, 'M', '䯎'), + (0x2FA09, 'M', '𩬰'), + (0x2FA0A, 'M', '鬒'), + (0x2FA0B, 'M', '鱀'), + (0x2FA0C, 'M', '鳽'), + (0x2FA0D, 'M', '䳎'), + (0x2FA0E, 'M', '䳭'), + (0x2FA0F, 'M', '鵧'), + (0x2FA10, 'M', '𪃎'), + (0x2FA11, 'M', '䳸'), + (0x2FA12, 'M', '𪄅'), + (0x2FA13, 'M', '𪈎'), + (0x2FA14, 'M', '𪊑'), + (0x2FA15, 'M', '麻'), + (0x2FA16, 'M', '䵖'), + (0x2FA17, 'M', '黹'), + (0x2FA18, 'M', '黾'), + (0x2FA19, 'M', '鼅'), + (0x2FA1A, 'M', '鼏'), + (0x2FA1B, 'M', '鼖'), + (0x2FA1C, 'M', '鼻'), + (0x2FA1D, 'M', '𪘀'), + (0x2FA1E, 'X'), + (0x30000, 'V'), + (0x3134B, 'X'), + (0xE0100, 'I'), + (0xE01F0, 'X'), + ] + +uts46data = tuple( + _seg_0() + + _seg_1() + + _seg_2() + + _seg_3() + + _seg_4() + + _seg_5() + + _seg_6() + + _seg_7() + + _seg_8() + + _seg_9() + + _seg_10() + + _seg_11() + + _seg_12() + + _seg_13() + + _seg_14() + + _seg_15() + + _seg_16() + + _seg_17() + + _seg_18() + + _seg_19() + + _seg_20() + + _seg_21() + + _seg_22() + + _seg_23() + + _seg_24() + + _seg_25() + + _seg_26() + + _seg_27() + + _seg_28() + + _seg_29() + + 
_seg_30() + + _seg_31() + + _seg_32() + + _seg_33() + + _seg_34() + + _seg_35() + + _seg_36() + + _seg_37() + + _seg_38() + + _seg_39() + + _seg_40() + + _seg_41() + + _seg_42() + + _seg_43() + + _seg_44() + + _seg_45() + + _seg_46() + + _seg_47() + + _seg_48() + + _seg_49() + + _seg_50() + + _seg_51() + + _seg_52() + + _seg_53() + + _seg_54() + + _seg_55() + + _seg_56() + + _seg_57() + + _seg_58() + + _seg_59() + + _seg_60() + + _seg_61() + + _seg_62() + + _seg_63() + + _seg_64() + + _seg_65() + + _seg_66() + + _seg_67() + + _seg_68() + + _seg_69() + + _seg_70() + + _seg_71() + + _seg_72() + + _seg_73() + + _seg_74() + + _seg_75() + + _seg_76() + + _seg_77() + + _seg_78() + + _seg_79() + + _seg_80() +) # type: Tuple[Union[Tuple[int, str], Tuple[int, str, str]], ...] diff --git a/.venv/lib/python3.9/site-packages/jsonrpcclient-4.0.2.dist-info/INSTALLER b/.venv/lib/python3.9/site-packages/jsonrpcclient-4.0.2.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonrpcclient-4.0.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.9/site-packages/jsonrpcclient-4.0.2.dist-info/LICENSE b/.venv/lib/python3.9/site-packages/jsonrpcclient-4.0.2.dist-info/LICENSE new file mode 100644 index 0000000..f0cb90b --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonrpcclient-4.0.2.dist-info/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015 Beau Barker (beau at explodinglabs.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall 
be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/.venv/lib/python3.9/site-packages/jsonrpcclient-4.0.2.dist-info/METADATA b/.venv/lib/python3.9/site-packages/jsonrpcclient-4.0.2.dist-info/METADATA new file mode 100644 index 0000000..94ac90b --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonrpcclient-4.0.2.dist-info/METADATA @@ -0,0 +1,54 @@ +Metadata-Version: 2.1 +Name: jsonrpcclient +Version: 4.0.2 +Summary: Send JSON-RPC requests +Home-page: https://github.com/explodinglabs/jsonrpcclient +Author: Beau Barker +Author-email: beau@explodinglabs.com +License: MIT +Platform: UNKNOWN +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Description-Content-Type: text/markdown +License-File: LICENSE + +> September 1, 2021: Version 4 has been released. Read about the [changes in +> version 4](https://composed.blog/jsonrpcclient-4-changes), or read the [full +> documentation](https://www.jsonrpcclient.com/). +> For earlier versions jump to the [3.x +> branch](https://github.com/explodinglabs/jsonrpcclient/tree/3.x) or the +> [documentation for version 3](https://www.jsonrpcclient.com/en/3.3.6/). + +jsonrpcclient + +Generate JSON-RPC requests and parse responses in Python. 
+ +![PyPI](https://img.shields.io/pypi/v/jsonrpcclient.svg) +![Downloads](https://pepy.tech/badge/jsonrpcclient/week) +![Code Quality](https://github.com/explodinglabs/jsonrpcclient/actions/workflows/code-quality.yml/badge.svg) +![Coverage Status](https://coveralls.io/repos/github/explodinglabs/jsonrpcclient/badge.svg?branch=main) + +```sh +pip install jsonrpcclient +``` + +```python +>>> from jsonrpcclient import parse, request +>>> import requests +>>> response = requests.post("http://localhost:5000/", json=request("ping")) +>>> parse(response.json()) +Ok(result='pong', id=1) +``` + +Full documentation is at [jsonrpcclient.com](https://www.jsonrpcclient.com/). + +See also: [jsonrpcserver](https://github.com/explodinglabs/jsonrpcserver) + + diff --git a/.venv/lib/python3.9/site-packages/jsonrpcclient-4.0.2.dist-info/RECORD b/.venv/lib/python3.9/site-packages/jsonrpcclient-4.0.2.dist-info/RECORD new file mode 100644 index 0000000..7fff793 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonrpcclient-4.0.2.dist-info/RECORD @@ -0,0 +1,19 @@ +jsonrpcclient-4.0.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +jsonrpcclient-4.0.2.dist-info/LICENSE,sha256=SQ2uE27Zo1SqfTzDWCYlE3f0LCChhp5VYnW31RI1x40,1106 +jsonrpcclient-4.0.2.dist-info/METADATA,sha256=BabjeUsQxu4DK15pl_rnJ7Y52H6fKbd3jQWBACWdVFk,1882 +jsonrpcclient-4.0.2.dist-info/RECORD,, +jsonrpcclient-4.0.2.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 +jsonrpcclient-4.0.2.dist-info/top_level.txt,sha256=kp0uiza-tZq1URn-ybda_Yp7Z3i1F-rl6SPeErE1N50,14 +jsonrpcclient/__init__.py,sha256=vR_mz374-v1ndbgmq1xgzO7ktT0KQ9XavHFBAX1fFf8,275 +jsonrpcclient/__pycache__/__init__.cpython-39.pyc,, +jsonrpcclient/__pycache__/id_generators.cpython-39.pyc,, +jsonrpcclient/__pycache__/requests.cpython-39.pyc,, +jsonrpcclient/__pycache__/responses.cpython-39.pyc,, +jsonrpcclient/__pycache__/sentinels.cpython-39.pyc,, +jsonrpcclient/__pycache__/utils.cpython-39.pyc,, 
+jsonrpcclient/id_generators.py,sha256=-NS5C6KfdGfybmd9jl4atpIqNbRCOAd3vU5fEpstd_0,1316 +jsonrpcclient/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +jsonrpcclient/requests.py,sha256=Djy2KhnZVGXDvuFVuI21qTQCHVR8Z2ONKnt25Lbv3jo,1865 +jsonrpcclient/responses.py,sha256=6wxwnQ9W-AcUrQRPLHs2fWfEon2gk7yM8rs5lexTReE,1121 +jsonrpcclient/sentinels.py,sha256=8wuismnV3msfyVHt_YNKoqP5zA-V9vWsBWCMdU3pfRI,215 +jsonrpcclient/utils.py,sha256=W1FWIR7poIiWih3iejLlqN--xVPen78Zl0nhTU2X40M,290 diff --git a/.venv/lib/python3.9/site-packages/jsonrpcclient-4.0.2.dist-info/WHEEL b/.venv/lib/python3.9/site-packages/jsonrpcclient-4.0.2.dist-info/WHEEL new file mode 100644 index 0000000..becc9a6 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonrpcclient-4.0.2.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/.venv/lib/python3.9/site-packages/jsonrpcclient-4.0.2.dist-info/top_level.txt b/.venv/lib/python3.9/site-packages/jsonrpcclient-4.0.2.dist-info/top_level.txt new file mode 100644 index 0000000..2d9814b --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonrpcclient-4.0.2.dist-info/top_level.txt @@ -0,0 +1 @@ +jsonrpcclient diff --git a/.venv/lib/python3.9/site-packages/jsonrpcclient/__init__.py b/.venv/lib/python3.9/site-packages/jsonrpcclient/__init__.py new file mode 100644 index 0000000..4458be4 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonrpcclient/__init__.py @@ -0,0 +1,13 @@ +from .requests import ( + notification, + notification_json, + request, + request_hex, + request_json, + request_json_hex, + request_json_random, + request_json_uuid, + request_random, + request_uuid, +) +from .responses import Ok, Error, parse, parse_json diff --git a/.venv/lib/python3.9/site-packages/jsonrpcclient/id_generators.py b/.venv/lib/python3.9/site-packages/jsonrpcclient/id_generators.py new file mode 100644 index 0000000..b54b119 --- /dev/null +++ 
b/.venv/lib/python3.9/site-packages/jsonrpcclient/id_generators.py @@ -0,0 +1,61 @@ +"""Generators which yield an id to include in a JSON-RPC request.""" +import itertools +from random import choice +from string import ascii_lowercase, digits +from typing import Iterator +from uuid import uuid4 + + +def decimal(start: int = 1) -> Iterator[int]: + """ + Increments from `start`. + + e.g. 1, 2, 3, .. 9, 10, 11, etc. + + Args: + start: The first value to start with. + """ + return itertools.count(start) + + +def hexadecimal(start: int = 1) -> Iterator[str]: + """ + Incremental hexadecimal numbers. + + e.g. 1, 2, 3, .. 9, a, b, etc. + + Args: + start: The first value to start with. + """ + while True: + yield "%x" % start + start += 1 + + +def random(length: int = 8, chars: str = digits + ascii_lowercase) -> Iterator[str]: + """ + A random string. + + Not unique, but has around 1 in a million chance of collision (with the default 8 + character length). + + Example: + 'fubui5e6' + + Args: + length: Length of the random string. + chars: The characters to randomly choose from. + """ + while True: + yield "".join([choice(chars) for _ in range(length)]) + + +def uuid() -> Iterator[str]: + """ + Unique uuid ids. + + Example: + '9bfe2c93-717e-4a45-b91b-55422c5af4ff' + """ + while True: + yield str(uuid4()) diff --git a/.venv/lib/python3.9/site-packages/jsonrpcclient/py.typed b/.venv/lib/python3.9/site-packages/jsonrpcclient/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.9/site-packages/jsonrpcclient/requests.py b/.venv/lib/python3.9/site-packages/jsonrpcclient/requests.py new file mode 100644 index 0000000..826ef20 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonrpcclient/requests.py @@ -0,0 +1,67 @@ +import json +from functools import partial +from typing import Any, Dict, Iterator, Tuple, Union + +from . 
import id_generators +from .sentinels import NOID +from .utils import compose + + +def notification_pure( + method: str, params: Union[Dict[str, Any], Tuple[Any, ...]] +) -> Dict[str, Any]: + return { + "jsonrpc": "2.0", + "method": method, + **({"params": params} if params else {}), + } + + +def notification( + method: str, params: Union[Dict[str, Any], Tuple[Any, ...], None] = None +) -> Dict[str, Any]: + return notification_pure(method, params if params else ()) + + +notification_json = compose(json.dumps, notification) + + +def request_pure( + id_generator: Iterator[Any], + method: str, + params: Union[Dict[str, Any], Tuple[Any, ...]], + id: Any, +) -> Dict[str, Any]: + return { + "jsonrpc": "2.0", + "method": method, + **( + {"params": list(params) if isinstance(params, tuple) else params} + if params + else {} + ), + "id": id if id is not NOID else next(id_generator), + } + + +def request_impure( + id_generator: Iterator[Any], + method: str, + params: Union[Dict[str, Any], Tuple[Any, ...], None] = None, + id: Any = NOID, +) -> Dict[str, Any]: + return request_pure( + id_generator or id_generators.decimal(), method, params or (), id + ) + + +request_natural = partial(request_impure, id_generators.decimal()) +request_hex = partial(request_impure, id_generators.hexadecimal()) +request_random = partial(request_impure, id_generators.random()) +request_uuid = partial(request_impure, id_generators.uuid()) +request = request_natural + +request_json = compose(json.dumps, request_natural) +request_json_hex = compose(json.dumps, request_hex) +request_json_random = compose(json.dumps, request_random) +request_json_uuid = compose(json.dumps, request_uuid) diff --git a/.venv/lib/python3.9/site-packages/jsonrpcclient/responses.py b/.venv/lib/python3.9/site-packages/jsonrpcclient/responses.py new file mode 100644 index 0000000..123be48 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonrpcclient/responses.py @@ -0,0 +1,49 @@ +from typing import Any, Dict, Iterable, 
List, Union, NamedTuple +import json + +from .utils import compose + +Deserialized = Union[Dict[str, Any], List[Dict[str, Any]]] + + +class Ok(NamedTuple): + result: Any + id: Any + + def __repr__(self) -> str: + return f"Ok(result={self.result!r}, id={self.id!r})" + + +class Error(NamedTuple): + code: int + message: str + data: Any + id: Any + + def __repr__(self) -> str: + return f"Error(code={self.code!r}, message={self.message!r}, data={self.data!r}, id={self.id!r})" + + +Response = Union[Ok, Error] + + +def to_result(response: Dict[str, Any]) -> Response: + return ( + Ok(response["result"], response["id"]) + if "result" in response + else Error( + response["error"]["code"], + response["error"]["message"], + response["error"].get("data"), + response["id"], + ) + ) + + +def parse(response: Deserialized) -> Union[Response, Iterable[Response]]: + return ( + map(to_result, response) if isinstance(response, list) else to_result(response) + ) + + +parse_json = compose(parse, json.loads) diff --git a/.venv/lib/python3.9/site-packages/jsonrpcclient/sentinels.py b/.venv/lib/python3.9/site-packages/jsonrpcclient/sentinels.py new file mode 100644 index 0000000..b46f658 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonrpcclient/sentinels.py @@ -0,0 +1,12 @@ +import sys + + +class Sentinel: + def __init__(self, name: str): + self.name = name + + def __repr__(self) -> str: + return f"<{sys.intern(str(self.name)).rsplit('.', 1)[-1]}>" + + +NOID = Sentinel("NoId") diff --git a/.venv/lib/python3.9/site-packages/jsonrpcclient/utils.py b/.venv/lib/python3.9/site-packages/jsonrpcclient/utils.py new file mode 100644 index 0000000..2caaf7e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonrpcclient/utils.py @@ -0,0 +1,9 @@ +from functools import reduce +from typing import Any, Callable + + +def compose(*fs: Callable[..., Any]) -> Callable[..., Any]: + def compose2(f: Callable[..., Any], g: Callable[..., Any]) -> Callable[..., Any]: + return lambda *a, **kw: f(g(*a, 
**kw)) + + return reduce(compose2, fs) diff --git a/.venv/lib/python3.9/site-packages/jsonrpcserver-5.0.7.dist-info/INSTALLER b/.venv/lib/python3.9/site-packages/jsonrpcserver-5.0.7.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonrpcserver-5.0.7.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.9/site-packages/jsonrpcserver-5.0.7.dist-info/LICENSE b/.venv/lib/python3.9/site-packages/jsonrpcserver-5.0.7.dist-info/LICENSE new file mode 100644 index 0000000..8873548 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonrpcserver-5.0.7.dist-info/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2021 Beau Barker (beau at explodinglabs.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/.venv/lib/python3.9/site-packages/jsonrpcserver-5.0.7.dist-info/METADATA b/.venv/lib/python3.9/site-packages/jsonrpcserver-5.0.7.dist-info/METADATA new file mode 100644 index 0000000..145774b --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonrpcserver-5.0.7.dist-info/METADATA @@ -0,0 +1,56 @@ +Metadata-Version: 2.1 +Name: jsonrpcserver +Version: 5.0.7 +Summary: Process JSON-RPC requests +Home-page: https://github.com/explodinglabs/jsonrpcserver +Author: Beau Barker +Author-email: beau@explodinglabs.com +License: MIT +Platform: UNKNOWN +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Description-Content-Type: text/markdown +License-File: LICENSE +Requires-Dist: jsonschema (<5) +Requires-Dist: oslash (<1) +Provides-Extra: examples +Requires-Dist: aiohttp ; extra == 'examples' +Requires-Dist: aiozmq ; extra == 'examples' +Requires-Dist: flask ; extra == 'examples' +Requires-Dist: flask-socketio ; extra == 'examples' +Requires-Dist: gmqtt ; extra == 'examples' +Requires-Dist: pyzmq ; extra == 'examples' +Requires-Dist: tornado ; extra == 'examples' +Requires-Dist: websockets ; extra == 'examples' +Requires-Dist: werkzeug ; extra == 'examples' + +jsonrpcserver + +![PyPI](https://img.shields.io/pypi/v/jsonrpcserver.svg) +![Code Quality](https://github.com/explodinglabs/jsonrpcserver/actions/workflows/code-quality.yml/badge.svg) +![Coverage Status](https://coveralls.io/repos/github/explodinglabs/jsonrpcserver/badge.svg?branch=main) +![Downloads](https://img.shields.io/pypi/dm/jsonrpcserver.svg) + +Process incoming JSON-RPC requests in Python. + +```python +from jsonrpcserver import Success, method, serve + +@method +def ping(): + return Success("pong") + +if __name__ == "__main__": + serve() +``` + +Full documentation is at [jsonrpcserver.com](https://www.jsonrpcserver.com/). 
+ +See also: [jsonrpcclient](https://github.com/explodinglabs/jsonrpcclient) + + diff --git a/.venv/lib/python3.9/site-packages/jsonrpcserver-5.0.7.dist-info/RECORD b/.venv/lib/python3.9/site-packages/jsonrpcserver-5.0.7.dist-info/RECORD new file mode 100644 index 0000000..db787fb --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonrpcserver-5.0.7.dist-info/RECORD @@ -0,0 +1,36 @@ +jsonrpcserver-5.0.7.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +jsonrpcserver-5.0.7.dist-info/LICENSE,sha256=_FwZBYAN8tqi6GIMUwE3E9UUG06-g-gEjYUrCr7c2Gg,1106 +jsonrpcserver-5.0.7.dist-info/METADATA,sha256=6HZeetp18tphunIWQ58-CSiQ95uTXop2D4AuprlrEso,1783 +jsonrpcserver-5.0.7.dist-info/RECORD,, +jsonrpcserver-5.0.7.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 +jsonrpcserver-5.0.7.dist-info/top_level.txt,sha256=UpzcVtd6PqlNcQwPaQbEkHH0HCDTGewm8JKRrLSsGbo,14 +jsonrpcserver/__init__.py,sha256=lo7Dl9fYocucehMa3Am2zxdTSVBRCFJU0dx64T6MrW0,764 +jsonrpcserver/__pycache__/__init__.cpython-39.pyc,, +jsonrpcserver/__pycache__/async_dispatcher.cpython-39.pyc,, +jsonrpcserver/__pycache__/async_main.cpython-39.pyc,, +jsonrpcserver/__pycache__/codes.cpython-39.pyc,, +jsonrpcserver/__pycache__/dispatcher.cpython-39.pyc,, +jsonrpcserver/__pycache__/exceptions.cpython-39.pyc,, +jsonrpcserver/__pycache__/main.cpython-39.pyc,, +jsonrpcserver/__pycache__/methods.cpython-39.pyc,, +jsonrpcserver/__pycache__/request.cpython-39.pyc,, +jsonrpcserver/__pycache__/response.cpython-39.pyc,, +jsonrpcserver/__pycache__/result.cpython-39.pyc,, +jsonrpcserver/__pycache__/sentinels.cpython-39.pyc,, +jsonrpcserver/__pycache__/server.cpython-39.pyc,, +jsonrpcserver/__pycache__/utils.cpython-39.pyc,, +jsonrpcserver/async_dispatcher.py,sha256=oYxM7FJd77IxFhkPFAYhLKIha5-ZEtnMr0Ncde2Scww,3007 +jsonrpcserver/async_main.py,sha256=uRpbT18XQxWPnm2Qpu8gOC3z9Ri5ulm1c70j30EjwmI,1726 +jsonrpcserver/codes.py,sha256=AsNeg8cx0npfCLOX9JxZf-IJGbckqV4C9l1K9xYGqfQ,260 
+jsonrpcserver/dispatcher.py,sha256=DO1KyNzAw_WEVjgIJhuncAALIxXLH5RHGtR5aaDJqkY,10082 +jsonrpcserver/exceptions.py,sha256=-lTAUiYq1WVLOdJ9igaI7ZysUDy6LMPm2EEyArrg87Y,408 +jsonrpcserver/main.py,sha256=aNhFbZXaJlHxtLv0_HIsbtPz_-_M2WIg3eH39Wy8Gas,4245 +jsonrpcserver/methods.py,sha256=sXOIAf-IGuCsPgZxv6x0R8UcG_z8Jl4vgksRTxlCCmg,1389 +jsonrpcserver/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +jsonrpcserver/request-schema.json,sha256=JY5j2q8hU6hbJBdyv7I932YDw5AjuMmw-v3KUDVyvTc,1277 +jsonrpcserver/request.py,sha256=wTMYYS-5PTb3GQBqbGzZAS8v94jmKH5O8EOazvpXE74,374 +jsonrpcserver/response.py,sha256=Rqslse-EvFIoPMp0MPpKmQ-PMARV9JO3YOIz5Kbvzfo,3139 +jsonrpcserver/result.py,sha256=s1sfhBzdg0lafkBrDSwFpRgc8cbTzRqOy_472tRYN9w,1942 +jsonrpcserver/sentinels.py,sha256=JUG4MQrLEPc7I9ZdsJxLGxS3JqslN7BK2nciEeqLnGE,407 +jsonrpcserver/server.py,sha256=-9eZh8NMwUM0Sj4gWDqnzUR55BvSb014WEBP9ifL1Dw,683 +jsonrpcserver/utils.py,sha256=xZogVpT18FhouvOmnRZ7LU-1ujMCSlHiv0BURjz7iXE,408 diff --git a/.venv/lib/python3.9/site-packages/jsonrpcserver-5.0.7.dist-info/WHEEL b/.venv/lib/python3.9/site-packages/jsonrpcserver-5.0.7.dist-info/WHEEL new file mode 100644 index 0000000..becc9a6 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonrpcserver-5.0.7.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/.venv/lib/python3.9/site-packages/jsonrpcserver-5.0.7.dist-info/top_level.txt b/.venv/lib/python3.9/site-packages/jsonrpcserver-5.0.7.dist-info/top_level.txt new file mode 100644 index 0000000..e1fe4b4 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonrpcserver-5.0.7.dist-info/top_level.txt @@ -0,0 +1 @@ +jsonrpcserver diff --git a/.venv/lib/python3.9/site-packages/jsonrpcserver/__init__.py b/.venv/lib/python3.9/site-packages/jsonrpcserver/__init__.py new file mode 100644 index 0000000..f28d26c --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonrpcserver/__init__.py @@ -0,0 
+1,28 @@ +"""Use __all__ so mypy considers these re-exported.""" +__all__ = [ + "Error", + "InvalidParams", + "JsonRpcError", + "Result", + "Success", + "async_dispatch", + "async_dispatch_to_response", + "async_dispatch_to_serializable", + "dispatch", + "dispatch_to_response", + "dispatch_to_serializable", + "method", + "serve", +] + + +from .async_main import ( + dispatch as async_dispatch, + dispatch_to_response as async_dispatch_to_response, + dispatch_to_serializable as async_dispatch_to_serializable, +) +from .exceptions import JsonRpcError +from .main import dispatch, dispatch_to_response, dispatch_to_serializable +from .methods import method +from .result import Error, InvalidParams, Result, Success +from .server import serve as serve diff --git a/.venv/lib/python3.9/site-packages/jsonrpcserver/async_dispatcher.py b/.venv/lib/python3.9/site-packages/jsonrpcserver/async_dispatcher.py new file mode 100644 index 0000000..2ed3e8a --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonrpcserver/async_dispatcher.py @@ -0,0 +1,104 @@ +"""Async version of dispatcher.py""" + +from functools import partial +from itertools import starmap +from typing import Any, Callable, Iterable, Tuple, Union +import asyncio +import logging + +from oslash.either import Left # type: ignore + +from .dispatcher import ( + Deserialized, + create_request, + deserialize_request, + extract_args, + extract_kwargs, + extract_list, + get_method, + not_notification, + to_response, + validate_args, + validate_request, + validate_result, +) +from .exceptions import JsonRpcError +from .methods import Method, Methods +from .request import Request +from .result import Result, InternalErrorResult, ErrorResult +from .response import Response, ServerErrorResponse +from .utils import make_list + + +async def call(request: Request, context: Any, method: Method) -> Result: + try: + result = await method( + *extract_args(request, context), **extract_kwargs(request) + ) + validate_result(result) + 
except JsonRpcError as exc: + return Left(ErrorResult(code=exc.code, message=exc.message, data=exc.data)) + except Exception as exc: # Other error inside method - Internal error + logging.exception(exc) + return Left(InternalErrorResult(str(exc))) + return result + + +async def dispatch_request( + methods: Methods, context: Any, request: Request +) -> Tuple[Request, Result]: + method = get_method(methods, request.method).bind( + partial(validate_args, request, context) + ) + return ( + request, + method + if isinstance(method, Left) + else await call(request, context, method._value), + ) + + +async def dispatch_deserialized( + methods: Methods, + context: Any, + post_process: Callable[[Response], Iterable[Any]], + deserialized: Deserialized, +) -> Union[Response, Iterable[Response], None]: + results = await asyncio.gather( + *( + dispatch_request(methods, context, r) + for r in map(create_request, make_list(deserialized)) + ) + ) + return extract_list( + isinstance(deserialized, list), + map( + post_process, + starmap(to_response, filter(not_notification, results)), + ), + ) + + +async def dispatch_to_response_pure( + *, + deserializer: Callable[[str], Deserialized], + validator: Callable[[Deserialized], Deserialized], + methods: Methods, + context: Any, + post_process: Callable[[Response], Iterable[Any]], + request: str, +) -> Union[Response, Iterable[Response], None]: + try: + result = deserialize_request(deserializer, request).bind( + partial(validate_request, validator) + ) + return ( + post_process(result) + if isinstance(result, Left) + else await dispatch_deserialized( + methods, context, post_process, result._value + ) + ) + except Exception as exc: + logging.exception(exc) + return post_process(Left(ServerErrorResponse(str(exc), None))) diff --git a/.venv/lib/python3.9/site-packages/jsonrpcserver/async_main.py b/.venv/lib/python3.9/site-packages/jsonrpcserver/async_main.py new file mode 100644 index 0000000..a5db4fb --- /dev/null +++ 
b/.venv/lib/python3.9/site-packages/jsonrpcserver/async_main.py @@ -0,0 +1,53 @@ +"""Async version of main.py. The public async functions.""" +import json +from typing import Any, Callable, Dict, Iterable, List, Optional, Union, cast + +from .async_dispatcher import dispatch_to_response_pure +from .dispatcher import Deserialized +from .main import default_validator, default_deserializer +from .methods import Methods, global_methods +from .response import Response, to_serializable +from .sentinels import NOCONTEXT +from .utils import identity + + +async def dispatch_to_response( + request: str, + methods: Optional[Methods] = None, + *, + context: Any = NOCONTEXT, + deserializer: Callable[[str], Deserialized] = default_deserializer, + validator: Callable[[Deserialized], Deserialized] = default_validator, + post_process: Callable[[Response], Any] = identity, +) -> Union[Response, Iterable[Response], None]: + return await dispatch_to_response_pure( + deserializer=deserializer, + validator=validator, + post_process=post_process, + context=context, + methods=global_methods if methods is None else methods, + request=request, + ) + + +async def dispatch_to_serializable( + *args: Any, **kwargs: Any +) -> Union[Dict[str, Any], List[Dict[str, Any]], None]: + return cast( + Union[Dict[str, Any], List[Dict[str, Any]], None], + await dispatch_to_response(*args, post_process=to_serializable, **kwargs), + ) + + +async def dispatch_to_json( + *args: Any, + serializer: Callable[ + [Union[Dict[str, Any], List[Dict[str, Any]], None]], str + ] = json.dumps, + **kwargs: Any, +) -> str: + response = await dispatch_to_serializable(*args, **kwargs) + return "" if response is None else serializer(response) + + +dispatch = dispatch_to_json diff --git a/.venv/lib/python3.9/site-packages/jsonrpcserver/codes.py b/.venv/lib/python3.9/site-packages/jsonrpcserver/codes.py new file mode 100644 index 0000000..6634f63 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonrpcserver/codes.py @@ 
-0,0 +1,8 @@ +"""JSONRPC error codes from http://www.jsonrpc.org/specification#error_object""" + +ERROR_PARSE_ERROR = -32700 +ERROR_INVALID_REQUEST = -32600 +ERROR_METHOD_NOT_FOUND = -32601 +ERROR_INVALID_PARAMS = -32602 +ERROR_INTERNAL_ERROR = -32603 +ERROR_SERVER_ERROR = -32000 diff --git a/.venv/lib/python3.9/site-packages/jsonrpcserver/dispatcher.py b/.venv/lib/python3.9/site-packages/jsonrpcserver/dispatcher.py new file mode 100644 index 0000000..74e2644 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonrpcserver/dispatcher.py @@ -0,0 +1,282 @@ +"""Dispatcher - does the hard work of this library: parses, validates and dispatches +requests, providing responses. +""" +from functools import partial +from inspect import signature +from itertools import starmap +from typing import Any, Callable, Dict, Iterable, List, Tuple, Union +import logging + +from oslash.either import Either, Left, Right # type: ignore + +from .exceptions import JsonRpcError +from .methods import Method, Methods +from .request import Request +from .response import ( + ErrorResponse, + InvalidRequestResponse, + ParseErrorResponse, + Response, + ServerErrorResponse, + SuccessResponse, +) +from .result import ( + ErrorResult, + InternalErrorResult, + InvalidParamsResult, + MethodNotFoundResult, + Result, + SuccessResult, +) +from .sentinels import NOCONTEXT, NOID +from .utils import compose, make_list + +Deserialized = Union[Dict[str, Any], List[Dict[str, Any]]] + + +def extract_list( + is_batch: bool, responses: Iterable[Response] +) -> Union[Response, List[Response], None]: + """This is the inverse of make_list. Here we extract a response back out of the list + if it wasn't a batch request originally. Also applies a JSON-RPC rule: we do not + respond to batches of notifications. + + Args: + is_batch: True if the original request was a batch. + responses: Iterable of responses. 
+ + Returns: A single response, a batch of responses, or None (returns None to a + notification or batch of notifications, to indicate we should not respond). + """ + # Need to materialize the iterable here to determine if it's empty. At least we're + # at the end of processing (also need a list, not a generator, to serialize a batch + # response with json.dumps). + response_list = list(responses) + # Responses have been removed, so in the case of either a single notification or a + # batch of only notifications, return None + if len(response_list) == 0: + return None + # For batches containing at least one non-notification, return the list + elif is_batch: + return response_list + # For single requests, extract it back from the list (there will be only one). + else: + return response_list[0] + + +def to_response(request: Request, result: Result) -> Response: + """Maps a Request plus a Result to a Response. A Response is just a Result plus the + id from the original Request. + + Raises: AssertionError if the request is a notification. Notifications can't be + responded to. If a notification is given and AssertionError is raised, we should + respond with Server Error, because notifications should have been removed by + this stage. + + Returns: A Response. + """ + assert request.id is not NOID + return ( + Left(ErrorResponse(**result._error._asdict(), id=request.id)) + if isinstance(result, Left) + else Right(SuccessResponse(**result._value._asdict(), id=request.id)) + ) + + +def extract_args(request: Request, context: Any) -> List[Any]: + """Extracts the positional arguments from the request. + + If a context object is given, it's added as the first argument. + + Returns: A list containing the positional arguments. 
+ """ + params = request.params if isinstance(request.params, list) else [] + return [context] + params if context is not NOCONTEXT else params + + +def extract_kwargs(request: Request) -> Dict[str, Any]: + """Extracts the keyword arguments from the reqeust. + + Returns: A dict containing the keyword arguments. + """ + return request.params if isinstance(request.params, dict) else {} + + +def validate_result(result: Result) -> None: + """Validate the return value from a method. + + Raises an AssertionError if the result returned from a method is invalid. + + Returns: None + """ + assert (isinstance(result, Left) and isinstance(result._error, ErrorResult)) or ( + isinstance(result, Right) and isinstance(result._value, SuccessResult) + ), f"The method did not return a valid Result (returned {result!r})" + + +def call(request: Request, context: Any, method: Method) -> Result: + """Call the method. + + Handles any exceptions raised in the method, being sure to return an Error response. + + Returns: A Result. + """ + try: + result = method(*extract_args(request, context), **extract_kwargs(request)) + # validate_result raises AssertionError if the return value is not a valid + # Result, which should respond with Internal Error because its a problem in the + # method. + validate_result(result) + # Raising JsonRpcError inside the method is an alternative way of returning an error + # response. + except JsonRpcError as exc: + return Left(ErrorResult(code=exc.code, message=exc.message, data=exc.data)) + # Any other uncaught exception inside method - internal error. + except Exception as exc: + logging.exception(exc) + return Left(InternalErrorResult(str(exc))) + return result + + +def validate_args( + request: Request, context: Any, func: Method +) -> Either[ErrorResult, Method]: + """Ensure the method can be called with the arguments given. + + Returns: Either the function to be called, or an Invalid Params error result. 
+ """ + try: + signature(func).bind(*extract_args(request, context), **extract_kwargs(request)) + except TypeError as exc: + return Left(InvalidParamsResult(str(exc))) + return Right(func) + + +def get_method(methods: Methods, method_name: str) -> Either[ErrorResult, Method]: + """Get the requested method from the methods dict. + + Returns: Either the function to be called, or a Method Not Found result. + """ + try: + return Right(methods[method_name]) + except KeyError: + return Left(MethodNotFoundResult(method_name)) + + +def dispatch_request( + methods: Methods, context: Any, request: Request +) -> Tuple[Request, Result]: + """Get the method, validates the arguments and calls the method. + + Returns: A tuple containing the Result of the method, along with the original + Request. We need the ids from the original request to remove notifications + before responding, and create a Response. + """ + return ( + request, + get_method(methods, request.method) + .bind(partial(validate_args, request, context)) + .bind(partial(call, request, context)), + ) + + +def create_request(request: Dict[str, Any]) -> Request: + """Create a Request namedtuple from a dict.""" + return Request( + request["method"], request.get("params", []), request.get("id", NOID) + ) + + +def not_notification(request_result: Any) -> bool: + """True if the request was not a notification. + + Used to filter out notifications from the list of responses. + """ + return request_result[0].id is not NOID + + +def dispatch_deserialized( + methods: Methods, + context: Any, + post_process: Callable[[Response], Iterable[Any]], + deserialized: Deserialized, +) -> Union[Response, List[Response], None]: + """This is simply continuing the pipeline from dispatch_to_response_pure. It exists + only to be an abstraction, otherwise that function is doing too much. It continues + on from the request string having been parsed and validated. + + Returns: A Response, a list of Responses, or None. 
If post_process is passed, it's + applied to the Response(s). + """ + results = map( + compose(partial(dispatch_request, methods, context), create_request), + make_list(deserialized), + ) + responses = starmap(to_response, filter(not_notification, results)) + return extract_list(isinstance(deserialized, list), map(post_process, responses)) + + +def validate_request( + validator: Callable[[Deserialized], Deserialized], request: Deserialized +) -> Either[ErrorResponse, Deserialized]: + """Validate the request against a JSON-RPC schema. + + Ensures the parsed request is valid JSON-RPC. + + Returns: Either the same request passed in or an Invalid request response. + """ + try: + validator(request) + # Since the validator is unknown, the specific exception that will be raised is also + # unknown. Any exception raised we assume the request is invalid and return an + # "invalid request" response. + except Exception as exc: + return Left(InvalidRequestResponse("The request failed schema validation")) + return Right(request) + + +def deserialize_request( + deserializer: Callable[[str], Deserialized], request: str +) -> Either[ErrorResponse, Deserialized]: + """Parse the JSON request string. + + Returns: Either the deserialized request or a "Parse Error" response. + """ + try: + return Right(deserializer(request)) + # Since the deserializer is unknown, the specific exception that will be raised is + # also unknown. Any exception raised we assume the request is invalid, return a + # parse error response. + except Exception as exc: + return Left(ParseErrorResponse(str(exc))) + + +def dispatch_to_response_pure( + *, + deserializer: Callable[[str], Deserialized], + validator: Callable[[Deserialized], Deserialized], + methods: Methods, + context: Any, + post_process: Callable[[Response], Iterable[Any]], + request: str, +) -> Union[Response, List[Response], None]: + """A function from JSON-RPC request string to Response namedtuple(s), (yet to be + serialized to json). 
+ + Returns: A single Response, a list of Responses, or None. None is given for + notifications or batches of notifications, to indicate that we should not + respond. + """ + try: + result = deserialize_request(deserializer, request).bind( + partial(validate_request, validator) + ) + return ( + post_process(result) + if isinstance(result, Left) + else dispatch_deserialized(methods, context, post_process, result._value) + ) + except Exception as exc: + # There was an error with the jsonrpcserver library. + logging.exception(exc) + return post_process(Left(ServerErrorResponse(str(exc), None))) diff --git a/.venv/lib/python3.9/site-packages/jsonrpcserver/exceptions.py b/.venv/lib/python3.9/site-packages/jsonrpcserver/exceptions.py new file mode 100644 index 0000000..a8e8bcd --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonrpcserver/exceptions.py @@ -0,0 +1,10 @@ +"""A JsonRpcError exception can be raised from inside a method, as an alternative way to +return an error response. See https://github.com/explodinglabs/jsonrpcserver/discussions/158 +""" +from typing import Any +from .sentinels import NODATA + + +class JsonRpcError(Exception): + def __init__(self, code: int, message: str, data: Any = NODATA): + self.code, self.message, self.data = (code, message, data) diff --git a/.venv/lib/python3.9/site-packages/jsonrpcserver/main.py b/.venv/lib/python3.9/site-packages/jsonrpcserver/main.py new file mode 100644 index 0000000..0397a58 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonrpcserver/main.py @@ -0,0 +1,115 @@ +"""The public functions. + +These three public functions all perform the same function of dispatching a JSON-RPC +request, but they each give a different return value. + +- dispatch_to_responses: Returns Response(s) (or None for notifications). +- dispatch_to_serializable: Returns a Python dict or list of dicts (or None for + notifications). 
+- dispatch_to_json/dispatch: Returns a JSON-RPC response string (or an empty string for + notifications). +""" +from typing import Any, Callable, Dict, List, Optional, Union, cast +import json + +from jsonschema.validators import validator_for # type: ignore +from pkg_resources import resource_string + +from .dispatcher import dispatch_to_response_pure, Deserialized +from .methods import Methods, global_methods +from .response import Response, to_serializable_one +from .sentinels import NOCONTEXT +from .utils import identity + + +default_deserializer = json.loads + +# Prepare the jsonschema validator. This is global so it loads only once, not every +# time dispatch is called. +schema = json.loads(resource_string(__name__, "request-schema.json")) +klass = validator_for(schema) +klass.check_schema(schema) +default_validator = klass(schema).validate + + +def dispatch_to_response( + request: str, + methods: Optional[Methods] = None, + *, + context: Any = NOCONTEXT, + deserializer: Callable[[str], Deserialized] = json.loads, + validator: Callable[[Deserialized], Deserialized] = default_validator, + post_process: Callable[[Response], Any] = identity, +) -> Union[Response, List[Response], None]: + """Takes a JSON-RPC request string and dispatches it to method(s), giving Response + namedtuple(s) or None. + + This is a public wrapper around dispatch_to_response_pure, adding globals and + default values to be nicer for end users. + + Args: + request: The JSON-RPC request string. + methods: Dictionary of methods that can be called - mapping of function names to + functions. If not passed, uses the internal global_methods dict which is + populated with the @method decorator. + context: If given, will be passed as the first argument to methods. + deserializer: Function that deserializes the request string. + validator: Function that validates the JSON-RPC request. The function should + raise an exception if the request is invalid. To disable validation, pass + lambda _: None. 
+ post_process: Function that will be applied to Responses. + + Returns: + A Response, list of Responses or None. + + Examples: + >>> dispatch('{"jsonrpc": "2.0", "method": "ping", "id": 1}') + '{"jsonrpc": "2.0", "result": "pong", "id": 1}' + """ + return dispatch_to_response_pure( + deserializer=deserializer, + validator=validator, + post_process=post_process, + context=context, + methods=global_methods if methods is None else methods, + request=request, + ) + + +def dispatch_to_serializable( + *args: Any, **kwargs: Any +) -> Union[Dict[str, Any], List[Dict[str, Any]], None]: + """Takes a JSON-RPC request string and dispatches it to method(s), giving responses + as dicts (or None). + """ + return cast( + Union[Dict[str, Any], List[Dict[str, Any]], None], + dispatch_to_response(*args, post_process=to_serializable_one, **kwargs), + ) + + +def dispatch_to_json( + *args: Any, + serializer: Callable[ + [Union[Dict[str, Any], List[Dict[str, Any]], str]], str + ] = json.dumps, + **kwargs: Any, +) -> str: + """Takes a JSON-RPC request string and dispatches it to method(s), giving a JSON-RPC + response string. + + This is the main public method, it goes through the entire JSON-RPC process - it's a + function from JSON-RPC request string to JSON-RPC response string. + + Args: + serializer: A function to serialize a Python object to json. + The rest: Passed through to dispatch_to_serializable. + """ + response = dispatch_to_serializable(*args, **kwargs) + # Better to respond with the empty string instead of json "null", because "null" is + # an invalid JSON-RPC response. + return "" if response is None else serializer(response) + + +# "dispatch" aliases dispatch_to_json. 
+dispatch = dispatch_to_json diff --git a/.venv/lib/python3.9/site-packages/jsonrpcserver/methods.py b/.venv/lib/python3.9/site-packages/jsonrpcserver/methods.py new file mode 100644 index 0000000..f0d8fbe --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonrpcserver/methods.py @@ -0,0 +1,43 @@ +"""A method is a Python function that can be called by a JSON-RPC request. + +They're held in a dict, a mapping of function names to functions. + +The @method decorator adds a method to jsonrpcserver's internal global_methods dict. +Alternatively pass your own dictionary of methods to `dispatch` with the methods param. + + >>> dispatch(request) # Uses the internal collection of funcs added with @method + >>> dispatch(request, methods={"ping": lambda: "pong"}) # Custom collection + +Methods can take either positional or named arguments, but not both. This is a +limitation of JSON-RPC. +""" +from typing import Any, Callable, Dict, Optional, cast + +from .result import Result + +Method = Callable[..., Result] +Methods = Dict[str, Method] + +global_methods = dict() + + +def method( + f: Optional[Method] = None, name: Optional[str] = None +) -> Callable[..., Any]: + """A decorator to add a function into jsonrpcserver's internal global_methods dict. + The global_methods dict will be used by default unless a methods argument is passed + to `dispatch`. + + Functions can be renamed by passing a name argument: + + @method(name=bar) + def foo(): + ... 
+ """ + + def decorator(func: Method) -> Method: + nonlocal name + global_methods[name or func.__name__] = func + return func + + return decorator(f) if callable(f) else cast(Method, decorator) diff --git a/.venv/lib/python3.9/site-packages/jsonrpcserver/py.typed b/.venv/lib/python3.9/site-packages/jsonrpcserver/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.9/site-packages/jsonrpcserver/request-schema.json b/.venv/lib/python3.9/site-packages/jsonrpcserver/request-schema.json new file mode 100644 index 0000000..52bb147 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonrpcserver/request-schema.json @@ -0,0 +1,39 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "description": "A JSON RPC 2.0 request", + "oneOf": [ + { + "description": "An individual request", + "$ref": "#/definitions/request" + }, + { + "description": "An array of requests", + "type": "array", + "items": { "$ref": "#/definitions/request" }, + "minItems": 1 + } + ], + "definitions": { + "request": { + "type": "object", + "required": [ "jsonrpc", "method" ], + "properties": { + "jsonrpc": { "enum": [ "2.0" ] }, + "method": { + "type": "string" + }, + "id": { + "type": [ "string", "number", "null" ], + "note": [ + "While allowed, null should be avoided: http://www.jsonrpc.org/specification#id1", + "While allowed, a number with a fractional part should be avoided: http://www.jsonrpc.org/specification#id2" + ] + }, + "params": { + "type": [ "array", "object" ] + } + }, + "additionalProperties": false + } + } +} diff --git a/.venv/lib/python3.9/site-packages/jsonrpcserver/request.py b/.venv/lib/python3.9/site-packages/jsonrpcserver/request.py new file mode 100644 index 0000000..958c36e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonrpcserver/request.py @@ -0,0 +1,12 @@ +"""A simple namedtuple to hold a request. 
+ +After parsing the request string, we put the requests (which are dicts) into these +Request namedtuples because they're nicer to work with. +""" +from typing import Any, Dict, List, NamedTuple, Union + + +class Request(NamedTuple): + method: str + params: Union[List[Any], Dict[str, Any]] + id: Any # Use NOID for a Notification. diff --git a/.venv/lib/python3.9/site-packages/jsonrpcserver/response.py b/.venv/lib/python3.9/site-packages/jsonrpcserver/response.py new file mode 100644 index 0000000..cc5d010 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonrpcserver/response.py @@ -0,0 +1,103 @@ +"""The response data types. + +https://www.jsonrpc.org/specification#response_object +""" +from typing import Any, Dict, List, Type, NamedTuple, Union + +from oslash.either import Either, Left # type: ignore + +from .codes import ( + ERROR_INVALID_REQUEST, + ERROR_METHOD_NOT_FOUND, + ERROR_PARSE_ERROR, + ERROR_SERVER_ERROR, +) +from .sentinels import NODATA + +Deserialized = Union[Dict[str, Any], List[Dict[str, Any]]] + + +class SuccessResponse(NamedTuple): + """ + It would be nice to subclass Success here, adding only id. But it's not possible to + easily subclass NamedTuples in Python 3.6. (I believe it can be done in 3.8.) + """ + + result: str + id: Any + + +class ErrorResponse(NamedTuple): + """ + It would be nice to subclass Error here, adding only id. But it's not possible to + easily subclass NamedTuples in Python 3.6. (I believe it can be done in 3.8.) + """ + + code: int + message: str + data: Any + id: Any + + +Response = Either[ErrorResponse, SuccessResponse] +ResponseType = Type[Either[ErrorResponse, SuccessResponse]] + + +def ParseErrorResponse(data: Any) -> ErrorResponse: + """ + From the spec: "This (id) member is REQUIRED. It MUST be the same as the value of + the id member in the Request Object. If there was an error in detecting the id in + the Request object (e.g. Parse error/Invalid Request), it MUST be Null." 
+ """ + return ErrorResponse(ERROR_PARSE_ERROR, "Parse error", data, None) + + +def InvalidRequestResponse(data: Any) -> ErrorResponse: + """ + From the spec: "This (id) member is REQUIRED. It MUST be the same as the value of + the id member in the Request Object. If there was an error in detecting the id in + the Request object (e.g. Parse error/Invalid Request), it MUST be Null." + """ + return ErrorResponse(ERROR_INVALID_REQUEST, "Invalid request", data, None) + + +def MethodNotFoundResponse(data: Any, id: Any) -> ErrorResponse: + return ErrorResponse(ERROR_METHOD_NOT_FOUND, "Method not found", data, id) + + +def ServerErrorResponse(data: Any, id: Any) -> ErrorResponse: + return ErrorResponse(ERROR_SERVER_ERROR, "Server error", data, id) + + +def serialize_error(response: ErrorResponse) -> Dict[str, Any]: + return { + "jsonrpc": "2.0", + "error": { + "code": response.code, + "message": response.message, + # "data" may be omitted. + **({"data": response.data} if response.data is not NODATA else {}), + }, + "id": response.id, + } + + +def serialize_success(response: SuccessResponse) -> Dict[str, Any]: + return {"jsonrpc": "2.0", "result": response.result, "id": response.id} + + +def to_serializable_one(response: ResponseType) -> Union[Deserialized, None]: + return ( + serialize_error(response._error) + if isinstance(response, Left) + else serialize_success(response._value) + ) + + +def to_serializable(response: ResponseType) -> Union[Deserialized, None]: + if response is None: + return None + elif isinstance(response, List): + return [to_serializable_one(r) for r in response] + else: + return to_serializable_one(response) diff --git a/.venv/lib/python3.9/site-packages/jsonrpcserver/result.py b/.venv/lib/python3.9/site-packages/jsonrpcserver/result.py new file mode 100644 index 0000000..889b24e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonrpcserver/result.py @@ -0,0 +1,67 @@ +"""Result data types - the results of calling a method. 
+ +Results are the JSON-RPC response objects +(https://www.jsonrpc.org/specification#response_object), minus the "jsonrpc" and "id" +parts - the library takes care of these parts for you. + +The public functions are Success, Error and InvalidParams. +""" +from typing import Any, NamedTuple + +from oslash.either import Either, Left, Right # type: ignore + +from .codes import ERROR_INVALID_PARAMS, ERROR_METHOD_NOT_FOUND, ERROR_INTERNAL_ERROR +from .sentinels import NODATA + + +class SuccessResult(NamedTuple): + result: Any = None + + def __repr__(self) -> str: + return f"SuccessResult({self.result!r})" + + +class ErrorResult(NamedTuple): + code: int + message: str + data: Any = NODATA # The spec says this value may be omitted + + def __repr__(self) -> str: + return f"ErrorResult(code={self.code!r}, message={self.message!r}, data={self.data!r})" + + +# Union of the two valid result types +Result = Either[ErrorResult, SuccessResult] + + +# Helpers + + +def MethodNotFoundResult(data: Any) -> ErrorResult: + return ErrorResult(ERROR_METHOD_NOT_FOUND, "Method not found", data) + + +def InternalErrorResult(data: Any) -> ErrorResult: + return ErrorResult(ERROR_INTERNAL_ERROR, "Internal error", data) + + +def InvalidParamsResult(data: Any = NODATA) -> ErrorResult: + return ErrorResult(ERROR_INVALID_PARAMS, "Invalid params", data) + + +# Helpers (the public functions) + + +def Success(*args: Any, **kwargs: Any) -> Either[ErrorResult, SuccessResult]: + return Right(SuccessResult(*args, **kwargs)) + + +def Error(*args: Any, **kwargs: Any) -> Either[ErrorResult, SuccessResult]: + return Left(ErrorResult(*args, **kwargs)) + + +def InvalidParams(*args: Any, **kwargs: Any) -> Either[ErrorResult, SuccessResult]: + """InvalidParams is a shortcut to save you from having to pass the Invalid Params + JSON-RPC code to Error. 
+ """ + return Left(InvalidParamsResult(*args, **kwargs)) diff --git a/.venv/lib/python3.9/site-packages/jsonrpcserver/sentinels.py b/.venv/lib/python3.9/site-packages/jsonrpcserver/sentinels.py new file mode 100644 index 0000000..22cd64e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonrpcserver/sentinels.py @@ -0,0 +1,19 @@ +"""Sentinels - these are used to indicate no data is present. + +We can't use None, because None may be a valid piece of data. +""" + +import sys + + +class Sentinel: + def __init__(self, name: str): + self.name = name + + def __repr__(self) -> str: + return f"<{sys.intern(str(self.name)).rsplit('.', 1)[-1]}>" + + +NOCONTEXT = Sentinel("NoContext") +NODATA = Sentinel("NoData") +NOID = Sentinel("NoId") diff --git a/.venv/lib/python3.9/site-packages/jsonrpcserver/server.py b/.venv/lib/python3.9/site-packages/jsonrpcserver/server.py new file mode 100644 index 0000000..bab5f91 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonrpcserver/server.py @@ -0,0 +1,21 @@ +import logging +from http.server import BaseHTTPRequestHandler, HTTPServer + +from .main import dispatch + + +class RequestHandler(BaseHTTPRequestHandler): + def do_POST(self) -> None: + response = dispatch( + self.rfile.read(int(str(self.headers["Content-Length"]))).decode() + ) + if response is not None: + self.send_response(200) + self.send_header("Content-type", "application/json") + self.end_headers() + self.wfile.write(str(response).encode()) + + +def serve(name: str = "", port: int = 5000) -> None: + logging.info(" * Listening on port %s", port) + HTTPServer((name, port), RequestHandler).serve_forever() diff --git a/.venv/lib/python3.9/site-packages/jsonrpcserver/utils.py b/.venv/lib/python3.9/site-packages/jsonrpcserver/utils.py new file mode 100644 index 0000000..cd61e91 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonrpcserver/utils.py @@ -0,0 +1,16 @@ +from functools import reduce +from typing import Any, Callable, List + + +identity = lambda x: x + 
+ +def compose(*fs: Callable[..., Any]) -> Callable[..., Any]: + def compose2(f: Callable[..., Any], g: Callable[..., Any]) -> Callable[..., Any]: + return lambda *a, **kw: f(g(*a, **kw)) + + return reduce(compose2, fs) + + +def make_list(x: Any) -> List[Any]: + return [x] if not isinstance(x, list) else x diff --git a/.venv/lib/python3.9/site-packages/jsonschema-4.4.0.dist-info/COPYING b/.venv/lib/python3.9/site-packages/jsonschema-4.4.0.dist-info/COPYING new file mode 100644 index 0000000..af9cfbd --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonschema-4.4.0.dist-info/COPYING @@ -0,0 +1,19 @@ +Copyright (c) 2013 Julian Berman + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/.venv/lib/python3.9/site-packages/jsonschema-4.4.0.dist-info/INSTALLER b/.venv/lib/python3.9/site-packages/jsonschema-4.4.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonschema-4.4.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.9/site-packages/jsonschema-4.4.0.dist-info/METADATA b/.venv/lib/python3.9/site-packages/jsonschema-4.4.0.dist-info/METADATA new file mode 100644 index 0000000..5606353 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonschema-4.4.0.dist-info/METADATA @@ -0,0 +1,200 @@ +Metadata-Version: 2.1 +Name: jsonschema +Version: 4.4.0 +Summary: An implementation of JSON Schema validation for Python +Home-page: https://github.com/Julian/jsonschema +Author: Julian Berman +Author-email: Julian@GrayVines.com +License: MIT +Project-URL: Documentation, https://python-jsonschema.readthedocs.io/en/latest/ +Project-URL: Source, https://github.com/Julian/jsonschema +Project-URL: Issues, https://github.com/Julian/jsonschema/issues/ +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +License-File: COPYING +Requires-Dist: attrs (>=17.4.0) +Requires-Dist: pyrsistent (!=0.17.0,!=0.17.1,!=0.17.2,>=0.14.0) +Requires-Dist: importlib-metadata ; python_version < "3.8" +Requires-Dist: 
typing-extensions ; python_version < "3.8" +Requires-Dist: importlib-resources (>=1.4.0) ; python_version < "3.9" +Provides-Extra: format +Requires-Dist: fqdn ; extra == 'format' +Requires-Dist: idna ; extra == 'format' +Requires-Dist: isoduration ; extra == 'format' +Requires-Dist: jsonpointer (>1.13) ; extra == 'format' +Requires-Dist: rfc3339-validator ; extra == 'format' +Requires-Dist: rfc3987 ; extra == 'format' +Requires-Dist: uri-template ; extra == 'format' +Requires-Dist: webcolors (>=1.11) ; extra == 'format' +Provides-Extra: format_nongpl +Requires-Dist: fqdn ; extra == 'format_nongpl' +Requires-Dist: idna ; extra == 'format_nongpl' +Requires-Dist: isoduration ; extra == 'format_nongpl' +Requires-Dist: jsonpointer (>1.13) ; extra == 'format_nongpl' +Requires-Dist: rfc3339-validator ; extra == 'format_nongpl' +Requires-Dist: rfc3986-validator (>0.1.0) ; extra == 'format_nongpl' +Requires-Dist: uri-template ; extra == 'format_nongpl' +Requires-Dist: webcolors (>=1.11) ; extra == 'format_nongpl' + +========== +jsonschema +========== + +|PyPI| |Pythons| |CI| |ReadTheDocs| |Precommit| |Zenodo| + +.. |PyPI| image:: https://img.shields.io/pypi/v/jsonschema.svg + :alt: PyPI version + :target: https://pypi.org/project/jsonschema/ + +.. |Pythons| image:: https://img.shields.io/pypi/pyversions/jsonschema.svg + :alt: Supported Python versions + :target: https://pypi.org/project/jsonschema/ + +.. |CI| image:: https://github.com/Julian/jsonschema/workflows/CI/badge.svg + :alt: Build status + :target: https://github.com/Julian/jsonschema/actions?query=workflow%3ACI + +.. |ReadTheDocs| image:: https://readthedocs.org/projects/python-jsonschema/badge/?version=stable&style=flat + :alt: ReadTheDocs status + :target: https://python-jsonschema.readthedocs.io/en/stable/ + +.. |Precommit| image:: https://results.pre-commit.ci/badge/github/Julian/jsonschema/main.svg + :alt: pre-commit.ci status + :target: https://results.pre-commit.ci/latest/github/Julian/jsonschema/main + +.. 
|Zenodo| image:: https://zenodo.org/badge/3072629.svg + :target: https://zenodo.org/badge/latestdoi/3072629 + + +``jsonschema`` is an implementation of the `JSON Schema +`_ specification for Python. + +.. code-block:: python + + >>> from jsonschema import validate + + >>> # A sample schema, like what we'd get from json.load() + >>> schema = { + ... "type" : "object", + ... "properties" : { + ... "price" : {"type" : "number"}, + ... "name" : {"type" : "string"}, + ... }, + ... } + + >>> # If no exception is raised by validate(), the instance is valid. + >>> validate(instance={"name" : "Eggs", "price" : 34.99}, schema=schema) + + >>> validate( + ... instance={"name" : "Eggs", "price" : "Invalid"}, schema=schema, + ... ) # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + ValidationError: 'Invalid' is not of type 'number' + +It can also be used from console: + +.. code-block:: bash + + $ jsonschema --instance sample.json sample.schema + +Features +-------- + +* Partial support for + `Draft 2020-12 `_ and + `Draft 2019-09 `_, + except for ``dynamicRef`` / ``recursiveRef`` and ``$vocabulary`` (in-progress). + Full support for + `Draft 7 `_, + `Draft 6 `_, + `Draft 4 `_ + and + `Draft 3 `_ + +* `Lazy validation `_ + that can iteratively report *all* validation errors. + +* `Programmatic querying `_ + of which properties or items failed validation. + + +Installation +------------ + +``jsonschema`` is available on `PyPI `_. You can install using `pip `_: + +.. code-block:: bash + + $ pip install jsonschema + + +Running the Test Suite +---------------------- + +If you have ``tox`` installed (perhaps via ``pip install tox`` or your +package manager), running ``tox`` in the directory of your source +checkout will run ``jsonschema``'s test suite on all of the versions +of Python ``jsonschema`` supports. 
If you don't have all of the +versions that ``jsonschema`` is tested under, you'll likely want to run +using ``tox``'s ``--skip-missing-interpreters`` option. + +Of course you're also free to just run the tests on a single version with your +favorite test runner. The tests live in the ``jsonschema.tests`` package. + + +Benchmarks +---------- + +``jsonschema``'s benchmarks make use of `pyperf +`_. Running them can be done via:: + + $ tox -e perf + + +Community +--------- + +The JSON Schema specification has `a Slack +`_, with an `invite link on its home page +`_. Many folks knowledgeable on authoring +schemas can be found there. + +Otherwise, asking questions on Stack Overflow is another means of +getting help if you're stuck. + +Contributing +------------ + +I'm Julian Berman. + +``jsonschema`` is on `GitHub `_. + +Get in touch, via GitHub or otherwise, if you've got something to contribute, +it'd be most welcome! + +You can also generally find me on Libera (nick: ``Julian``) in various +channels, including ``#python``. + +If you feel overwhelmingly grateful, you can also `sponsor me +`_. + +And for companies who appreciate ``jsonschema`` and its continued support +and growth, ``jsonschema`` is also now supportable via `TideLift +`_. 
+ + diff --git a/.venv/lib/python3.9/site-packages/jsonschema-4.4.0.dist-info/RECORD b/.venv/lib/python3.9/site-packages/jsonschema-4.4.0.dist-info/RECORD new file mode 100644 index 0000000..ac59c21 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonschema-4.4.0.dist-info/RECORD @@ -0,0 +1,69 @@ +../../../bin/jsonschema,sha256=IbWdOSBfFVSYQjk4YdguFPkLIQ4CisB4fZxzbWe9PR4,266 +jsonschema-4.4.0.dist-info/COPYING,sha256=T5KgFaE8TRoEC-8BiqE0MLTxvHO0Gxa7hGw0Z2bedDk,1057 +jsonschema-4.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +jsonschema-4.4.0.dist-info/METADATA,sha256=mf-oc4RNWKinypj-AgQJwf17Ejw4Jiu9mH1JDPc3LUM,7539 +jsonschema-4.4.0.dist-info/RECORD,, +jsonschema-4.4.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 +jsonschema-4.4.0.dist-info/entry_points.txt,sha256=KaVUBBSLyzi5naUkVg-r3q6T_igdLgaHY6Mm3oLX73s,52 +jsonschema-4.4.0.dist-info/top_level.txt,sha256=jGoNS61vDONU8U7p0Taf-y_8JVG1Z2CJ5Eif6zMN_cw,11 +jsonschema/__init__.py,sha256=h0l2RPVM9kimU7-jTSKoEnguV3QGvrrQvlnJN3F6UPk,1561 +jsonschema/__main__.py,sha256=Sfz1ZNeogymj_KZxq6JXY3F6O_1v28sLIiskusifQ5s,40 +jsonschema/__pycache__/__init__.cpython-39.pyc,, +jsonschema/__pycache__/__main__.cpython-39.pyc,, +jsonschema/__pycache__/_format.cpython-39.pyc,, +jsonschema/__pycache__/_legacy_validators.cpython-39.pyc,, +jsonschema/__pycache__/_reflect.cpython-39.pyc,, +jsonschema/__pycache__/_types.cpython-39.pyc,, +jsonschema/__pycache__/_utils.cpython-39.pyc,, +jsonschema/__pycache__/_validators.cpython-39.pyc,, +jsonschema/__pycache__/cli.cpython-39.pyc,, +jsonschema/__pycache__/exceptions.cpython-39.pyc,, +jsonschema/__pycache__/protocols.cpython-39.pyc,, +jsonschema/__pycache__/validators.cpython-39.pyc,, +jsonschema/_format.py,sha256=MqdmiZPvQcseyH28byggqxnTUGB52oP9X1jiT5yVwDw,13156 +jsonschema/_legacy_validators.py,sha256=-LlXuPD8n1vUI4PUxhLp5xMLPXQNl7PiL3KYQmulyco,7199 +jsonschema/_reflect.py,sha256=qrE9u6y_d7MRIXWReN3Kiwkyytm3lQh6Pfdj9qvrbaY,4859 
+jsonschema/_types.py,sha256=_NDm3OxdPPWAqBSpfo4QVEA_oqfKMACg1QslVx0S900,5364 +jsonschema/_utils.py,sha256=JsFatTW-dPS7V4H5Xdn9aw15HlNlSxvaO3iTsFqWs_Y,10415 +jsonschema/_validators.py,sha256=bRgXtl4UpD5lmy5qZOtwe92IJC-_2BbUx8oZzKDw4zE,15434 +jsonschema/benchmarks/__init__.py,sha256=A0sQrxDBVHSyQ-8ru3L11hMXf3q9gVuB9x_YgHb4R9M,70 +jsonschema/benchmarks/__pycache__/__init__.cpython-39.pyc,, +jsonschema/benchmarks/__pycache__/issue232.cpython-39.pyc,, +jsonschema/benchmarks/__pycache__/json_schema_test_suite.cpython-39.pyc,, +jsonschema/benchmarks/issue232.py,sha256=r_V1CaY1rLHP0UCxoEeQhZe5kwQBkdQYdPKmxCj7DbE,495 +jsonschema/benchmarks/json_schema_test_suite.py,sha256=PvfabpUYcF4_7csYDTcTauED8rnFEGYbdY5RqTXD08s,320 +jsonschema/cli.py,sha256=ldAuYYfY9OvQLnAT5PEQrGQn7-fBy_rSY_V9ZesjP8g,8136 +jsonschema/exceptions.py,sha256=utvZjE7HBABp7w5XXWie0EksGpmKD-Hb2yfdOQ93eMM,10268 +jsonschema/protocols.py,sha256=le6gCn2Zr-j8RuGuI1mF78s483PWUaGcOG3GhnChv20,6012 +jsonschema/schemas/draft2019-09.json,sha256=e3YbPhIfCgyh6ioLjizIVrz4AWBLgmjXG6yqICvAwTs,1785 +jsonschema/schemas/draft2020-12.json,sha256=Qdp29a-3zgYtJI92JGOpL3ykfk4PkFsiS6av7vkd7Q8,2452 +jsonschema/schemas/draft3.json,sha256=2LanCgvBrUT8Eyk37KszzCjFxuOw0UBFOeS-ahb5Crg,2699 +jsonschema/schemas/draft4.json,sha256=d-VZ-zmogXIypnObMGPT_e88TPZ9Zb40jd2-Fuvs9j4,4355 +jsonschema/schemas/draft6.json,sha256=wp386fVINcOgbAOzxdXsDtp3cGVo-cTffPvHVmpRAG0,4437 +jsonschema/schemas/draft7.json,sha256=PVOSCIJhYGxVm2A_OFMpyfGrRbXWZ-uZBodFOwVdQF4,4819 +jsonschema/schemas/vocabularies.json,sha256=SW7oOta6bhkEdVDPBKgvrosztMW_UyKs-s04pgpgXqs,12845 +jsonschema/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +jsonschema/tests/__pycache__/__init__.cpython-39.pyc,, +jsonschema/tests/__pycache__/_helpers.cpython-39.pyc,, +jsonschema/tests/__pycache__/_suite.cpython-39.pyc,, +jsonschema/tests/__pycache__/fuzz_validate.cpython-39.pyc,, +jsonschema/tests/__pycache__/test_cli.cpython-39.pyc,, 
+jsonschema/tests/__pycache__/test_deprecations.cpython-39.pyc,, +jsonschema/tests/__pycache__/test_exceptions.cpython-39.pyc,, +jsonschema/tests/__pycache__/test_format.cpython-39.pyc,, +jsonschema/tests/__pycache__/test_jsonschema_test_suite.cpython-39.pyc,, +jsonschema/tests/__pycache__/test_types.cpython-39.pyc,, +jsonschema/tests/__pycache__/test_utils.cpython-39.pyc,, +jsonschema/tests/__pycache__/test_validators.cpython-39.pyc,, +jsonschema/tests/_helpers.py,sha256=3c-b9CK0cdGfhtuUhzM1AjtqPtR2VFvfcKC6G2g0a-0,157 +jsonschema/tests/_suite.py,sha256=1uc_lOHcwxyfyL7DujRQMzPg3xvoQoVkg15ks3RwCjk,6482 +jsonschema/tests/fuzz_validate.py,sha256=GeNlFQepS7ax7Sh90iISVYQXjUkPCUF0c20jEPgPx8s,1085 +jsonschema/tests/test_cli.py,sha256=y52uBGTEgab6IhnTLSaA94xUTLLp3OKSQiy3qtiRMCQ,28674 +jsonschema/tests/test_deprecations.py,sha256=paMq3Hd33zDfVsJpTd95MAOzI6y7IoUQ5brgp9qqVdU,3901 +jsonschema/tests/test_exceptions.py,sha256=WOFFmvp9l9OgCR-bPx_VkLifuNNn7xnPeqpqk7Tjxf8,15700 +jsonschema/tests/test_format.py,sha256=Gu4are4xUyRQc8YL0z-RlDOIc9_96ISv83hZRf8R2t0,3763 +jsonschema/tests/test_jsonschema_test_suite.py,sha256=5Ej98xJe61PBw8uwanoY_D5zMY5wivy3dOjBn38t9uc,13605 +jsonschema/tests/test_types.py,sha256=DyvSKPtuaIu93Lkde80PkJkNOKgvCbaDYAfHz0yxyL0,6803 +jsonschema/tests/test_utils.py,sha256=lJRVYyQeZQTUCTU_M3BhlkxPMgjsc8KQCd7U_Qkook8,3749 +jsonschema/tests/test_validators.py,sha256=qkNF5FSMqB9nDSrmsuckOx-MOGDXTUTDzdtODwjnznE,73618 +jsonschema/validators.py,sha256=iJrjNm6J-6yyQBSu85HCfDyftrn-4loltqErfIcFtRk,34163 diff --git a/.venv/lib/python3.9/site-packages/jsonschema-4.4.0.dist-info/WHEEL b/.venv/lib/python3.9/site-packages/jsonschema-4.4.0.dist-info/WHEEL new file mode 100644 index 0000000..becc9a6 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonschema-4.4.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git 
a/.venv/lib/python3.9/site-packages/jsonschema-4.4.0.dist-info/entry_points.txt b/.venv/lib/python3.9/site-packages/jsonschema-4.4.0.dist-info/entry_points.txt new file mode 100644 index 0000000..c627b31 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonschema-4.4.0.dist-info/entry_points.txt @@ -0,0 +1,3 @@ +[console_scripts] +jsonschema = jsonschema.cli:main + diff --git a/.venv/lib/python3.9/site-packages/jsonschema-4.4.0.dist-info/top_level.txt b/.venv/lib/python3.9/site-packages/jsonschema-4.4.0.dist-info/top_level.txt new file mode 100644 index 0000000..d89304b --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonschema-4.4.0.dist-info/top_level.txt @@ -0,0 +1 @@ +jsonschema diff --git a/.venv/lib/python3.9/site-packages/jsonschema/__init__.py b/.venv/lib/python3.9/site-packages/jsonschema/__init__.py new file mode 100644 index 0000000..75f2946 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonschema/__init__.py @@ -0,0 +1,58 @@ +""" +An implementation of JSON Schema for Python + +The main functionality is provided by the validator classes for each of the +supported JSON Schema versions. + +Most commonly, `validate` is the quickest way to simply validate a given +instance under a schema, and will create a validator for you. 
+""" +import warnings + +from jsonschema._format import ( + FormatChecker, + draft3_format_checker, + draft4_format_checker, + draft6_format_checker, + draft7_format_checker, + draft201909_format_checker, + draft202012_format_checker, +) +from jsonschema._types import TypeChecker +from jsonschema.exceptions import ( + ErrorTree, + FormatError, + RefResolutionError, + SchemaError, + ValidationError, +) +from jsonschema.protocols import Validator +from jsonschema.validators import ( + Draft3Validator, + Draft4Validator, + Draft6Validator, + Draft7Validator, + Draft201909Validator, + Draft202012Validator, + RefResolver, + validate, +) + + +def __getattr__(name): + if name == "__version__": + warnings.warn( + "Accessing jsonschema.__version__ is deprecated and will be " + "removed in a future release. Use importlib.metadata directly " + "to query for jsonschema's version.", + DeprecationWarning, + stacklevel=2, + ) + + try: + from importlib import metadata + except ImportError: + import importlib_metadata as metadata + + return metadata.version("jsonschema") + raise AttributeError(f"module {__name__} has no attribute {name}") diff --git a/.venv/lib/python3.9/site-packages/jsonschema/__main__.py b/.venv/lib/python3.9/site-packages/jsonschema/__main__.py new file mode 100644 index 0000000..fdc21e2 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonschema/__main__.py @@ -0,0 +1,3 @@ +from jsonschema.cli import main + +main() diff --git a/.venv/lib/python3.9/site-packages/jsonschema/_format.py b/.venv/lib/python3.9/site-packages/jsonschema/_format.py new file mode 100644 index 0000000..5f99c65 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonschema/_format.py @@ -0,0 +1,482 @@ +from __future__ import annotations + +from contextlib import suppress +from uuid import UUID +import datetime +import ipaddress +import re +import typing + +from jsonschema.exceptions import FormatError + + +class FormatChecker(object): + """ + A ``format`` property checker. 
+ + JSON Schema does not mandate that the ``format`` property actually do any + validation. If validation is desired however, instances of this class can + be hooked into validators to enable format validation. + + `FormatChecker` objects always return ``True`` when asked about + formats that they do not know how to validate. + + To check a custom format using a function that takes an instance and + returns a ``bool``, use the `FormatChecker.checks` or + `FormatChecker.cls_checks` decorators. + + Arguments: + + formats (~collections.abc.Iterable): + + The known formats to validate. This argument can be used to + limit which formats will be used during validation. + """ + + checkers: dict[ + str, + tuple[ + typing.Callable[[typing.Any], bool], + Exception | tuple[Exception, ...], + ], + ] = {} + + def __init__(self, formats=None): + if formats is None: + self.checkers = self.checkers.copy() + else: + self.checkers = dict((k, self.checkers[k]) for k in formats) + + def __repr__(self): + return "".format(sorted(self.checkers)) + + def checks(self, format, raises=()): + """ + Register a decorated function as validating a new format. + + Arguments: + + format (str): + + The format that the decorated function will check. + + raises (Exception): + + The exception(s) raised by the decorated function when an + invalid instance is found. + + The exception object will be accessible as the + `jsonschema.exceptions.ValidationError.cause` attribute of the + resulting validation error. + """ + + def _checks(func): + self.checkers[format] = (func, raises) + return func + return _checks + + cls_checks = classmethod(checks) + + def check(self, instance, format): + """ + Check whether the instance conforms to the given format. + + Arguments: + + instance (*any primitive type*, i.e. 
str, number, bool): + + The instance to check + + format (str): + + The format that instance should conform to + + + Raises: + + FormatError: if the instance does not conform to ``format`` + """ + + if format not in self.checkers: + return + + func, raises = self.checkers[format] + result, cause = None, None + try: + result = func(instance) + except raises as e: + cause = e + if not result: + raise FormatError(f"{instance!r} is not a {format!r}", cause=cause) + + def conforms(self, instance, format): + """ + Check whether the instance conforms to the given format. + + Arguments: + + instance (*any primitive type*, i.e. str, number, bool): + + The instance to check + + format (str): + + The format that instance should conform to + + Returns: + + bool: whether it conformed + """ + + try: + self.check(instance, format) + except FormatError: + return False + else: + return True + + +draft3_format_checker = FormatChecker() +draft4_format_checker = FormatChecker() +draft6_format_checker = FormatChecker() +draft7_format_checker = FormatChecker() +draft201909_format_checker = FormatChecker() +draft202012_format_checker = FormatChecker() + +_draft_checkers = dict( + draft3=draft3_format_checker, + draft4=draft4_format_checker, + draft6=draft6_format_checker, + draft7=draft7_format_checker, + draft201909=draft201909_format_checker, + draft202012=draft202012_format_checker, +) + + +def _checks_drafts( + name=None, + draft3=None, + draft4=None, + draft6=None, + draft7=None, + draft201909=None, + draft202012=None, + raises=(), +): + draft3 = draft3 or name + draft4 = draft4 or name + draft6 = draft6 or name + draft7 = draft7 or name + draft201909 = draft201909 or name + draft202012 = draft202012 or name + + def wrap(func): + if draft3: + func = _draft_checkers["draft3"].checks(draft3, raises)(func) + if draft4: + func = _draft_checkers["draft4"].checks(draft4, raises)(func) + if draft6: + func = _draft_checkers["draft6"].checks(draft6, raises)(func) + if draft7: + func = 
_draft_checkers["draft7"].checks(draft7, raises)(func) + if draft201909: + func = _draft_checkers["draft201909"].checks(draft201909, raises)( + func, + ) + if draft202012: + func = _draft_checkers["draft202012"].checks(draft202012, raises)( + func, + ) + + # Oy. This is bad global state, but relied upon for now, until + # deprecation. See https://github.com/Julian/jsonschema/issues/519 + # and test_format_checkers_come_with_defaults + FormatChecker.cls_checks( + draft202012 or draft201909 or draft7 or draft6 or draft4 or draft3, + raises, + )(func) + return func + return wrap + + +@_checks_drafts(name="idn-email") +@_checks_drafts(name="email") +def is_email(instance): + if not isinstance(instance, str): + return True + return "@" in instance + + +@_checks_drafts( + draft3="ip-address", + draft4="ipv4", + draft6="ipv4", + draft7="ipv4", + draft201909="ipv4", + draft202012="ipv4", + raises=ipaddress.AddressValueError, +) +def is_ipv4(instance): + if not isinstance(instance, str): + return True + return ipaddress.IPv4Address(instance) + + +@_checks_drafts(name="ipv6", raises=ipaddress.AddressValueError) +def is_ipv6(instance): + if not isinstance(instance, str): + return True + address = ipaddress.IPv6Address(instance) + return not getattr(address, "scope_id", "") + + +with suppress(ImportError): + from fqdn import FQDN + + @_checks_drafts( + draft3="host-name", + draft4="hostname", + draft6="hostname", + draft7="hostname", + draft201909="hostname", + draft202012="hostname", + ) + def is_host_name(instance): + if not isinstance(instance, str): + return True + return FQDN(instance).is_valid + + +with suppress(ImportError): + # The built-in `idna` codec only implements RFC 3890, so we go elsewhere. 
+ import idna + + @_checks_drafts( + draft7="idn-hostname", + draft201909="idn-hostname", + draft202012="idn-hostname", + raises=(idna.IDNAError, UnicodeError), + ) + def is_idn_host_name(instance): + if not isinstance(instance, str): + return True + idna.encode(instance) + return True + + +try: + import rfc3987 +except ImportError: + with suppress(ImportError): + from rfc3986_validator import validate_rfc3986 + + @_checks_drafts(name="uri") + def is_uri(instance): + if not isinstance(instance, str): + return True + return validate_rfc3986(instance, rule="URI") + + @_checks_drafts( + draft6="uri-reference", + draft7="uri-reference", + draft201909="uri-reference", + draft202012="uri-reference", + raises=ValueError, + ) + def is_uri_reference(instance): + if not isinstance(instance, str): + return True + return validate_rfc3986(instance, rule="URI_reference") + +else: + @_checks_drafts( + draft7="iri", + draft201909="iri", + draft202012="iri", + raises=ValueError, + ) + def is_iri(instance): + if not isinstance(instance, str): + return True + return rfc3987.parse(instance, rule="IRI") + + @_checks_drafts( + draft7="iri-reference", + draft201909="iri-reference", + draft202012="iri-reference", + raises=ValueError, + ) + def is_iri_reference(instance): + if not isinstance(instance, str): + return True + return rfc3987.parse(instance, rule="IRI_reference") + + @_checks_drafts(name="uri", raises=ValueError) + def is_uri(instance): + if not isinstance(instance, str): + return True + return rfc3987.parse(instance, rule="URI") + + @_checks_drafts( + draft6="uri-reference", + draft7="uri-reference", + draft201909="uri-reference", + draft202012="uri-reference", + raises=ValueError, + ) + def is_uri_reference(instance): + if not isinstance(instance, str): + return True + return rfc3987.parse(instance, rule="URI_reference") + +with suppress(ImportError): + from rfc3339_validator import validate_rfc3339 + + @_checks_drafts(name="date-time") + def is_datetime(instance): + if not 
isinstance(instance, str): + return True + return validate_rfc3339(instance.upper()) + + @_checks_drafts( + draft7="time", + draft201909="time", + draft202012="time", + ) + def is_time(instance): + if not isinstance(instance, str): + return True + return is_datetime("1970-01-01T" + instance) + + +@_checks_drafts(name="regex", raises=re.error) +def is_regex(instance): + if not isinstance(instance, str): + return True + return re.compile(instance) + + +@_checks_drafts( + draft3="date", + draft7="date", + draft201909="date", + draft202012="date", + raises=ValueError, +) +def is_date(instance): + if not isinstance(instance, str): + return True + return instance.isascii() and datetime.date.fromisoformat(instance) + + +@_checks_drafts(draft3="time", raises=ValueError) +def is_draft3_time(instance): + if not isinstance(instance, str): + return True + return datetime.datetime.strptime(instance, "%H:%M:%S") + + +with suppress(ImportError): + from webcolors import CSS21_NAMES_TO_HEX + import webcolors + + def is_css_color_code(instance): + return webcolors.normalize_hex(instance) + + @_checks_drafts(draft3="color", raises=(ValueError, TypeError)) + def is_css21_color(instance): + if ( + not isinstance(instance, str) + or instance.lower() in CSS21_NAMES_TO_HEX + ): + return True + return is_css_color_code(instance) + + +with suppress(ImportError): + import jsonpointer + + @_checks_drafts( + draft6="json-pointer", + draft7="json-pointer", + draft201909="json-pointer", + draft202012="json-pointer", + raises=jsonpointer.JsonPointerException, + ) + def is_json_pointer(instance): + if not isinstance(instance, str): + return True + return jsonpointer.JsonPointer(instance) + + # TODO: I don't want to maintain this, so it + # needs to go either into jsonpointer (pending + # https://github.com/stefankoegl/python-json-pointer/issues/34) or + # into a new external library. 
+ @_checks_drafts( + draft7="relative-json-pointer", + draft201909="relative-json-pointer", + draft202012="relative-json-pointer", + raises=jsonpointer.JsonPointerException, + ) + def is_relative_json_pointer(instance): + # Definition taken from: + # https://tools.ietf.org/html/draft-handrews-relative-json-pointer-01#section-3 + if not isinstance(instance, str): + return True + non_negative_integer, rest = [], "" + for i, character in enumerate(instance): + if character.isdigit(): + # digits with a leading "0" are not allowed + if i > 0 and int(instance[i - 1]) == 0: + return False + + non_negative_integer.append(character) + continue + + if not non_negative_integer: + return False + + rest = instance[i:] + break + return (rest == "#") or jsonpointer.JsonPointer(rest) + + +with suppress(ImportError): + import uri_template + + @_checks_drafts( + draft6="uri-template", + draft7="uri-template", + draft201909="uri-template", + draft202012="uri-template", + ) + def is_uri_template(instance): + if not isinstance(instance, str): + return True + return uri_template.validate(instance) + + +with suppress(ImportError): + import isoduration + + @_checks_drafts( + draft201909="duration", + draft202012="duration", + raises=isoduration.DurationParsingException, + ) + def is_duration(instance): + if not isinstance(instance, str): + return True + return isoduration.parse_duration(instance) + + +@_checks_drafts( + draft201909="uuid", + draft202012="uuid", + raises=ValueError, +) +def is_uuid(instance): + if not isinstance(instance, str): + return True + UUID(instance) + return all(instance[position] == "-" for position in (8, 13, 18, 23)) diff --git a/.venv/lib/python3.9/site-packages/jsonschema/_legacy_validators.py b/.venv/lib/python3.9/site-packages/jsonschema/_legacy_validators.py new file mode 100644 index 0000000..c8eff2c --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonschema/_legacy_validators.py @@ -0,0 +1,224 @@ +from jsonschema import _utils +from 
jsonschema.exceptions import ValidationError + + +def ignore_ref_siblings(schema): + """ + Ignore siblings of ``$ref`` if it is present. + + Otherwise, return all validators. + + Suitable for use with `create`'s ``applicable_validators`` argument. + """ + ref = schema.get("$ref") + if ref is not None: + return [("$ref", ref)] + else: + return schema.items() + + +def dependencies_draft3(validator, dependencies, instance, schema): + if not validator.is_type(instance, "object"): + return + + for property, dependency in dependencies.items(): + if property not in instance: + continue + + if validator.is_type(dependency, "object"): + yield from validator.descend( + instance, dependency, schema_path=property, + ) + elif validator.is_type(dependency, "string"): + if dependency not in instance: + message = f"{dependency!r} is a dependency of {property!r}" + yield ValidationError(message) + else: + for each in dependency: + if each not in instance: + message = f"{each!r} is a dependency of {property!r}" + yield ValidationError(message) + + +def dependencies_draft4_draft6_draft7( + validator, + dependencies, + instance, + schema, +): + """ + Support for the ``dependencies`` validator from pre-draft 2019-09. + + In later drafts, the validator was split into separate + ``dependentRequired`` and ``dependentSchemas`` validators. 
+ """ + if not validator.is_type(instance, "object"): + return + + for property, dependency in dependencies.items(): + if property not in instance: + continue + + if validator.is_type(dependency, "array"): + for each in dependency: + if each not in instance: + message = f"{each!r} is a dependency of {property!r}" + yield ValidationError(message) + else: + yield from validator.descend( + instance, dependency, schema_path=property, + ) + + +def disallow_draft3(validator, disallow, instance, schema): + for disallowed in _utils.ensure_list(disallow): + if validator.evolve(schema={"type": [disallowed]}).is_valid(instance): + message = f"{disallowed!r} is disallowed for {instance!r}" + yield ValidationError(message) + + +def extends_draft3(validator, extends, instance, schema): + if validator.is_type(extends, "object"): + yield from validator.descend(instance, extends) + return + for index, subschema in enumerate(extends): + yield from validator.descend(instance, subschema, schema_path=index) + + +def items_draft3_draft4(validator, items, instance, schema): + if not validator.is_type(instance, "array"): + return + + if validator.is_type(items, "object"): + for index, item in enumerate(instance): + yield from validator.descend(item, items, path=index) + else: + for (index, item), subschema in zip(enumerate(instance), items): + yield from validator.descend( + item, subschema, path=index, schema_path=index, + ) + + +def items_draft6_draft7_draft201909(validator, items, instance, schema): + if not validator.is_type(instance, "array"): + return + + if validator.is_type(items, "array"): + for (index, item), subschema in zip(enumerate(instance), items): + yield from validator.descend( + item, subschema, path=index, schema_path=index, + ) + else: + for index, item in enumerate(instance): + yield from validator.descend(item, items, path=index) + + +def minimum_draft3_draft4(validator, minimum, instance, schema): + if not validator.is_type(instance, "number"): + return + + if 
schema.get("exclusiveMinimum", False): + failed = instance <= minimum + cmp = "less than or equal to" + else: + failed = instance < minimum + cmp = "less than" + + if failed: + message = f"{instance!r} is {cmp} the minimum of {minimum!r}" + yield ValidationError(message) + + +def maximum_draft3_draft4(validator, maximum, instance, schema): + if not validator.is_type(instance, "number"): + return + + if schema.get("exclusiveMaximum", False): + failed = instance >= maximum + cmp = "greater than or equal to" + else: + failed = instance > maximum + cmp = "greater than" + + if failed: + message = f"{instance!r} is {cmp} the maximum of {maximum!r}" + yield ValidationError(message) + + +def properties_draft3(validator, properties, instance, schema): + if not validator.is_type(instance, "object"): + return + + for property, subschema in properties.items(): + if property in instance: + yield from validator.descend( + instance[property], + subschema, + path=property, + schema_path=property, + ) + elif subschema.get("required", False): + error = ValidationError(f"{property!r} is a required property") + error._set( + validator="required", + validator_value=subschema["required"], + instance=instance, + schema=schema, + ) + error.path.appendleft(property) + error.schema_path.extend([property, "required"]) + yield error + + +def type_draft3(validator, types, instance, schema): + types = _utils.ensure_list(types) + + all_errors = [] + for index, type in enumerate(types): + if validator.is_type(type, "object"): + errors = list(validator.descend(instance, type, schema_path=index)) + if not errors: + return + all_errors.extend(errors) + else: + if validator.is_type(instance, type): + return + else: + reprs = [] + for type in types: + try: + reprs.append(repr(type["name"])) + except Exception: + reprs.append(repr(type)) + yield ValidationError( + f"{instance!r} is not of type {', '.join(reprs)}", + context=all_errors, + ) + + +def contains_draft6_draft7(validator, contains, instance, 
schema): + if not validator.is_type(instance, "array"): + return + + if not any( + validator.evolve(schema=contains).is_valid(element) + for element in instance + ): + yield ValidationError( + f"None of {instance!r} are valid under the given schema", + ) + + +def recursiveRef(validator, recursiveRef, instance, schema): + lookup_url, target = validator.resolver.resolution_scope, validator.schema + + for each in reversed(validator.resolver._scopes_stack[1:]): + lookup_url, next_target = validator.resolver.resolve(each) + if next_target.get("$recursiveAnchor"): + target = next_target + else: + break + + fragment = recursiveRef.lstrip("#") + subschema = validator.resolver.resolve_fragment(target, fragment) + yield from validator.descend(instance, subschema) diff --git a/.venv/lib/python3.9/site-packages/jsonschema/_reflect.py b/.venv/lib/python3.9/site-packages/jsonschema/_reflect.py new file mode 100644 index 0000000..39ee7a6 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonschema/_reflect.py @@ -0,0 +1,149 @@ +# -*- test-case-name: twisted.test.test_reflect -*- +# Copyright (c) Twisted Matrix Laboratories. +# See LICENSE for details. + +""" +Standardized versions of various cool and/or strange things that you can do +with Python's reflection capabilities. +""" + +import sys + + +class _NoModuleFound(Exception): + """ + No module was found because none exists. + """ + + + +class InvalidName(ValueError): + """ + The given name is not a dot-separated list of Python objects. + """ + + + +class ModuleNotFound(InvalidName): + """ + The module associated with the given name doesn't exist and it can't be + imported. + """ + + + +class ObjectNotFound(InvalidName): + """ + The object associated with the given name doesn't exist and it can't be + imported. 
+ """ + + + +def reraise(exception, traceback): + raise exception.with_traceback(traceback) + +reraise.__doc__ = """ +Re-raise an exception, with an optional traceback, in a way that is compatible +with both Python 2 and Python 3. + +Note that on Python 3, re-raised exceptions will be mutated, with their +C{__traceback__} attribute being set. + +@param exception: The exception instance. +@param traceback: The traceback to use, or C{None} indicating a new traceback. +""" + + +def _importAndCheckStack(importName): + """ + Import the given name as a module, then walk the stack to determine whether + the failure was the module not existing, or some code in the module (for + example a dependent import) failing. This can be helpful to determine + whether any actual application code was run. For example, to distiguish + administrative error (entering the wrong module name), from programmer + error (writing buggy code in a module that fails to import). + + @param importName: The name of the module to import. + @type importName: C{str} + @raise Exception: if something bad happens. This can be any type of + exception, since nobody knows what loading some arbitrary code might + do. + @raise _NoModuleFound: if no module was found. + """ + try: + return __import__(importName) + except ImportError: + excType, excValue, excTraceback = sys.exc_info() + while excTraceback: + execName = excTraceback.tb_frame.f_globals["__name__"] + # in Python 2 execName is None when an ImportError is encountered, + # where in Python 3 execName is equal to the importName. + if execName is None or execName == importName: + reraise(excValue, excTraceback) + excTraceback = excTraceback.tb_next + raise _NoModuleFound() + + + +def namedAny(name): + """ + Retrieve a Python object by its fully qualified name from the global Python + module namespace. The first part of the name, that describes a module, + will be discovered and imported. 
Each subsequent part of the name is + treated as the name of an attribute of the object specified by all of the + name which came before it. For example, the fully-qualified name of this + object is 'twisted.python.reflect.namedAny'. + + @type name: L{str} + @param name: The name of the object to return. + + @raise InvalidName: If the name is an empty string, starts or ends with + a '.', or is otherwise syntactically incorrect. + + @raise ModuleNotFound: If the name is syntactically correct but the + module it specifies cannot be imported because it does not appear to + exist. + + @raise ObjectNotFound: If the name is syntactically correct, includes at + least one '.', but the module it specifies cannot be imported because + it does not appear to exist. + + @raise AttributeError: If an attribute of an object along the way cannot be + accessed, or a module along the way is not found. + + @return: the Python object identified by 'name'. + """ + if not name: + raise InvalidName('Empty module name') + + names = name.split('.') + + # if the name starts or ends with a '.' or contains '..', the __import__ + # will raise an 'Empty module name' error. This will provide a better error + # message. 
+ if '' in names: + raise InvalidName( + "name must be a string giving a '.'-separated list of Python " + "identifiers, not %r" % (name,)) + + topLevelPackage = None + moduleNames = names[:] + while not topLevelPackage: + if moduleNames: + trialname = '.'.join(moduleNames) + try: + topLevelPackage = _importAndCheckStack(trialname) + except _NoModuleFound: + moduleNames.pop() + else: + if len(names) == 1: + raise ModuleNotFound("No module named %r" % (name,)) + else: + raise ObjectNotFound('%r does not name an object' % (name,)) + + obj = topLevelPackage + for n in names[1:]: + obj = getattr(obj, n) + + return obj diff --git a/.venv/lib/python3.9/site-packages/jsonschema/_types.py b/.venv/lib/python3.9/site-packages/jsonschema/_types.py new file mode 100644 index 0000000..9d59eb3 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonschema/_types.py @@ -0,0 +1,217 @@ +from __future__ import annotations + +import numbers +import typing + +from pyrsistent import pmap +import attr + +from jsonschema.exceptions import UndefinedTypeCheck + + +# unfortunately, the type of pmap is generic, and if used as the attr.ib +# converter, the generic type is presented to mypy, which then fails to match +# the concrete type of a type checker mapping +# this "do nothing" wrapper presents the correct information to mypy +def _typed_pmap_converter( + init_val: typing.Mapping[ + str, + typing.Callable[["TypeChecker", typing.Any], bool], + ], +) -> typing.Mapping[str, typing.Callable[["TypeChecker", typing.Any], bool]]: + return typing.cast( + typing.Mapping[ + str, + typing.Callable[["TypeChecker", typing.Any], bool], + ], + pmap(init_val), + ) + + +def is_array(checker, instance): + return isinstance(instance, list) + + +def is_bool(checker, instance): + return isinstance(instance, bool) + + +def is_integer(checker, instance): + # bool inherits from int, so ensure bools aren't reported as ints + if isinstance(instance, bool): + return False + return isinstance(instance, int) + + 
+def is_null(checker, instance): + return instance is None + + +def is_number(checker, instance): + # bool inherits from int, so ensure bools aren't reported as ints + if isinstance(instance, bool): + return False + return isinstance(instance, numbers.Number) + + +def is_object(checker, instance): + return isinstance(instance, dict) + + +def is_string(checker, instance): + return isinstance(instance, str) + + +def is_any(checker, instance): + return True + + +@attr.s(frozen=True) +class TypeChecker(object): + """ + A ``type`` property checker. + + A `TypeChecker` performs type checking for a `Validator`. Type + checks to perform are updated using `TypeChecker.redefine` or + `TypeChecker.redefine_many` and removed via `TypeChecker.remove`. + Each of these return a new `TypeChecker` object. + + Arguments: + + type_checkers (dict): + + The initial mapping of types to their checking functions. + """ + + _type_checkers: typing.Mapping[ + str, typing.Callable[["TypeChecker", typing.Any], bool], + ] = attr.ib( + default=pmap(), + converter=_typed_pmap_converter, + ) + + def is_type(self, instance, type): + """ + Check if the instance is of the appropriate type. + + Arguments: + + instance (object): + + The instance to check + + type (str): + + The name of the type that is expected. + + Returns: + + bool: Whether it conformed. + + + Raises: + + `jsonschema.exceptions.UndefinedTypeCheck`: + if type is unknown to this object. + """ + try: + fn = self._type_checkers[type] + except KeyError: + raise UndefinedTypeCheck(type) from None + + return fn(self, instance) + + def redefine(self, type, fn): + """ + Produce a new checker with the given type redefined. + + Arguments: + + type (str): + + The name of the type to check. + + fn (collections.abc.Callable): + + A function taking exactly two parameters - the type + checker calling the function and the instance to check. + The function should return true if instance is of this + type and false otherwise. 
+ + Returns: + + A new `TypeChecker` instance. + """ + return self.redefine_many({type: fn}) + + def redefine_many(self, definitions=()): + """ + Produce a new checker with the given types redefined. + + Arguments: + + definitions (dict): + + A dictionary mapping types to their checking functions. + + Returns: + + A new `TypeChecker` instance. + """ + return attr.evolve( + self, type_checkers=self._type_checkers.update(definitions), + ) + + def remove(self, *types): + """ + Produce a new checker with the given types forgotten. + + Arguments: + + types (~collections.abc.Iterable): + + the names of the types to remove. + + Returns: + + A new `TypeChecker` instance + + Raises: + + `jsonschema.exceptions.UndefinedTypeCheck`: + + if any given type is unknown to this object + """ + + checkers = self._type_checkers + for each in types: + try: + checkers = checkers.remove(each) + except KeyError: + raise UndefinedTypeCheck(each) + return attr.evolve(self, type_checkers=checkers) + + +draft3_type_checker = TypeChecker( + { + "any": is_any, + "array": is_array, + "boolean": is_bool, + "integer": is_integer, + "object": is_object, + "null": is_null, + "number": is_number, + "string": is_string, + }, +) +draft4_type_checker = draft3_type_checker.remove("any") +draft6_type_checker = draft4_type_checker.redefine( + "integer", + lambda checker, instance: ( + is_integer(checker, instance) + or isinstance(instance, float) and instance.is_integer() + ), +) +draft7_type_checker = draft6_type_checker +draft201909_type_checker = draft7_type_checker +draft202012_type_checker = draft201909_type_checker diff --git a/.venv/lib/python3.9/site-packages/jsonschema/_utils.py b/.venv/lib/python3.9/site-packages/jsonschema/_utils.py new file mode 100644 index 0000000..c66b07d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonschema/_utils.py @@ -0,0 +1,348 @@ +from collections.abc import Mapping, MutableMapping, Sequence +from urllib.parse import urlsplit +import itertools +import json 
+import re +import sys + +# The files() API was added in Python 3.9. +if sys.version_info >= (3, 9): # pragma: no cover + from importlib import resources +else: # pragma: no cover + import importlib_resources as resources # type: ignore + + +class URIDict(MutableMapping): + """ + Dictionary which uses normalized URIs as keys. + """ + + def normalize(self, uri): + return urlsplit(uri).geturl() + + def __init__(self, *args, **kwargs): + self.store = dict() + self.store.update(*args, **kwargs) + + def __getitem__(self, uri): + return self.store[self.normalize(uri)] + + def __setitem__(self, uri, value): + self.store[self.normalize(uri)] = value + + def __delitem__(self, uri): + del self.store[self.normalize(uri)] + + def __iter__(self): + return iter(self.store) + + def __len__(self): + return len(self.store) + + def __repr__(self): + return repr(self.store) + + +class Unset(object): + """ + An as-of-yet unset attribute or unprovided default parameter. + """ + + def __repr__(self): + return "" + + +def load_schema(name): + """ + Load a schema from ./schemas/``name``.json and return it. + """ + + path = resources.files(__package__).joinpath(f"schemas/{name}.json") + data = path.read_text(encoding="utf-8") + return json.loads(data) + + +def format_as_index(container, indices): + """ + Construct a single string containing indexing operations for the indices. + + For example for a container ``bar``, [1, 2, "foo"] -> bar[1][2]["foo"] + + Arguments: + + container (str): + + A word to use for the thing being indexed + + indices (sequence): + + The indices to format. + """ + + if not indices: + return container + return f"{container}[{']['.join(repr(index) for index in indices)}]" + + +def find_additional_properties(instance, schema): + """ + Return the set of additional properties for the given ``instance``. + + Weeds out properties that should have been validated by ``properties`` and + / or ``patternProperties``. + + Assumes ``instance`` is dict-like already. 
+ """ + + properties = schema.get("properties", {}) + patterns = "|".join(schema.get("patternProperties", {})) + for property in instance: + if property not in properties: + if patterns and re.search(patterns, property): + continue + yield property + + +def extras_msg(extras): + """ + Create an error message for extra items or properties. + """ + + if len(extras) == 1: + verb = "was" + else: + verb = "were" + return ", ".join(repr(extra) for extra in extras), verb + + +def ensure_list(thing): + """ + Wrap ``thing`` in a list if it's a single str. + + Otherwise, return it unchanged. + """ + + if isinstance(thing, str): + return [thing] + return thing + + +def _mapping_equal(one, two): + """ + Check if two mappings are equal using the semantics of `equal`. + """ + if len(one) != len(two): + return False + return all( + key in two and equal(value, two[key]) + for key, value in one.items() + ) + + +def _sequence_equal(one, two): + """ + Check if two sequences are equal using the semantics of `equal`. + """ + if len(one) != len(two): + return False + return all(equal(i, j) for i, j in zip(one, two)) + + +def equal(one, two): + """ + Check if two things are equal evading some Python type hierarchy semantics. + + Specifically in JSON Schema, evade `bool` inheriting from `int`, + recursing into sequences to do the same. + """ + if isinstance(one, str) or isinstance(two, str): + return one == two + if isinstance(one, Sequence) and isinstance(two, Sequence): + return _sequence_equal(one, two) + if isinstance(one, Mapping) and isinstance(two, Mapping): + return _mapping_equal(one, two) + return unbool(one) == unbool(two) + + +def unbool(element, true=object(), false=object()): + """ + A hack to make True and 1 and False and 0 unique for ``uniq``. + """ + + if element is True: + return true + elif element is False: + return false + return element + + +def uniq(container): + """ + Check if all of a container's elements are unique. 
+ + Tries to rely on the container being recursively sortable, or otherwise + falls back on (slow) brute force. + """ + try: + sort = sorted(unbool(i) for i in container) + sliced = itertools.islice(sort, 1, None) + + for i, j in zip(sort, sliced): + if equal(i, j): + return False + + except (NotImplementedError, TypeError): + seen = [] + for e in container: + e = unbool(e) + + for i in seen: + if equal(i, e): + return False + + seen.append(e) + return True + + +def find_evaluated_item_indexes_by_schema(validator, instance, schema): + """ + Get all indexes of items that get evaluated under the current schema + + Covers all keywords related to unevaluatedItems: items, prefixItems, if, + then, else, contains, unevaluatedItems, allOf, oneOf, anyOf + """ + if validator.is_type(schema, "boolean"): + return [] + evaluated_indexes = [] + + if "items" in schema: + return list(range(0, len(instance))) + + if "$ref" in schema: + scope, resolved = validator.resolver.resolve(schema["$ref"]) + validator.resolver.push_scope(scope) + + try: + evaluated_indexes += find_evaluated_item_indexes_by_schema( + validator, instance, resolved) + finally: + validator.resolver.pop_scope() + + if "prefixItems" in schema: + evaluated_indexes += list(range(0, len(schema["prefixItems"]))) + + if "if" in schema: + if validator.evolve(schema=schema["if"]).is_valid(instance): + evaluated_indexes += find_evaluated_item_indexes_by_schema( + validator, instance, schema["if"], + ) + if "then" in schema: + evaluated_indexes += find_evaluated_item_indexes_by_schema( + validator, instance, schema["then"], + ) + else: + if "else" in schema: + evaluated_indexes += find_evaluated_item_indexes_by_schema( + validator, instance, schema["else"], + ) + + for keyword in ["contains", "unevaluatedItems"]: + if keyword in schema: + for k, v in enumerate(instance): + if validator.evolve(schema=schema[keyword]).is_valid(v): + evaluated_indexes.append(k) + + for keyword in ["allOf", "oneOf", "anyOf"]: + if keyword in 
schema: + for subschema in schema[keyword]: + errs = list(validator.descend(instance, subschema)) + if not errs: + evaluated_indexes += find_evaluated_item_indexes_by_schema( + validator, instance, subschema, + ) + + return evaluated_indexes + + +def find_evaluated_property_keys_by_schema(validator, instance, schema): + """ + Get all keys of items that get evaluated under the current schema + + Covers all keywords related to unevaluatedProperties: properties, + additionalProperties, unevaluatedProperties, patternProperties, + dependentSchemas, allOf, oneOf, anyOf, if, then, else + """ + if validator.is_type(schema, "boolean"): + return [] + evaluated_keys = [] + + if "$ref" in schema: + scope, resolved = validator.resolver.resolve(schema["$ref"]) + validator.resolver.push_scope(scope) + + try: + evaluated_keys += find_evaluated_property_keys_by_schema( + validator, instance, resolved, + ) + finally: + validator.resolver.pop_scope() + + for keyword in [ + "properties", "additionalProperties", "unevaluatedProperties", + ]: + if keyword in schema: + if validator.is_type(schema[keyword], "boolean"): + for property, value in instance.items(): + if validator.evolve(schema=schema[keyword]).is_valid( + {property: value}, + ): + evaluated_keys.append(property) + + if validator.is_type(schema[keyword], "object"): + for property, subschema in schema[keyword].items(): + if property in instance and validator.evolve( + schema=subschema, + ).is_valid(instance[property]): + evaluated_keys.append(property) + + if "patternProperties" in schema: + for property, value in instance.items(): + for pattern, _ in schema["patternProperties"].items(): + if re.search(pattern, property) and validator.evolve( + schema=schema["patternProperties"], + ).is_valid({property: value}): + evaluated_keys.append(property) + + if "dependentSchemas" in schema: + for property, subschema in schema["dependentSchemas"].items(): + if property not in instance: + continue + evaluated_keys += 
find_evaluated_property_keys_by_schema( + validator, instance, subschema, + ) + + for keyword in ["allOf", "oneOf", "anyOf"]: + if keyword in schema: + for subschema in schema[keyword]: + errs = list(validator.descend(instance, subschema)) + if not errs: + evaluated_keys += find_evaluated_property_keys_by_schema( + validator, instance, subschema, + ) + + if "if" in schema: + if validator.evolve(schema=schema["if"]).is_valid(instance): + evaluated_keys += find_evaluated_property_keys_by_schema( + validator, instance, schema["if"], + ) + if "then" in schema: + evaluated_keys += find_evaluated_property_keys_by_schema( + validator, instance, schema["then"], + ) + else: + if "else" in schema: + evaluated_keys += find_evaluated_property_keys_by_schema( + validator, instance, schema["else"], + ) + + return evaluated_keys diff --git a/.venv/lib/python3.9/site-packages/jsonschema/_validators.py b/.venv/lib/python3.9/site-packages/jsonschema/_validators.py new file mode 100644 index 0000000..9a07f5e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonschema/_validators.py @@ -0,0 +1,463 @@ +from fractions import Fraction +from urllib.parse import urldefrag, urljoin +import re + +from jsonschema._utils import ( + ensure_list, + equal, + extras_msg, + find_additional_properties, + find_evaluated_item_indexes_by_schema, + find_evaluated_property_keys_by_schema, + unbool, + uniq, +) +from jsonschema.exceptions import FormatError, ValidationError + + +def patternProperties(validator, patternProperties, instance, schema): + if not validator.is_type(instance, "object"): + return + + for pattern, subschema in patternProperties.items(): + for k, v in instance.items(): + if re.search(pattern, k): + yield from validator.descend( + v, subschema, path=k, schema_path=pattern, + ) + + +def propertyNames(validator, propertyNames, instance, schema): + if not validator.is_type(instance, "object"): + return + + for property in instance: + yield from validator.descend(instance=property, 
schema=propertyNames) + + +def additionalProperties(validator, aP, instance, schema): + if not validator.is_type(instance, "object"): + return + + extras = set(find_additional_properties(instance, schema)) + + if validator.is_type(aP, "object"): + for extra in extras: + yield from validator.descend(instance[extra], aP, path=extra) + elif not aP and extras: + if "patternProperties" in schema: + if len(extras) == 1: + verb = "does" + else: + verb = "do" + + joined = ", ".join(repr(each) for each in sorted(extras)) + patterns = ", ".join( + repr(each) for each in sorted(schema["patternProperties"]) + ) + error = f"{joined} {verb} not match any of the regexes: {patterns}" + yield ValidationError(error) + else: + error = "Additional properties are not allowed (%s %s unexpected)" + yield ValidationError(error % extras_msg(extras)) + + +def items(validator, items, instance, schema): + if not validator.is_type(instance, "array"): + return + + prefix = len(schema.get("prefixItems", [])) + total = len(instance) + if items is False and total > prefix: + message = f"Expected at most {prefix} items, but found {total}" + yield ValidationError(message) + else: + for index in range(prefix, total): + yield from validator.descend( + instance=instance[index], + schema=items, + path=index, + ) + + +def additionalItems(validator, aI, instance, schema): + if ( + not validator.is_type(instance, "array") + or validator.is_type(schema.get("items", {}), "object") + ): + return + + len_items = len(schema.get("items", [])) + if validator.is_type(aI, "object"): + for index, item in enumerate(instance[len_items:], start=len_items): + yield from validator.descend(item, aI, path=index) + elif not aI and len(instance) > len(schema.get("items", [])): + error = "Additional items are not allowed (%s %s unexpected)" + yield ValidationError( + error % extras_msg(instance[len(schema.get("items", [])):]), + ) + + +def const(validator, const, instance, schema): + if not equal(instance, const): + yield 
ValidationError(f"{const!r} was expected") + + +def contains(validator, contains, instance, schema): + if not validator.is_type(instance, "array"): + return + + matches = 0 + min_contains = schema.get("minContains", 1) + max_contains = schema.get("maxContains", len(instance)) + + for each in instance: + if validator.evolve(schema=contains).is_valid(each): + matches += 1 + if matches > max_contains: + yield ValidationError( + "Too many items match the given schema " + f"(expected at most {max_contains})", + validator="maxContains", + validator_value=max_contains, + ) + return + + if matches < min_contains: + if not matches: + yield ValidationError( + f"{instance!r} does not contain items " + "matching the given schema", + ) + else: + yield ValidationError( + "Too few items match the given schema (expected at least " + f"{min_contains} but only {matches} matched)", + validator="minContains", + validator_value=min_contains, + ) + + +def exclusiveMinimum(validator, minimum, instance, schema): + if not validator.is_type(instance, "number"): + return + + if instance <= minimum: + yield ValidationError( + f"{instance!r} is less than or equal to " + f"the minimum of {minimum!r}", + ) + + +def exclusiveMaximum(validator, maximum, instance, schema): + if not validator.is_type(instance, "number"): + return + + if instance >= maximum: + yield ValidationError( + f"{instance!r} is greater than or equal " + f"to the maximum of {maximum!r}", + ) + + +def minimum(validator, minimum, instance, schema): + if not validator.is_type(instance, "number"): + return + + if instance < minimum: + message = f"{instance!r} is less than the minimum of {minimum!r}" + yield ValidationError(message) + + +def maximum(validator, maximum, instance, schema): + if not validator.is_type(instance, "number"): + return + + if instance > maximum: + message = f"{instance!r} is greater than the maximum of {maximum!r}" + yield ValidationError(message) + + +def multipleOf(validator, dB, instance, schema): + if 
not validator.is_type(instance, "number"): + return + + if isinstance(dB, float): + quotient = instance / dB + try: + failed = int(quotient) != quotient + except OverflowError: + # When `instance` is large and `dB` is less than one, + # quotient can overflow to infinity; and then casting to int + # raises an error. + # + # In this case we fall back to Fraction logic, which is + # exact and cannot overflow. The performance is also + # acceptable: we try the fast all-float option first, and + # we know that fraction(dB) can have at most a few hundred + # digits in each part. The worst-case slowdown is therefore + # for already-slow enormous integers or Decimals. + failed = (Fraction(instance) / Fraction(dB)).denominator != 1 + else: + failed = instance % dB + + if failed: + yield ValidationError(f"{instance!r} is not a multiple of {dB}") + + +def minItems(validator, mI, instance, schema): + if validator.is_type(instance, "array") and len(instance) < mI: + yield ValidationError(f"{instance!r} is too short") + + +def maxItems(validator, mI, instance, schema): + if validator.is_type(instance, "array") and len(instance) > mI: + yield ValidationError(f"{instance!r} is too long") + + +def uniqueItems(validator, uI, instance, schema): + if ( + uI + and validator.is_type(instance, "array") + and not uniq(instance) + ): + yield ValidationError(f"{instance!r} has non-unique elements") + + +def pattern(validator, patrn, instance, schema): + if ( + validator.is_type(instance, "string") + and not re.search(patrn, instance) + ): + yield ValidationError(f"{instance!r} does not match {patrn!r}") + + +def format(validator, format, instance, schema): + if validator.format_checker is not None: + try: + validator.format_checker.check(instance, format) + except FormatError as error: + yield ValidationError(error.message, cause=error.cause) + + +def minLength(validator, mL, instance, schema): + if validator.is_type(instance, "string") and len(instance) < mL: + yield 
ValidationError(f"{instance!r} is too short") + + +def maxLength(validator, mL, instance, schema): + if validator.is_type(instance, "string") and len(instance) > mL: + yield ValidationError(f"{instance!r} is too long") + + +def dependentRequired(validator, dependentRequired, instance, schema): + if not validator.is_type(instance, "object"): + return + + for property, dependency in dependentRequired.items(): + if property not in instance: + continue + + for each in dependency: + if each not in instance: + message = f"{each!r} is a dependency of {property!r}" + yield ValidationError(message) + + +def dependentSchemas(validator, dependentSchemas, instance, schema): + if not validator.is_type(instance, "object"): + return + + for property, dependency in dependentSchemas.items(): + if property not in instance: + continue + yield from validator.descend( + instance, dependency, schema_path=property, + ) + + +def enum(validator, enums, instance, schema): + if instance == 0 or instance == 1: + unbooled = unbool(instance) + if all(unbooled != unbool(each) for each in enums): + yield ValidationError(f"{instance!r} is not one of {enums!r}") + elif instance not in enums: + yield ValidationError(f"{instance!r} is not one of {enums!r}") + + +def ref(validator, ref, instance, schema): + resolve = getattr(validator.resolver, "resolve", None) + if resolve is None: + with validator.resolver.resolving(ref) as resolved: + yield from validator.descend(instance, resolved) + else: + scope, resolved = validator.resolver.resolve(ref) + validator.resolver.push_scope(scope) + + try: + yield from validator.descend(instance, resolved) + finally: + validator.resolver.pop_scope() + + +def dynamicRef(validator, dynamicRef, instance, schema): + _, fragment = urldefrag(dynamicRef) + + for url in validator.resolver._scopes_stack: + lookup_url = urljoin(url, dynamicRef) + with validator.resolver.resolving(lookup_url) as subschema: + if ("$dynamicAnchor" in subschema + and fragment == 
subschema["$dynamicAnchor"]): + yield from validator.descend(instance, subschema) + break + else: + with validator.resolver.resolving(dynamicRef) as subschema: + yield from validator.descend(instance, subschema) + + +def type(validator, types, instance, schema): + types = ensure_list(types) + + if not any(validator.is_type(instance, type) for type in types): + reprs = ", ".join(repr(type) for type in types) + yield ValidationError(f"{instance!r} is not of type {reprs}") + + +def properties(validator, properties, instance, schema): + if not validator.is_type(instance, "object"): + return + + for property, subschema in properties.items(): + if property in instance: + yield from validator.descend( + instance[property], + subschema, + path=property, + schema_path=property, + ) + + +def required(validator, required, instance, schema): + if not validator.is_type(instance, "object"): + return + for property in required: + if property not in instance: + yield ValidationError(f"{property!r} is a required property") + + +def minProperties(validator, mP, instance, schema): + if validator.is_type(instance, "object") and len(instance) < mP: + yield ValidationError(f"{instance!r} does not have enough properties") + + +def maxProperties(validator, mP, instance, schema): + if not validator.is_type(instance, "object"): + return + if validator.is_type(instance, "object") and len(instance) > mP: + yield ValidationError(f"{instance!r} has too many properties") + + +def allOf(validator, allOf, instance, schema): + for index, subschema in enumerate(allOf): + yield from validator.descend(instance, subschema, schema_path=index) + + +def anyOf(validator, anyOf, instance, schema): + all_errors = [] + for index, subschema in enumerate(anyOf): + errs = list(validator.descend(instance, subschema, schema_path=index)) + if not errs: + break + all_errors.extend(errs) + else: + yield ValidationError( + f"{instance!r} is not valid under any of the given schemas", + context=all_errors, + ) + + +def 
oneOf(validator, oneOf, instance, schema): + subschemas = enumerate(oneOf) + all_errors = [] + for index, subschema in subschemas: + errs = list(validator.descend(instance, subschema, schema_path=index)) + if not errs: + first_valid = subschema + break + all_errors.extend(errs) + else: + yield ValidationError( + f"{instance!r} is not valid under any of the given schemas", + context=all_errors, + ) + + more_valid = [ + each for _, each in subschemas + if validator.evolve(schema=each).is_valid(instance) + ] + if more_valid: + more_valid.append(first_valid) + reprs = ", ".join(repr(schema) for schema in more_valid) + yield ValidationError(f"{instance!r} is valid under each of {reprs}") + + +def not_(validator, not_schema, instance, schema): + if validator.evolve(schema=not_schema).is_valid(instance): + message = f"{instance!r} should not be valid under {not_schema!r}" + yield ValidationError(message) + + +def if_(validator, if_schema, instance, schema): + if validator.evolve(schema=if_schema).is_valid(instance): + if "then" in schema: + then = schema["then"] + yield from validator.descend(instance, then, schema_path="then") + elif "else" in schema: + else_ = schema["else"] + yield from validator.descend(instance, else_, schema_path="else") + + +def unevaluatedItems(validator, unevaluatedItems, instance, schema): + evaluated_item_indexes = find_evaluated_item_indexes_by_schema( + validator, instance, schema, + ) + unevaluated_items = [ + item for index, item in enumerate(instance) + if index not in evaluated_item_indexes + ] + if unevaluated_items: + error = "Unevaluated items are not allowed (%s %s unexpected)" + yield ValidationError(error % extras_msg(unevaluated_items)) + + +def unevaluatedProperties(validator, unevaluatedProperties, instance, schema): + evaluated_property_keys = find_evaluated_property_keys_by_schema( + validator, instance, schema, + ) + unevaluated_property_keys = [] + for property in instance: + if property not in evaluated_property_keys: + for 
_ in validator.descend( + instance[property], + unevaluatedProperties, + path=property, + schema_path=property, + ): + unevaluated_property_keys.append(property) + + if unevaluated_property_keys: + error = "Unevaluated properties are not allowed (%s %s unexpected)" + yield ValidationError(error % extras_msg(unevaluated_property_keys)) + + +def prefixItems(validator, prefixItems, instance, schema): + if not validator.is_type(instance, "array"): + return + + for (index, item), subschema in zip(enumerate(instance), prefixItems): + yield from validator.descend( + instance=item, + schema=subschema, + schema_path=index, + path=index, + ) diff --git a/.venv/lib/python3.9/site-packages/jsonschema/benchmarks/__init__.py b/.venv/lib/python3.9/site-packages/jsonschema/benchmarks/__init__.py new file mode 100644 index 0000000..e3dcc68 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonschema/benchmarks/__init__.py @@ -0,0 +1,5 @@ +""" +Benchmarks for validation. + +This package is *not* public API. +""" diff --git a/.venv/lib/python3.9/site-packages/jsonschema/benchmarks/issue232.py b/.venv/lib/python3.9/site-packages/jsonschema/benchmarks/issue232.py new file mode 100644 index 0000000..779f522 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonschema/benchmarks/issue232.py @@ -0,0 +1,25 @@ +""" +A performance benchmark using the example from issue #232. + +See https://github.com/Julian/jsonschema/pull/232. 
+""" +from pathlib import Path + +from pyperf import Runner +from pyrsistent import m + +from jsonschema.tests._suite import Version +import jsonschema + +issue232 = Version( + path=Path(__file__).parent / "issue232", + remotes=m(), + name="issue232", +) + + +if __name__ == "__main__": + issue232.benchmark( + runner=Runner(), + Validator=jsonschema.Draft4Validator, + ) diff --git a/.venv/lib/python3.9/site-packages/jsonschema/benchmarks/json_schema_test_suite.py b/.venv/lib/python3.9/site-packages/jsonschema/benchmarks/json_schema_test_suite.py new file mode 100644 index 0000000..905fb6a --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonschema/benchmarks/json_schema_test_suite.py @@ -0,0 +1,12 @@ +""" +A performance benchmark using the official test suite. + +This benchmarks jsonschema using every valid example in the +JSON-Schema-Test-Suite. It will take some time to complete. +""" +from pyperf import Runner + +from jsonschema.tests._suite import Suite + +if __name__ == "__main__": + Suite().benchmark(runner=Runner()) diff --git a/.venv/lib/python3.9/site-packages/jsonschema/cli.py b/.venv/lib/python3.9/site-packages/jsonschema/cli.py new file mode 100644 index 0000000..f5f6aef --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonschema/cli.py @@ -0,0 +1,284 @@ +""" +The ``jsonschema`` command line. 
+""" + +from json import JSONDecodeError +from textwrap import dedent +import argparse +import json +import sys +import traceback + +try: + from importlib import metadata +except ImportError: + import importlib_metadata as metadata # type: ignore + +import attr + +from jsonschema._reflect import namedAny +from jsonschema.exceptions import SchemaError +from jsonschema.validators import RefResolver, validator_for + + +class _CannotLoadFile(Exception): + pass + + +@attr.s +class _Outputter(object): + + _formatter = attr.ib() + _stdout = attr.ib() + _stderr = attr.ib() + + @classmethod + def from_arguments(cls, arguments, stdout, stderr): + if arguments["output"] == "plain": + formatter = _PlainFormatter(arguments["error_format"]) + elif arguments["output"] == "pretty": + formatter = _PrettyFormatter() + return cls(formatter=formatter, stdout=stdout, stderr=stderr) + + def load(self, path): + try: + file = open(path) + except FileNotFoundError: + self.filenotfound_error(path=path, exc_info=sys.exc_info()) + raise _CannotLoadFile() + + with file: + try: + return json.load(file) + except JSONDecodeError: + self.parsing_error(path=path, exc_info=sys.exc_info()) + raise _CannotLoadFile() + + def filenotfound_error(self, **kwargs): + self._stderr.write(self._formatter.filenotfound_error(**kwargs)) + + def parsing_error(self, **kwargs): + self._stderr.write(self._formatter.parsing_error(**kwargs)) + + def validation_error(self, **kwargs): + self._stderr.write(self._formatter.validation_error(**kwargs)) + + def validation_success(self, **kwargs): + self._stdout.write(self._formatter.validation_success(**kwargs)) + + +@attr.s +class _PrettyFormatter(object): + + _ERROR_MSG = dedent( + """\ + ===[{type}]===({path})=== + + {body} + ----------------------------- + """, + ) + _SUCCESS_MSG = "===[SUCCESS]===({path})===\n" + + def filenotfound_error(self, path, exc_info): + return self._ERROR_MSG.format( + path=path, + type="FileNotFoundError", + body="{!r} does not 
exist.".format(path), + ) + + def parsing_error(self, path, exc_info): + exc_type, exc_value, exc_traceback = exc_info + exc_lines = "".join( + traceback.format_exception(exc_type, exc_value, exc_traceback), + ) + return self._ERROR_MSG.format( + path=path, + type=exc_type.__name__, + body=exc_lines, + ) + + def validation_error(self, instance_path, error): + return self._ERROR_MSG.format( + path=instance_path, + type=error.__class__.__name__, + body=error, + ) + + def validation_success(self, instance_path): + return self._SUCCESS_MSG.format(path=instance_path) + + +@attr.s +class _PlainFormatter(object): + + _error_format = attr.ib() + + def filenotfound_error(self, path, exc_info): + return "{!r} does not exist.\n".format(path) + + def parsing_error(self, path, exc_info): + return "Failed to parse {}: {}\n".format( + "" if path == "" else repr(path), + exc_info[1], + ) + + def validation_error(self, instance_path, error): + return self._error_format.format(file_name=instance_path, error=error) + + def validation_success(self, instance_path): + return "" + + +def _namedAnyWithDefault(name): + if "." not in name: + name = "jsonschema." + name + return namedAny(name) + + +parser = argparse.ArgumentParser( + description="JSON Schema Validation CLI", +) +parser.add_argument( + "-i", "--instance", + action="append", + dest="instances", + help=""" + a path to a JSON instance (i.e. filename.json) to validate (may + be specified multiple times). If no instances are provided via this + option, one will be expected on standard input. + """, +) +parser.add_argument( + "-F", "--error-format", + help=""" + the format to use for each validation error message, specified + in a form suitable for str.format. This string will be passed + one formatted object named 'error' for each ValidationError. + Only provide this option when using --output=plain, which is the + default. If this argument is unprovided and --output=plain is + used, a simple default representation will be used." 
+ """, +) +parser.add_argument( + "-o", "--output", + choices=["plain", "pretty"], + default="plain", + help=""" + an output format to use. 'plain' (default) will produce minimal + text with one line for each error, while 'pretty' will produce + more detailed human-readable output on multiple lines. + """, +) +parser.add_argument( + "-V", "--validator", + type=_namedAnyWithDefault, + help=""" + the fully qualified object name of a validator to use, or, for + validators that are registered with jsonschema, simply the name + of the class. + """, +) +parser.add_argument( + "--base-uri", + help=""" + a base URI to assign to the provided schema, even if it does not + declare one (via e.g. $id). This option can be used if you wish to + resolve relative references to a particular URI (or local path) + """, +) +parser.add_argument( + "--version", + action="version", + version=metadata.version("jsonschema"), +) +parser.add_argument( + "schema", + help="the path to a JSON Schema to validate with (i.e. 
schema.json)", +) + + +def parse_args(args): + arguments = vars(parser.parse_args(args=args or ["--help"])) + if arguments["output"] != "plain" and arguments["error_format"]: + raise parser.error( + "--error-format can only be used with --output plain", + ) + if arguments["output"] == "plain" and arguments["error_format"] is None: + arguments["error_format"] = "{error.instance}: {error.message}\n" + return arguments + + +def _validate_instance(instance_path, instance, validator, outputter): + invalid = False + for error in validator.iter_errors(instance): + invalid = True + outputter.validation_error(instance_path=instance_path, error=error) + + if not invalid: + outputter.validation_success(instance_path=instance_path) + return invalid + + +def main(args=sys.argv[1:]): + sys.exit(run(arguments=parse_args(args=args))) + + +def run(arguments, stdout=sys.stdout, stderr=sys.stderr, stdin=sys.stdin): + outputter = _Outputter.from_arguments( + arguments=arguments, + stdout=stdout, + stderr=stderr, + ) + + try: + schema = outputter.load(arguments["schema"]) + except _CannotLoadFile: + return 1 + + if arguments["validator"] is None: + arguments["validator"] = validator_for(schema) + + try: + arguments["validator"].check_schema(schema) + except SchemaError as error: + outputter.validation_error( + instance_path=arguments["schema"], + error=error, + ) + return 1 + + if arguments["instances"]: + load, instances = outputter.load, arguments["instances"] + else: + def load(_): + try: + return json.load(stdin) + except JSONDecodeError: + outputter.parsing_error( + path="", exc_info=sys.exc_info(), + ) + raise _CannotLoadFile() + instances = [""] + + resolver = RefResolver( + base_uri=arguments["base_uri"], + referrer=schema, + ) if arguments["base_uri"] is not None else None + + validator = arguments["validator"](schema, resolver=resolver) + exit_code = 0 + for each in instances: + try: + instance = load(each) + except _CannotLoadFile: + exit_code = 1 + else: + exit_code |= 
_validate_instance( + instance_path=each, + instance=instance, + validator=validator, + outputter=outputter, + ) + + return exit_code diff --git a/.venv/lib/python3.9/site-packages/jsonschema/exceptions.py b/.venv/lib/python3.9/site-packages/jsonschema/exceptions.py new file mode 100644 index 0000000..274e6c5 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonschema/exceptions.py @@ -0,0 +1,363 @@ +""" +Validation errors, and some surrounding helpers. +""" +from __future__ import annotations + +from collections import defaultdict, deque +from pprint import pformat +from textwrap import dedent, indent +import itertools + +import attr + +from jsonschema import _utils + +WEAK_MATCHES: frozenset[str] = frozenset(["anyOf", "oneOf"]) +STRONG_MATCHES: frozenset[str] = frozenset() + +_unset = _utils.Unset() + + +class _Error(Exception): + def __init__( + self, + message, + validator=_unset, + path=(), + cause=None, + context=(), + validator_value=_unset, + instance=_unset, + schema=_unset, + schema_path=(), + parent=None, + ): + super(_Error, self).__init__( + message, + validator, + path, + cause, + context, + validator_value, + instance, + schema, + schema_path, + parent, + ) + self.message = message + self.path = self.relative_path = deque(path) + self.schema_path = self.relative_schema_path = deque(schema_path) + self.context = list(context) + self.cause = self.__cause__ = cause + self.validator = validator + self.validator_value = validator_value + self.instance = instance + self.schema = schema + self.parent = parent + + for error in context: + error.parent = self + + def __repr__(self): + return f"<{self.__class__.__name__}: {self.message!r}>" + + def __str__(self): + essential_for_verbose = ( + self.validator, self.validator_value, self.instance, self.schema, + ) + if any(m is _unset for m in essential_for_verbose): + return self.message + + schema_path = _utils.format_as_index( + container=self._word_for_schema_in_error_message, + 
indices=list(self.relative_schema_path)[:-1], + ) + instance_path = _utils.format_as_index( + container=self._word_for_instance_in_error_message, + indices=self.relative_path, + ) + prefix = 16 * " " + + return dedent( + f"""\ + {self.message} + + Failed validating {self.validator!r} in {schema_path}: + {indent(pformat(self.schema, width=72), prefix).lstrip()} + + On {instance_path}: + {indent(pformat(self.instance, width=72), prefix).lstrip()} + """.rstrip(), + ) + + @classmethod + def create_from(cls, other): + return cls(**other._contents()) + + @property + def absolute_path(self): + parent = self.parent + if parent is None: + return self.relative_path + + path = deque(self.relative_path) + path.extendleft(reversed(parent.absolute_path)) + return path + + @property + def absolute_schema_path(self): + parent = self.parent + if parent is None: + return self.relative_schema_path + + path = deque(self.relative_schema_path) + path.extendleft(reversed(parent.absolute_schema_path)) + return path + + @property + def json_path(self): + path = "$" + for elem in self.absolute_path: + if isinstance(elem, int): + path += "[" + str(elem) + "]" + else: + path += "." + elem + return path + + def _set(self, **kwargs): + for k, v in kwargs.items(): + if getattr(self, k) is _unset: + setattr(self, k, v) + + def _contents(self): + attrs = ( + "message", "cause", "context", "validator", "validator_value", + "path", "schema_path", "instance", "schema", "parent", + ) + return dict((attr, getattr(self, attr)) for attr in attrs) + + +class ValidationError(_Error): + """ + An instance was invalid under a provided schema. + """ + + _word_for_schema_in_error_message = "schema" + _word_for_instance_in_error_message = "instance" + + +class SchemaError(_Error): + """ + A schema was invalid under its corresponding metaschema. 
+ """ + + _word_for_schema_in_error_message = "metaschema" + _word_for_instance_in_error_message = "schema" + + +@attr.s(hash=True) +class RefResolutionError(Exception): + """ + A ref could not be resolved. + """ + + _cause = attr.ib() + + def __str__(self): + return str(self._cause) + + +class UndefinedTypeCheck(Exception): + """ + A type checker was asked to check a type it did not have registered. + """ + + def __init__(self, type): + self.type = type + + def __str__(self): + return f"Type {self.type!r} is unknown to this type checker" + + +class UnknownType(Exception): + """ + A validator was asked to validate an instance against an unknown type. + """ + + def __init__(self, type, instance, schema): + self.type = type + self.instance = instance + self.schema = schema + + def __str__(self): + prefix = 16 * " " + + return dedent( + f"""\ + Unknown type {self.type!r} for validator with schema: + {indent(pformat(self.schema, width=72), prefix).lstrip()} + + While checking instance: + {indent(pformat(self.instance, width=72), prefix).lstrip()} + """.rstrip(), + ) + + +class FormatError(Exception): + """ + Validating a format failed. + """ + + def __init__(self, message, cause=None): + super(FormatError, self).__init__(message, cause) + self.message = message + self.cause = self.__cause__ = cause + + def __str__(self): + return self.message + + +class ErrorTree(object): + """ + ErrorTrees make it easier to check which validations failed. + """ + + _instance = _unset + + def __init__(self, errors=()): + self.errors = {} + self._contents = defaultdict(self.__class__) + + for error in errors: + container = self + for element in error.path: + container = container[element] + container.errors[error.validator] = error + + container._instance = error.instance + + def __contains__(self, index): + """ + Check whether ``instance[index]`` has any errors. 
+ """ + + return index in self._contents + + def __getitem__(self, index): + """ + Retrieve the child tree one level down at the given ``index``. + + If the index is not in the instance that this tree corresponds + to and is not known by this tree, whatever error would be raised + by ``instance.__getitem__`` will be propagated (usually this is + some subclass of `LookupError`. + """ + + if self._instance is not _unset and index not in self: + self._instance[index] + return self._contents[index] + + def __setitem__(self, index, value): + """ + Add an error to the tree at the given ``index``. + """ + self._contents[index] = value + + def __iter__(self): + """ + Iterate (non-recursively) over the indices in the instance with errors. + """ + + return iter(self._contents) + + def __len__(self): + """ + Return the `total_errors`. + """ + return self.total_errors + + def __repr__(self): + return f"<{self.__class__.__name__} ({len(self)} total errors)>" + + @property + def total_errors(self): + """ + The total number of errors in the entire tree, including children. + """ + + child_errors = sum(len(tree) for _, tree in self._contents.items()) + return len(self.errors) + child_errors + + +def by_relevance(weak=WEAK_MATCHES, strong=STRONG_MATCHES): + """ + Create a key function that can be used to sort errors by relevance. + + Arguments: + weak (set): + a collection of validator names to consider to be "weak". + If there are two errors at the same level of the instance + and one is in the set of weak validator names, the other + error will take priority. By default, :validator:`anyOf` and + :validator:`oneOf` are considered weak validators and will + be superseded by other same-level validation errors. 
+ + strong (set): + a collection of validator names to consider to be "strong" + """ + def relevance(error): + validator = error.validator + return -len(error.path), validator not in weak, validator in strong + return relevance + + +relevance = by_relevance() + + +def best_match(errors, key=relevance): + """ + Try to find an error that appears to be the best match among given errors. + + In general, errors that are higher up in the instance (i.e. for which + `ValidationError.path` is shorter) are considered better matches, + since they indicate "more" is wrong with the instance. + + If the resulting match is either :validator:`oneOf` or :validator:`anyOf`, + the *opposite* assumption is made -- i.e. the deepest error is picked, + since these validators only need to match once, and any other errors may + not be relevant. + + Arguments: + errors (collections.abc.Iterable): + + the errors to select from. Do not provide a mixture of + errors from different validation attempts (i.e. from + different instances or schemas), since it won't produce + sensical output. + + key (collections.abc.Callable): + + the key to use when sorting errors. See `relevance` and + transitively `by_relevance` for more details (the default is + to sort with the defaults of that function). Changing the + default is only useful if you want to change the function + that rates errors but still want the error context descent + done by this function. + + Returns: + the best matching error, or ``None`` if the iterable was empty + + .. note:: + + This function is a heuristic. Its return value may change for a given + set of inputs from version to version if better heuristics are added. 
+ """ + errors = iter(errors) + best = next(errors, None) + if best is None: + return + best = max(itertools.chain([best], errors), key=key) + + while best.context: + best = min(best.context, key=key) + return best diff --git a/.venv/lib/python3.9/site-packages/jsonschema/protocols.py b/.venv/lib/python3.9/site-packages/jsonschema/protocols.py new file mode 100644 index 0000000..ea00446 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonschema/protocols.py @@ -0,0 +1,167 @@ +""" +typing.Protocol classes for jsonschema interfaces. +""" + +# for reference material on Protocols, see +# https://www.python.org/dev/peps/pep-0544/ + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, ClassVar, Iterator +import sys + +# doing these imports with `try ... except ImportError` doesn't pass mypy +# checking because mypy sees `typing._SpecialForm` and +# `typing_extensions._SpecialForm` as incompatible +# +# see: +# https://mypy.readthedocs.io/en/stable/runtime_troubles.html#using-new-additions-to-the-typing-module +# https://github.com/python/mypy/issues/4427 +if sys.version_info >= (3, 8): + from typing import Protocol, runtime_checkable +else: + from typing_extensions import Protocol, runtime_checkable + +# in order for Sphinx to resolve references accurately from type annotations, +# it needs to see names like `jsonschema.TypeChecker` +# therefore, only import at type-checking time (to avoid circular references), +# but use `jsonschema` for any types which will otherwise not be resolvable +if TYPE_CHECKING: + import jsonschema + +from jsonschema.exceptions import ValidationError +from jsonschema.validators import RefResolver + +# For code authors working on the validator protocol, these are the three +# use-cases which should be kept in mind: +# +# 1. As a protocol class, it can be used in type annotations to describe the +# available methods and attributes of a validator +# 2. It is the source of autodoc for the validator documentation +# 3. 
It is runtime_checkable, meaning that it can be used in isinstance() +# checks. +# +# Since protocols are not base classes, isinstance() checking is limited in +# its capabilities. See docs on runtime_checkable for detail + + +@runtime_checkable +class Validator(Protocol): + """ + The protocol to which all validator classes should adhere. + + :argument schema: the schema that the validator object + will validate with. It is assumed to be valid, and providing + an invalid schema can lead to undefined behavior. See + `Validator.check_schema` to validate a schema first. + :argument resolver: an instance of `jsonschema.RefResolver` that will be + used to resolve :validator:`$ref` properties (JSON references). If + unprovided, one will be created. + :argument format_checker: an instance of `jsonschema.FormatChecker` + whose `jsonschema.FormatChecker.conforms` method will be called to + check and see if instances conform to each :validator:`format` + property present in the schema. If unprovided, no validation + will be done for :validator:`format`. Certain formats require + additional packages to be installed (ipv5, uri, color, date-time). + The required packages can be found at the bottom of this page. + """ + + #: An object representing the validator's meta schema (the schema that + #: describes valid schemas in the given version). + META_SCHEMA: ClassVar[dict] + + #: A mapping of validator names (`str`\s) to functions + #: that validate the validator property with that name. For more + #: information see `creating-validators`. + VALIDATORS: ClassVar[dict] + + #: A `jsonschema.TypeChecker` that will be used when validating + #: :validator:`type` properties in JSON schemas. + TYPE_CHECKER: ClassVar[jsonschema.TypeChecker] + + #: The schema that was passed in when initializing the object. 
+ schema: dict | bool + + def __init__( + self, + schema: dict | bool, + resolver: RefResolver | None = None, + format_checker: jsonschema.FormatChecker | None = None, + ) -> None: + ... + + @classmethod + def check_schema(cls, schema: dict) -> None: + """ + Validate the given schema against the validator's `META_SCHEMA`. + + :raises: `jsonschema.exceptions.SchemaError` if the schema + is invalid + """ + + def is_type(self, instance: Any, type: str) -> bool: + """ + Check if the instance is of the given (JSON Schema) type. + + :type type: str + :rtype: bool + :raises: `jsonschema.exceptions.UnknownType` if ``type`` + is not a known type. + """ + + def is_valid(self, instance: dict) -> bool: + """ + Check if the instance is valid under the current `schema`. + + :rtype: bool + + >>> schema = {"maxItems" : 2} + >>> Draft3Validator(schema).is_valid([2, 3, 4]) + False + """ + + def iter_errors(self, instance: dict) -> Iterator[ValidationError]: + r""" + Lazily yield each of the validation errors in the given instance. + + :rtype: an `collections.abc.Iterable` of + `jsonschema.exceptions.ValidationError`\s + + >>> schema = { + ... "type" : "array", + ... "items" : {"enum" : [1, 2, 3]}, + ... "maxItems" : 2, + ... } + >>> v = Draft3Validator(schema) + >>> for error in sorted(v.iter_errors([2, 3, 4]), key=str): + ... print(error.message) + 4 is not one of [1, 2, 3] + [2, 3, 4] is too long + """ + + def validate(self, instance: dict) -> None: + """ + Check if the instance is valid under the current `schema`. + + :raises: `jsonschema.exceptions.ValidationError` if the + instance is invalid + + >>> schema = {"maxItems" : 2} + >>> Draft3Validator(schema).validate([2, 3, 4]) + Traceback (most recent call last): + ... + ValidationError: [2, 3, 4] is too long + """ + + def evolve(self, **kwargs) -> "Validator": + """ + Create a new validator like this one, but with given changes. + + Preserves all other attributes, so can be used to e.g. 
create a + validator with a different schema but with the same :validator:`$ref` + resolution behavior. + + >>> validator = Draft202012Validator({}) + >>> validator.evolve(schema={"type": "number"}) + Draft202012Validator(schema={'type': 'number'}, format_checker=None) + """ diff --git a/.venv/lib/python3.9/site-packages/jsonschema/schemas/draft2019-09.json b/.venv/lib/python3.9/site-packages/jsonschema/schemas/draft2019-09.json new file mode 100644 index 0000000..2248a0c --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonschema/schemas/draft2019-09.json @@ -0,0 +1,42 @@ +{ + "$schema": "https://json-schema.org/draft/2019-09/schema", + "$id": "https://json-schema.org/draft/2019-09/schema", + "$vocabulary": { + "https://json-schema.org/draft/2019-09/vocab/core": true, + "https://json-schema.org/draft/2019-09/vocab/applicator": true, + "https://json-schema.org/draft/2019-09/vocab/validation": true, + "https://json-schema.org/draft/2019-09/vocab/meta-data": true, + "https://json-schema.org/draft/2019-09/vocab/format": false, + "https://json-schema.org/draft/2019-09/vocab/content": true + }, + "$recursiveAnchor": true, + + "title": "Core and Validation specifications meta-schema", + "allOf": [ + {"$ref": "meta/core"}, + {"$ref": "meta/applicator"}, + {"$ref": "meta/validation"}, + {"$ref": "meta/meta-data"}, + {"$ref": "meta/format"}, + {"$ref": "meta/content"} + ], + "type": ["object", "boolean"], + "properties": { + "definitions": { + "$comment": "While no longer an official keyword as it is replaced by $defs, this keyword is retained in the meta-schema to prevent incompatible extensions as it remains in common use.", + "type": "object", + "additionalProperties": { "$recursiveRef": "#" }, + "default": {} + }, + "dependencies": { + "$comment": "\"dependencies\" is no longer a keyword, but schema authors should avoid redefining it to facilitate a smooth transition to \"dependentSchemas\" and \"dependentRequired\"", + "type": "object", + "additionalProperties": { 
+ "anyOf": [ + { "$recursiveRef": "#" }, + { "$ref": "meta/validation#/$defs/stringArray" } + ] + } + } + } +} diff --git a/.venv/lib/python3.9/site-packages/jsonschema/schemas/draft2020-12.json b/.venv/lib/python3.9/site-packages/jsonschema/schemas/draft2020-12.json new file mode 100644 index 0000000..d5e2d31 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonschema/schemas/draft2020-12.json @@ -0,0 +1,58 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$id": "https://json-schema.org/draft/2020-12/schema", + "$vocabulary": { + "https://json-schema.org/draft/2020-12/vocab/core": true, + "https://json-schema.org/draft/2020-12/vocab/applicator": true, + "https://json-schema.org/draft/2020-12/vocab/unevaluated": true, + "https://json-schema.org/draft/2020-12/vocab/validation": true, + "https://json-schema.org/draft/2020-12/vocab/meta-data": true, + "https://json-schema.org/draft/2020-12/vocab/format-annotation": true, + "https://json-schema.org/draft/2020-12/vocab/content": true + }, + "$dynamicAnchor": "meta", + + "title": "Core and Validation specifications meta-schema", + "allOf": [ + {"$ref": "meta/core"}, + {"$ref": "meta/applicator"}, + {"$ref": "meta/unevaluated"}, + {"$ref": "meta/validation"}, + {"$ref": "meta/meta-data"}, + {"$ref": "meta/format-annotation"}, + {"$ref": "meta/content"} + ], + "type": ["object", "boolean"], + "$comment": "This meta-schema also defines keywords that have appeared in previous drafts in order to prevent incompatible extensions as they remain in common use.", + "properties": { + "definitions": { + "$comment": "\"definitions\" has been replaced by \"$defs\".", + "type": "object", + "additionalProperties": { "$dynamicRef": "#meta" }, + "deprecated": true, + "default": {} + }, + "dependencies": { + "$comment": "\"dependencies\" has been split and replaced by \"dependentSchemas\" and \"dependentRequired\" in order to serve their differing semantics.", + "type": "object", + "additionalProperties": { + 
"anyOf": [ + { "$dynamicRef": "#meta" }, + { "$ref": "meta/validation#/$defs/stringArray" } + ] + }, + "deprecated": true, + "default": {} + }, + "$recursiveAnchor": { + "$comment": "\"$recursiveAnchor\" has been replaced by \"$dynamicAnchor\".", + "$ref": "meta/core#/$defs/anchorString", + "deprecated": true + }, + "$recursiveRef": { + "$comment": "\"$recursiveRef\" has been replaced by \"$dynamicRef\".", + "$ref": "meta/core#/$defs/uriReferenceString", + "deprecated": true + } + } +} diff --git a/.venv/lib/python3.9/site-packages/jsonschema/schemas/draft3.json b/.venv/lib/python3.9/site-packages/jsonschema/schemas/draft3.json new file mode 100644 index 0000000..23d59b6 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonschema/schemas/draft3.json @@ -0,0 +1,177 @@ +{ + "$schema" : "http://json-schema.org/draft-03/schema#", + "id" : "http://json-schema.org/draft-03/schema#", + "type" : "object", + + "properties" : { + "type" : { + "type" : ["string", "array"], + "items" : { + "type" : ["string", {"$ref" : "#"}] + }, + "uniqueItems" : true, + "default" : "any" + }, + + "properties" : { + "type" : "object", + "additionalProperties" : {"$ref" : "#", "type" : "object"}, + "default" : {} + }, + + "patternProperties" : { + "type" : "object", + "additionalProperties" : {"$ref" : "#"}, + "default" : {} + }, + + "additionalProperties" : { + "type" : [{"$ref" : "#"}, "boolean"], + "default" : {} + }, + + "items" : { + "type" : [{"$ref" : "#"}, "array"], + "items" : {"$ref" : "#"}, + "default" : {} + }, + + "additionalItems" : { + "type" : [{"$ref" : "#"}, "boolean"], + "default" : {} + }, + + "required" : { + "type" : "boolean", + "default" : false + }, + + "dependencies" : { + "type" : ["string", "array", "object"], + "additionalProperties" : { + "type" : ["string", "array", {"$ref" : "#"}], + "items" : { + "type" : "string" + } + }, + "default" : {} + }, + + "minimum" : { + "type" : "number" + }, + + "maximum" : { + "type" : "number" + }, + + "exclusiveMinimum" : { 
+ "type" : "boolean", + "default" : false + }, + + "exclusiveMaximum" : { + "type" : "boolean", + "default" : false + }, + + "maxDecimal": { + "minimum": 0, + "type": "number" + }, + + "minItems" : { + "type" : "integer", + "minimum" : 0, + "default" : 0 + }, + + "maxItems" : { + "type" : "integer", + "minimum" : 0 + }, + + "uniqueItems" : { + "type" : "boolean", + "default" : false + }, + + "pattern" : { + "type" : "string", + "format" : "regex" + }, + + "minLength" : { + "type" : "integer", + "minimum" : 0, + "default" : 0 + }, + + "maxLength" : { + "type" : "integer" + }, + + "enum" : { + "type" : "array" + }, + + "default" : { + "type" : "any" + }, + + "title" : { + "type" : "string" + }, + + "description" : { + "type" : "string" + }, + + "format" : { + "type" : "string" + }, + + "divisibleBy" : { + "type" : "number", + "minimum" : 0, + "exclusiveMinimum" : true, + "default" : 1 + }, + + "disallow" : { + "type" : ["string", "array"], + "items" : { + "type" : ["string", {"$ref" : "#"}] + }, + "uniqueItems" : true + }, + + "extends" : { + "type" : [{"$ref" : "#"}, "array"], + "items" : {"$ref" : "#"}, + "default" : {} + }, + + "id" : { + "type" : "string", + "format" : "uri" + }, + + "$ref" : { + "type" : "string", + "format" : "uri" + }, + + "$schema" : { + "type" : "string", + "format" : "uri" + } + }, + + "dependencies" : { + "exclusiveMinimum" : "minimum", + "exclusiveMaximum" : "maximum" + }, + + "default" : {} +} diff --git a/.venv/lib/python3.9/site-packages/jsonschema/schemas/draft4.json b/.venv/lib/python3.9/site-packages/jsonschema/schemas/draft4.json new file mode 100644 index 0000000..ba0c117 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonschema/schemas/draft4.json @@ -0,0 +1,149 @@ +{ + "id": "http://json-schema.org/draft-04/schema#", + "$schema": "http://json-schema.org/draft-04/schema#", + "description": "Core schema meta-schema", + "definitions": { + "schemaArray": { + "type": "array", + "minItems": 1, + "items": { "$ref": "#" } + }, + 
"positiveInteger": { + "type": "integer", + "minimum": 0 + }, + "positiveIntegerDefault0": { + "allOf": [ { "$ref": "#/definitions/positiveInteger" }, { "default": 0 } ] + }, + "simpleTypes": { + "enum": [ "array", "boolean", "integer", "null", "number", "object", "string" ] + }, + "stringArray": { + "type": "array", + "items": { "type": "string" }, + "minItems": 1, + "uniqueItems": true + } + }, + "type": "object", + "properties": { + "id": { + "format": "uri", + "type": "string" + }, + "$schema": { + "type": "string", + "format": "uri" + }, + "title": { + "type": "string" + }, + "description": { + "type": "string" + }, + "default": {}, + "multipleOf": { + "type": "number", + "minimum": 0, + "exclusiveMinimum": true + }, + "maximum": { + "type": "number" + }, + "exclusiveMaximum": { + "type": "boolean", + "default": false + }, + "minimum": { + "type": "number" + }, + "exclusiveMinimum": { + "type": "boolean", + "default": false + }, + "maxLength": { "$ref": "#/definitions/positiveInteger" }, + "minLength": { "$ref": "#/definitions/positiveIntegerDefault0" }, + "pattern": { + "type": "string", + "format": "regex" + }, + "additionalItems": { + "anyOf": [ + { "type": "boolean" }, + { "$ref": "#" } + ], + "default": {} + }, + "items": { + "anyOf": [ + { "$ref": "#" }, + { "$ref": "#/definitions/schemaArray" } + ], + "default": {} + }, + "maxItems": { "$ref": "#/definitions/positiveInteger" }, + "minItems": { "$ref": "#/definitions/positiveIntegerDefault0" }, + "uniqueItems": { + "type": "boolean", + "default": false + }, + "maxProperties": { "$ref": "#/definitions/positiveInteger" }, + "minProperties": { "$ref": "#/definitions/positiveIntegerDefault0" }, + "required": { "$ref": "#/definitions/stringArray" }, + "additionalProperties": { + "anyOf": [ + { "type": "boolean" }, + { "$ref": "#" } + ], + "default": {} + }, + "definitions": { + "type": "object", + "additionalProperties": { "$ref": "#" }, + "default": {} + }, + "properties": { + "type": "object", + 
"additionalProperties": { "$ref": "#" }, + "default": {} + }, + "patternProperties": { + "type": "object", + "additionalProperties": { "$ref": "#" }, + "default": {} + }, + "dependencies": { + "type": "object", + "additionalProperties": { + "anyOf": [ + { "$ref": "#" }, + { "$ref": "#/definitions/stringArray" } + ] + } + }, + "enum": { + "type": "array" + }, + "type": { + "anyOf": [ + { "$ref": "#/definitions/simpleTypes" }, + { + "type": "array", + "items": { "$ref": "#/definitions/simpleTypes" }, + "minItems": 1, + "uniqueItems": true + } + ] + }, + "format": { "type": "string" }, + "allOf": { "$ref": "#/definitions/schemaArray" }, + "anyOf": { "$ref": "#/definitions/schemaArray" }, + "oneOf": { "$ref": "#/definitions/schemaArray" }, + "not": { "$ref": "#" } + }, + "dependencies": { + "exclusiveMaximum": [ "maximum" ], + "exclusiveMinimum": [ "minimum" ] + }, + "default": {} +} diff --git a/.venv/lib/python3.9/site-packages/jsonschema/schemas/draft6.json b/.venv/lib/python3.9/site-packages/jsonschema/schemas/draft6.json new file mode 100644 index 0000000..a0d2bf7 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonschema/schemas/draft6.json @@ -0,0 +1,153 @@ +{ + "$schema": "http://json-schema.org/draft-06/schema#", + "$id": "http://json-schema.org/draft-06/schema#", + "title": "Core schema meta-schema", + "definitions": { + "schemaArray": { + "type": "array", + "minItems": 1, + "items": { "$ref": "#" } + }, + "nonNegativeInteger": { + "type": "integer", + "minimum": 0 + }, + "nonNegativeIntegerDefault0": { + "allOf": [ + { "$ref": "#/definitions/nonNegativeInteger" }, + { "default": 0 } + ] + }, + "simpleTypes": { + "enum": [ + "array", + "boolean", + "integer", + "null", + "number", + "object", + "string" + ] + }, + "stringArray": { + "type": "array", + "items": { "type": "string" }, + "uniqueItems": true, + "default": [] + } + }, + "type": ["object", "boolean"], + "properties": { + "$id": { + "type": "string", + "format": "uri-reference" + }, + 
"$schema": { + "type": "string", + "format": "uri" + }, + "$ref": { + "type": "string", + "format": "uri-reference" + }, + "title": { + "type": "string" + }, + "description": { + "type": "string" + }, + "default": {}, + "examples": { + "type": "array", + "items": {} + }, + "multipleOf": { + "type": "number", + "exclusiveMinimum": 0 + }, + "maximum": { + "type": "number" + }, + "exclusiveMaximum": { + "type": "number" + }, + "minimum": { + "type": "number" + }, + "exclusiveMinimum": { + "type": "number" + }, + "maxLength": { "$ref": "#/definitions/nonNegativeInteger" }, + "minLength": { "$ref": "#/definitions/nonNegativeIntegerDefault0" }, + "pattern": { + "type": "string", + "format": "regex" + }, + "additionalItems": { "$ref": "#" }, + "items": { + "anyOf": [ + { "$ref": "#" }, + { "$ref": "#/definitions/schemaArray" } + ], + "default": {} + }, + "maxItems": { "$ref": "#/definitions/nonNegativeInteger" }, + "minItems": { "$ref": "#/definitions/nonNegativeIntegerDefault0" }, + "uniqueItems": { + "type": "boolean", + "default": false + }, + "contains": { "$ref": "#" }, + "maxProperties": { "$ref": "#/definitions/nonNegativeInteger" }, + "minProperties": { "$ref": "#/definitions/nonNegativeIntegerDefault0" }, + "required": { "$ref": "#/definitions/stringArray" }, + "additionalProperties": { "$ref": "#" }, + "definitions": { + "type": "object", + "additionalProperties": { "$ref": "#" }, + "default": {} + }, + "properties": { + "type": "object", + "additionalProperties": { "$ref": "#" }, + "default": {} + }, + "patternProperties": { + "type": "object", + "additionalProperties": { "$ref": "#" }, + "propertyNames": { "format": "regex" }, + "default": {} + }, + "dependencies": { + "type": "object", + "additionalProperties": { + "anyOf": [ + { "$ref": "#" }, + { "$ref": "#/definitions/stringArray" } + ] + } + }, + "propertyNames": { "$ref": "#" }, + "const": {}, + "enum": { + "type": "array" + }, + "type": { + "anyOf": [ + { "$ref": "#/definitions/simpleTypes" }, + { + 
"type": "array", + "items": { "$ref": "#/definitions/simpleTypes" }, + "minItems": 1, + "uniqueItems": true + } + ] + }, + "format": { "type": "string" }, + "allOf": { "$ref": "#/definitions/schemaArray" }, + "anyOf": { "$ref": "#/definitions/schemaArray" }, + "oneOf": { "$ref": "#/definitions/schemaArray" }, + "not": { "$ref": "#" } + }, + "default": {} +} diff --git a/.venv/lib/python3.9/site-packages/jsonschema/schemas/draft7.json b/.venv/lib/python3.9/site-packages/jsonschema/schemas/draft7.json new file mode 100644 index 0000000..746cde9 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonschema/schemas/draft7.json @@ -0,0 +1,166 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "http://json-schema.org/draft-07/schema#", + "title": "Core schema meta-schema", + "definitions": { + "schemaArray": { + "type": "array", + "minItems": 1, + "items": { "$ref": "#" } + }, + "nonNegativeInteger": { + "type": "integer", + "minimum": 0 + }, + "nonNegativeIntegerDefault0": { + "allOf": [ + { "$ref": "#/definitions/nonNegativeInteger" }, + { "default": 0 } + ] + }, + "simpleTypes": { + "enum": [ + "array", + "boolean", + "integer", + "null", + "number", + "object", + "string" + ] + }, + "stringArray": { + "type": "array", + "items": { "type": "string" }, + "uniqueItems": true, + "default": [] + } + }, + "type": ["object", "boolean"], + "properties": { + "$id": { + "type": "string", + "format": "uri-reference" + }, + "$schema": { + "type": "string", + "format": "uri" + }, + "$ref": { + "type": "string", + "format": "uri-reference" + }, + "$comment": { + "type": "string" + }, + "title": { + "type": "string" + }, + "description": { + "type": "string" + }, + "default": true, + "readOnly": { + "type": "boolean", + "default": false + }, + "examples": { + "type": "array", + "items": true + }, + "multipleOf": { + "type": "number", + "exclusiveMinimum": 0 + }, + "maximum": { + "type": "number" + }, + "exclusiveMaximum": { + "type": "number" + }, + 
"minimum": { + "type": "number" + }, + "exclusiveMinimum": { + "type": "number" + }, + "maxLength": { "$ref": "#/definitions/nonNegativeInteger" }, + "minLength": { "$ref": "#/definitions/nonNegativeIntegerDefault0" }, + "pattern": { + "type": "string", + "format": "regex" + }, + "additionalItems": { "$ref": "#" }, + "items": { + "anyOf": [ + { "$ref": "#" }, + { "$ref": "#/definitions/schemaArray" } + ], + "default": true + }, + "maxItems": { "$ref": "#/definitions/nonNegativeInteger" }, + "minItems": { "$ref": "#/definitions/nonNegativeIntegerDefault0" }, + "uniqueItems": { + "type": "boolean", + "default": false + }, + "contains": { "$ref": "#" }, + "maxProperties": { "$ref": "#/definitions/nonNegativeInteger" }, + "minProperties": { "$ref": "#/definitions/nonNegativeIntegerDefault0" }, + "required": { "$ref": "#/definitions/stringArray" }, + "additionalProperties": { "$ref": "#" }, + "definitions": { + "type": "object", + "additionalProperties": { "$ref": "#" }, + "default": {} + }, + "properties": { + "type": "object", + "additionalProperties": { "$ref": "#" }, + "default": {} + }, + "patternProperties": { + "type": "object", + "additionalProperties": { "$ref": "#" }, + "propertyNames": { "format": "regex" }, + "default": {} + }, + "dependencies": { + "type": "object", + "additionalProperties": { + "anyOf": [ + { "$ref": "#" }, + { "$ref": "#/definitions/stringArray" } + ] + } + }, + "propertyNames": { "$ref": "#" }, + "const": true, + "enum": { + "type": "array", + "items": true + }, + "type": { + "anyOf": [ + { "$ref": "#/definitions/simpleTypes" }, + { + "type": "array", + "items": { "$ref": "#/definitions/simpleTypes" }, + "minItems": 1, + "uniqueItems": true + } + ] + }, + "format": { "type": "string" }, + "contentMediaType": { "type": "string" }, + "contentEncoding": { "type": "string" }, + "if": {"$ref": "#"}, + "then": {"$ref": "#"}, + "else": {"$ref": "#"}, + "allOf": { "$ref": "#/definitions/schemaArray" }, + "anyOf": { "$ref": 
"#/definitions/schemaArray" }, + "oneOf": { "$ref": "#/definitions/schemaArray" }, + "not": { "$ref": "#" } + }, + "default": true +} diff --git a/.venv/lib/python3.9/site-packages/jsonschema/schemas/vocabularies.json b/.venv/lib/python3.9/site-packages/jsonschema/schemas/vocabularies.json new file mode 100644 index 0000000..bca1705 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonschema/schemas/vocabularies.json @@ -0,0 +1 @@ +{"https://json-schema.org/draft/2020-12/meta/content": {"$schema": "https://json-schema.org/draft/2020-12/schema", "$id": "https://json-schema.org/draft/2020-12/meta/content", "$vocabulary": {"https://json-schema.org/draft/2020-12/vocab/content": true}, "$dynamicAnchor": "meta", "title": "Content vocabulary meta-schema", "type": ["object", "boolean"], "properties": {"contentEncoding": {"type": "string"}, "contentMediaType": {"type": "string"}, "contentSchema": {"$dynamicRef": "#meta"}}}, "https://json-schema.org/draft/2020-12/meta/unevaluated": {"$schema": "https://json-schema.org/draft/2020-12/schema", "$id": "https://json-schema.org/draft/2020-12/meta/unevaluated", "$vocabulary": {"https://json-schema.org/draft/2020-12/vocab/unevaluated": true}, "$dynamicAnchor": "meta", "title": "Unevaluated applicator vocabulary meta-schema", "type": ["object", "boolean"], "properties": {"unevaluatedItems": {"$dynamicRef": "#meta"}, "unevaluatedProperties": {"$dynamicRef": "#meta"}}}, "https://json-schema.org/draft/2020-12/meta/format-annotation": {"$schema": "https://json-schema.org/draft/2020-12/schema", "$id": "https://json-schema.org/draft/2020-12/meta/format-annotation", "$vocabulary": {"https://json-schema.org/draft/2020-12/vocab/format-annotation": true}, "$dynamicAnchor": "meta", "title": "Format vocabulary meta-schema for annotation results", "type": ["object", "boolean"], "properties": {"format": {"type": "string"}}}, "https://json-schema.org/draft/2020-12/meta/applicator": {"$schema": "https://json-schema.org/draft/2020-12/schema", 
"$id": "https://json-schema.org/draft/2020-12/meta/applicator", "$vocabulary": {"https://json-schema.org/draft/2020-12/vocab/applicator": true}, "$dynamicAnchor": "meta", "title": "Applicator vocabulary meta-schema", "type": ["object", "boolean"], "properties": {"prefixItems": {"$ref": "#/$defs/schemaArray"}, "items": {"$dynamicRef": "#meta"}, "contains": {"$dynamicRef": "#meta"}, "additionalProperties": {"$dynamicRef": "#meta"}, "properties": {"type": "object", "additionalProperties": {"$dynamicRef": "#meta"}, "default": {}}, "patternProperties": {"type": "object", "additionalProperties": {"$dynamicRef": "#meta"}, "propertyNames": {"format": "regex"}, "default": {}}, "dependentSchemas": {"type": "object", "additionalProperties": {"$dynamicRef": "#meta"}, "default": {}}, "propertyNames": {"$dynamicRef": "#meta"}, "if": {"$dynamicRef": "#meta"}, "then": {"$dynamicRef": "#meta"}, "else": {"$dynamicRef": "#meta"}, "allOf": {"$ref": "#/$defs/schemaArray"}, "anyOf": {"$ref": "#/$defs/schemaArray"}, "oneOf": {"$ref": "#/$defs/schemaArray"}, "not": {"$dynamicRef": "#meta"}}, "$defs": {"schemaArray": {"type": "array", "minItems": 1, "items": {"$dynamicRef": "#meta"}}}}, "https://json-schema.org/draft/2020-12/meta/meta-data": {"$schema": "https://json-schema.org/draft/2020-12/schema", "$id": "https://json-schema.org/draft/2020-12/meta/meta-data", "$vocabulary": {"https://json-schema.org/draft/2020-12/vocab/meta-data": true}, "$dynamicAnchor": "meta", "title": "Meta-data vocabulary meta-schema", "type": ["object", "boolean"], "properties": {"title": {"type": "string"}, "description": {"type": "string"}, "default": true, "deprecated": {"type": "boolean", "default": false}, "readOnly": {"type": "boolean", "default": false}, "writeOnly": {"type": "boolean", "default": false}, "examples": {"type": "array", "items": true}}}, "https://json-schema.org/draft/2020-12/meta/core": {"$schema": "https://json-schema.org/draft/2020-12/schema", "$id": 
"https://json-schema.org/draft/2020-12/meta/core", "$vocabulary": {"https://json-schema.org/draft/2020-12/vocab/core": true}, "$dynamicAnchor": "meta", "title": "Core vocabulary meta-schema", "type": ["object", "boolean"], "properties": {"$id": {"$ref": "#/$defs/uriReferenceString", "$comment": "Non-empty fragments not allowed.", "pattern": "^[^#]*#?$"}, "$schema": {"$ref": "#/$defs/uriString"}, "$ref": {"$ref": "#/$defs/uriReferenceString"}, "$anchor": {"$ref": "#/$defs/anchorString"}, "$dynamicRef": {"$ref": "#/$defs/uriReferenceString"}, "$dynamicAnchor": {"$ref": "#/$defs/anchorString"}, "$vocabulary": {"type": "object", "propertyNames": {"$ref": "#/$defs/uriString"}, "additionalProperties": {"type": "boolean"}}, "$comment": {"type": "string"}, "$defs": {"type": "object", "additionalProperties": {"$dynamicRef": "#meta"}}}, "$defs": {"anchorString": {"type": "string", "pattern": "^[A-Za-z_][-A-Za-z0-9._]*$"}, "uriString": {"type": "string", "format": "uri"}, "uriReferenceString": {"type": "string", "format": "uri-reference"}}}, "https://json-schema.org/draft/2020-12/meta/validation": {"$schema": "https://json-schema.org/draft/2020-12/schema", "$id": "https://json-schema.org/draft/2020-12/meta/validation", "$vocabulary": {"https://json-schema.org/draft/2020-12/vocab/validation": true}, "$dynamicAnchor": "meta", "title": "Validation vocabulary meta-schema", "type": ["object", "boolean"], "properties": {"type": {"anyOf": [{"$ref": "#/$defs/simpleTypes"}, {"type": "array", "items": {"$ref": "#/$defs/simpleTypes"}, "minItems": 1, "uniqueItems": true}]}, "const": true, "enum": {"type": "array", "items": true}, "multipleOf": {"type": "number", "exclusiveMinimum": 0}, "maximum": {"type": "number"}, "exclusiveMaximum": {"type": "number"}, "minimum": {"type": "number"}, "exclusiveMinimum": {"type": "number"}, "maxLength": {"$ref": "#/$defs/nonNegativeInteger"}, "minLength": {"$ref": "#/$defs/nonNegativeIntegerDefault0"}, "pattern": {"type": "string", "format": "regex"}, 
"maxItems": {"$ref": "#/$defs/nonNegativeInteger"}, "minItems": {"$ref": "#/$defs/nonNegativeIntegerDefault0"}, "uniqueItems": {"type": "boolean", "default": false}, "maxContains": {"$ref": "#/$defs/nonNegativeInteger"}, "minContains": {"$ref": "#/$defs/nonNegativeInteger", "default": 1}, "maxProperties": {"$ref": "#/$defs/nonNegativeInteger"}, "minProperties": {"$ref": "#/$defs/nonNegativeIntegerDefault0"}, "required": {"$ref": "#/$defs/stringArray"}, "dependentRequired": {"type": "object", "additionalProperties": {"$ref": "#/$defs/stringArray"}}}, "$defs": {"nonNegativeInteger": {"type": "integer", "minimum": 0}, "nonNegativeIntegerDefault0": {"$ref": "#/$defs/nonNegativeInteger", "default": 0}, "simpleTypes": {"enum": ["array", "boolean", "integer", "null", "number", "object", "string"]}, "stringArray": {"type": "array", "items": {"type": "string"}, "uniqueItems": true, "default": []}}}, "https://json-schema.org/draft/2019-09/meta/content": {"$schema": "https://json-schema.org/draft/2019-09/schema", "$id": "https://json-schema.org/draft/2019-09/meta/content", "$vocabulary": {"https://json-schema.org/draft/2019-09/vocab/content": true}, "$recursiveAnchor": true, "title": "Content vocabulary meta-schema", "type": ["object", "boolean"], "properties": {"contentMediaType": {"type": "string"}, "contentEncoding": {"type": "string"}, "contentSchema": {"$recursiveRef": "#"}}}, "https://json-schema.org/draft/2019-09/meta/applicator": {"$schema": "https://json-schema.org/draft/2019-09/schema", "$id": "https://json-schema.org/draft/2019-09/meta/applicator", "$vocabulary": {"https://json-schema.org/draft/2019-09/vocab/applicator": true}, "$recursiveAnchor": true, "title": "Applicator vocabulary meta-schema", "type": ["object", "boolean"], "properties": {"additionalItems": {"$recursiveRef": "#"}, "unevaluatedItems": {"$recursiveRef": "#"}, "items": {"anyOf": [{"$recursiveRef": "#"}, {"$ref": "#/$defs/schemaArray"}]}, "contains": {"$recursiveRef": "#"}, "additionalProperties": 
{"$recursiveRef": "#"}, "unevaluatedProperties": {"$recursiveRef": "#"}, "properties": {"type": "object", "additionalProperties": {"$recursiveRef": "#"}, "default": {}}, "patternProperties": {"type": "object", "additionalProperties": {"$recursiveRef": "#"}, "propertyNames": {"format": "regex"}, "default": {}}, "dependentSchemas": {"type": "object", "additionalProperties": {"$recursiveRef": "#"}}, "propertyNames": {"$recursiveRef": "#"}, "if": {"$recursiveRef": "#"}, "then": {"$recursiveRef": "#"}, "else": {"$recursiveRef": "#"}, "allOf": {"$ref": "#/$defs/schemaArray"}, "anyOf": {"$ref": "#/$defs/schemaArray"}, "oneOf": {"$ref": "#/$defs/schemaArray"}, "not": {"$recursiveRef": "#"}}, "$defs": {"schemaArray": {"type": "array", "minItems": 1, "items": {"$recursiveRef": "#"}}}}, "https://json-schema.org/draft/2019-09/meta/meta-data": {"$schema": "https://json-schema.org/draft/2019-09/schema", "$id": "https://json-schema.org/draft/2019-09/meta/meta-data", "$vocabulary": {"https://json-schema.org/draft/2019-09/vocab/meta-data": true}, "$recursiveAnchor": true, "title": "Meta-data vocabulary meta-schema", "type": ["object", "boolean"], "properties": {"title": {"type": "string"}, "description": {"type": "string"}, "default": true, "deprecated": {"type": "boolean", "default": false}, "readOnly": {"type": "boolean", "default": false}, "writeOnly": {"type": "boolean", "default": false}, "examples": {"type": "array", "items": true}}}, "https://json-schema.org/draft/2019-09/meta/core": {"$schema": "https://json-schema.org/draft/2019-09/schema", "$id": "https://json-schema.org/draft/2019-09/meta/core", "$vocabulary": {"https://json-schema.org/draft/2019-09/vocab/core": true}, "$recursiveAnchor": true, "title": "Core vocabulary meta-schema", "type": ["object", "boolean"], "properties": {"$id": {"type": "string", "format": "uri-reference", "$comment": "Non-empty fragments not allowed.", "pattern": "^[^#]*#?$"}, "$schema": {"type": "string", "format": "uri"}, "$anchor": {"type": 
"string", "pattern": "^[A-Za-z][-A-Za-z0-9.:_]*$"}, "$ref": {"type": "string", "format": "uri-reference"}, "$recursiveRef": {"type": "string", "format": "uri-reference"}, "$recursiveAnchor": {"type": "boolean", "default": false}, "$vocabulary": {"type": "object", "propertyNames": {"type": "string", "format": "uri"}, "additionalProperties": {"type": "boolean"}}, "$comment": {"type": "string"}, "$defs": {"type": "object", "additionalProperties": {"$recursiveRef": "#"}, "default": {}}}}, "https://json-schema.org/draft/2019-09/meta/validation": {"$schema": "https://json-schema.org/draft/2019-09/schema", "$id": "https://json-schema.org/draft/2019-09/meta/validation", "$vocabulary": {"https://json-schema.org/draft/2019-09/vocab/validation": true}, "$recursiveAnchor": true, "title": "Validation vocabulary meta-schema", "type": ["object", "boolean"], "properties": {"multipleOf": {"type": "number", "exclusiveMinimum": 0}, "maximum": {"type": "number"}, "exclusiveMaximum": {"type": "number"}, "minimum": {"type": "number"}, "exclusiveMinimum": {"type": "number"}, "maxLength": {"$ref": "#/$defs/nonNegativeInteger"}, "minLength": {"$ref": "#/$defs/nonNegativeIntegerDefault0"}, "pattern": {"type": "string", "format": "regex"}, "maxItems": {"$ref": "#/$defs/nonNegativeInteger"}, "minItems": {"$ref": "#/$defs/nonNegativeIntegerDefault0"}, "uniqueItems": {"type": "boolean", "default": false}, "maxContains": {"$ref": "#/$defs/nonNegativeInteger"}, "minContains": {"$ref": "#/$defs/nonNegativeInteger", "default": 1}, "maxProperties": {"$ref": "#/$defs/nonNegativeInteger"}, "minProperties": {"$ref": "#/$defs/nonNegativeIntegerDefault0"}, "required": {"$ref": "#/$defs/stringArray"}, "dependentRequired": {"type": "object", "additionalProperties": {"$ref": "#/$defs/stringArray"}}, "const": true, "enum": {"type": "array", "items": true}, "type": {"anyOf": [{"$ref": "#/$defs/simpleTypes"}, {"type": "array", "items": {"$ref": "#/$defs/simpleTypes"}, "minItems": 1, "uniqueItems": true}]}}, 
"$defs": {"nonNegativeInteger": {"type": "integer", "minimum": 0}, "nonNegativeIntegerDefault0": {"$ref": "#/$defs/nonNegativeInteger", "default": 0}, "simpleTypes": {"enum": ["array", "boolean", "integer", "null", "number", "object", "string"]}, "stringArray": {"type": "array", "items": {"type": "string"}, "uniqueItems": true, "default": []}}}, "https://json-schema.org/draft/2019-09/meta/hyper-schema": {"$schema": "https://json-schema.org/draft/2019-09/hyper-schema", "$id": "https://json-schema.org/draft/2019-09/meta/hyper-schema", "$vocabulary": {"https://json-schema.org/draft/2019-09/vocab/hyper-schema": true}, "$recursiveAnchor": true, "title": "JSON Hyper-Schema Vocabulary Schema", "type": ["object", "boolean"], "properties": {"base": {"type": "string", "format": "uri-template"}, "links": {"type": "array", "items": {"$ref": "https://json-schema.org/draft/2019-09/links"}}}, "links": [{"rel": "self", "href": "{+%24id}"}]}, "https://json-schema.org/draft/2019-09/meta/format": {"$schema": "https://json-schema.org/draft/2019-09/schema", "$id": "https://json-schema.org/draft/2019-09/meta/format", "$vocabulary": {"https://json-schema.org/draft/2019-09/vocab/format": true}, "$recursiveAnchor": true, "title": "Format vocabulary meta-schema", "type": ["object", "boolean"], "properties": {"format": {"type": "string"}}}} diff --git a/.venv/lib/python3.9/site-packages/jsonschema/tests/__init__.py b/.venv/lib/python3.9/site-packages/jsonschema/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.9/site-packages/jsonschema/tests/_helpers.py b/.venv/lib/python3.9/site-packages/jsonschema/tests/_helpers.py new file mode 100644 index 0000000..70f291f --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonschema/tests/_helpers.py @@ -0,0 +1,5 @@ +def bug(issue=None): + message = "A known bug." 
+ if issue is not None: + message += " See issue #{issue}.".format(issue=issue) + return message diff --git a/.venv/lib/python3.9/site-packages/jsonschema/tests/_suite.py b/.venv/lib/python3.9/site-packages/jsonschema/tests/_suite.py new file mode 100644 index 0000000..870304d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonschema/tests/_suite.py @@ -0,0 +1,228 @@ +""" +Python representations of the JSON Schema Test Suite tests. +""" + +from functools import partial +from pathlib import Path +import json +import os +import re +import subprocess +import sys +import unittest + +import attr + +from jsonschema.validators import _VALIDATORS +import jsonschema + + +def _find_suite(): + root = os.environ.get("JSON_SCHEMA_TEST_SUITE") + if root is not None: + return Path(root) + + root = Path(jsonschema.__file__).parent.parent / "json" + if not root.is_dir(): # pragma: no cover + raise ValueError( + ( + "Can't find the JSON-Schema-Test-Suite directory. " + "Set the 'JSON_SCHEMA_TEST_SUITE' environment " + "variable or run the tests from alongside a checkout " + "of the suite." 
+ ), + ) + return root + + +@attr.s(hash=True) +class Suite(object): + + _root = attr.ib(default=attr.Factory(_find_suite)) + + def _remotes(self): + jsonschema_suite = self._root.joinpath("bin", "jsonschema_suite") + remotes = subprocess.check_output( + [sys.executable, str(jsonschema_suite), "remotes"], + ) + return { + "http://localhost:1234/" + name.replace("\\", "/"): schema + for name, schema in json.loads(remotes.decode("utf-8")).items() + } + + def benchmark(self, runner): # pragma: no cover + for name, Validator in _VALIDATORS.items(): + self.version(name=name).benchmark( + runner=runner, + Validator=Validator, + ) + + def version(self, name): + return Version( + name=name, + path=self._root.joinpath("tests", name), + remotes=self._remotes(), + ) + + +@attr.s(hash=True) +class Version(object): + + _path = attr.ib() + _remotes = attr.ib() + + name = attr.ib() + + def benchmark(self, runner, **kwargs): # pragma: no cover + for suite in self.tests(): + for test in suite: + runner.bench_func( + test.fully_qualified_name, + partial(test.validate_ignoring_errors, **kwargs), + ) + + def tests(self): + return ( + test + for child in self._path.glob("*.json") + for test in self._tests_in( + subject=child.name[:-5], + path=child, + ) + ) + + def format_tests(self): + path = self._path.joinpath("optional", "format") + return ( + test + for child in path.glob("*.json") + for test in self._tests_in( + subject=child.name[:-5], + path=child, + ) + ) + + def optional_tests_of(self, name): + return self._tests_in( + subject=name, + path=self._path.joinpath("optional", name + ".json"), + ) + + def to_unittest_testcase(self, *suites, **kwargs): + name = kwargs.pop("name", "Test" + self.name.title().replace("-", "")) + methods = { + test.method_name: test.to_unittest_method(**kwargs) + for suite in suites + for tests in suite + for test in tests + } + cls = type(name, (unittest.TestCase,), methods) + + try: + cls.__module__ = _someone_save_us_the_module_of_the_caller() + 
except Exception: # pragma: no cover + # We're doing crazy things, so if they go wrong, like a function + # behaving differently on some other interpreter, just make them + # not happen. + pass + + return cls + + def _tests_in(self, subject, path): + for each in json.loads(path.read_text(encoding="utf-8")): + yield ( + _Test( + version=self, + subject=subject, + case_description=each["description"], + schema=each["schema"], + remotes=self._remotes, + **test, + ) for test in each["tests"] + ) + + +@attr.s(hash=True, repr=False) +class _Test(object): + + version = attr.ib() + + subject = attr.ib() + case_description = attr.ib() + description = attr.ib() + + data = attr.ib() + schema = attr.ib(repr=False) + + valid = attr.ib() + + _remotes = attr.ib() + + comment = attr.ib(default=None) + + def __repr__(self): # pragma: no cover + return "".format(self.fully_qualified_name) + + @property + def fully_qualified_name(self): # pragma: no cover + return " > ".join( + [ + self.version.name, + self.subject, + self.case_description, + self.description, + ], + ) + + @property + def method_name(self): + delimiters = r"[\W\- ]+" + return "test_{}_{}_{}".format( + re.sub(delimiters, "_", self.subject), + re.sub(delimiters, "_", self.case_description), + re.sub(delimiters, "_", self.description), + ) + + def to_unittest_method(self, skip=lambda test: None, **kwargs): + if self.valid: + def fn(this): + self.validate(**kwargs) + else: + def fn(this): + with this.assertRaises(jsonschema.ValidationError): + self.validate(**kwargs) + + fn.__name__ = self.method_name + reason = skip(self) + return unittest.skipIf(reason is not None, reason)(fn) + + def validate(self, Validator, **kwargs): + resolver = jsonschema.RefResolver.from_schema( + schema=self.schema, + store=self._remotes, + id_of=Validator.ID_OF, + ) + validator = Validator(schema=self.schema, resolver=resolver, **kwargs) + validator.validate(instance=self.data) + + def validate_ignoring_errors(self, Validator): # pragma: no 
cover + try: + self.validate(Validator=Validator) + except jsonschema.ValidationError: + pass + + +def _someone_save_us_the_module_of_the_caller(): + """ + The FQON of the module 2nd stack frames up from here. + + This is intended to allow us to dynamicallly return test case classes that + are indistinguishable from being defined in the module that wants them. + + Otherwise, trial will mis-print the FQON, and copy pasting it won't re-run + the class that really is running. + + Save us all, this is all so so so so so terrible. + """ + + return sys._getframe(2).f_globals["__name__"] diff --git a/.venv/lib/python3.9/site-packages/jsonschema/tests/fuzz_validate.py b/.venv/lib/python3.9/site-packages/jsonschema/tests/fuzz_validate.py new file mode 100644 index 0000000..a8c62ac --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonschema/tests/fuzz_validate.py @@ -0,0 +1,49 @@ +""" +Fuzzing setup for OSS-Fuzz. + +See https://github.com/google/oss-fuzz/tree/master/projects/jsonschema for the +other half of the setup here. 
+""" +import sys + +from hypothesis import given, strategies + +import jsonschema + +PRIM = strategies.one_of( + strategies.booleans(), + strategies.integers(), + strategies.floats(allow_nan=False, allow_infinity=False), + strategies.text(), +) +DICT = strategies.recursive( + base=strategies.one_of( + strategies.booleans(), + strategies.dictionaries(strategies.text(), PRIM), + ), + extend=lambda inner: strategies.dictionaries(strategies.text(), inner), +) + + +@given(obj1=DICT, obj2=DICT) +def test_schemas(obj1, obj2): + try: + jsonschema.validate(instance=obj1, schema=obj2) + except jsonschema.exceptions.ValidationError: + pass + except jsonschema.exceptions.SchemaError: + pass + + +def main(): + atheris.Setup( + sys.argv, + test_schemas.hypothesis.fuzz_one_input, + enable_python_coverage=True, + ) + atheris.Fuzz() + + +if __name__ == "__main__": + import atheris + main() diff --git a/.venv/lib/python3.9/site-packages/jsonschema/tests/test_cli.py b/.venv/lib/python3.9/site-packages/jsonschema/tests/test_cli.py new file mode 100644 index 0000000..6d42af6 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonschema/tests/test_cli.py @@ -0,0 +1,911 @@ +from contextlib import redirect_stderr, redirect_stdout +from io import StringIO +from json import JSONDecodeError +from pathlib import Path +from textwrap import dedent +from unittest import TestCase +import json +import os +import subprocess +import sys +import tempfile + +try: # pragma: no cover + from importlib import metadata +except ImportError: # pragma: no cover + import importlib_metadata as metadata # type: ignore + +from pyrsistent import m + +from jsonschema import Draft4Validator, Draft202012Validator, cli +from jsonschema.exceptions import ( + RefResolutionError, + SchemaError, + ValidationError, +) +from jsonschema.validators import _LATEST_VERSION, validate + + +def fake_validator(*errors): + errors = list(reversed(errors)) + + class FakeValidator(object): + def __init__(self, *args, **kwargs): + 
pass + + def iter_errors(self, instance): + if errors: + return errors.pop() + return [] # pragma: no cover + + @classmethod + def check_schema(self, schema): + pass + + return FakeValidator + + +def fake_open(all_contents): + def open(path): + contents = all_contents.get(path) + if contents is None: + raise FileNotFoundError(path) + return StringIO(contents) + return open + + +def _message_for(non_json): + try: + json.loads(non_json) + except JSONDecodeError as error: + return str(error) + else: # pragma: no cover + raise RuntimeError("Tried and failed to capture a JSON dump error.") + + +class TestCLI(TestCase): + def run_cli( + self, argv, files=m(), stdin=StringIO(), exit_code=0, **override, + ): + arguments = cli.parse_args(argv) + arguments.update(override) + + self.assertFalse(hasattr(cli, "open")) + cli.open = fake_open(files) + try: + stdout, stderr = StringIO(), StringIO() + actual_exit_code = cli.run( + arguments, + stdin=stdin, + stdout=stdout, + stderr=stderr, + ) + finally: + del cli.open + + self.assertEqual( + actual_exit_code, exit_code, msg=dedent( + """ + Expected an exit code of {} != {}. 
+ + stdout: {} + + stderr: {} + """.format( + exit_code, + actual_exit_code, + stdout.getvalue(), + stderr.getvalue(), + ), + ), + ) + return stdout.getvalue(), stderr.getvalue() + + def assertOutputs(self, stdout="", stderr="", **kwargs): + self.assertEqual( + self.run_cli(**kwargs), + (dedent(stdout), dedent(stderr)), + ) + + def test_invalid_instance(self): + error = ValidationError("I am an error!", instance=12) + self.assertOutputs( + files=dict( + some_schema='{"does not": "matter since it is stubbed"}', + some_instance=json.dumps(error.instance), + ), + validator=fake_validator([error]), + + argv=["-i", "some_instance", "some_schema"], + + exit_code=1, + stderr="12: I am an error!\n", + ) + + def test_invalid_instance_pretty_output(self): + error = ValidationError("I am an error!", instance=12) + self.assertOutputs( + files=dict( + some_schema='{"does not": "matter since it is stubbed"}', + some_instance=json.dumps(error.instance), + ), + validator=fake_validator([error]), + + argv=["-i", "some_instance", "--output", "pretty", "some_schema"], + + exit_code=1, + stderr="""\ + ===[ValidationError]===(some_instance)=== + + I am an error! 
+ ----------------------------- + """, + ) + + def test_invalid_instance_explicit_plain_output(self): + error = ValidationError("I am an error!", instance=12) + self.assertOutputs( + files=dict( + some_schema='{"does not": "matter since it is stubbed"}', + some_instance=json.dumps(error.instance), + ), + validator=fake_validator([error]), + + argv=["--output", "plain", "-i", "some_instance", "some_schema"], + + exit_code=1, + stderr="12: I am an error!\n", + ) + + def test_invalid_instance_multiple_errors(self): + instance = 12 + first = ValidationError("First error", instance=instance) + second = ValidationError("Second error", instance=instance) + + self.assertOutputs( + files=dict( + some_schema='{"does not": "matter since it is stubbed"}', + some_instance=json.dumps(instance), + ), + validator=fake_validator([first, second]), + + argv=["-i", "some_instance", "some_schema"], + + exit_code=1, + stderr="""\ + 12: First error + 12: Second error + """, + ) + + def test_invalid_instance_multiple_errors_pretty_output(self): + instance = 12 + first = ValidationError("First error", instance=instance) + second = ValidationError("Second error", instance=instance) + + self.assertOutputs( + files=dict( + some_schema='{"does not": "matter since it is stubbed"}', + some_instance=json.dumps(instance), + ), + validator=fake_validator([first, second]), + + argv=["-i", "some_instance", "--output", "pretty", "some_schema"], + + exit_code=1, + stderr="""\ + ===[ValidationError]===(some_instance)=== + + First error + ----------------------------- + ===[ValidationError]===(some_instance)=== + + Second error + ----------------------------- + """, + ) + + def test_multiple_invalid_instances(self): + first_instance = 12 + first_errors = [ + ValidationError("An error", instance=first_instance), + ValidationError("Another error", instance=first_instance), + ] + second_instance = "foo" + second_errors = [ValidationError("BOOM", instance=second_instance)] + + self.assertOutputs( + 
files=dict( + some_schema='{"does not": "matter since it is stubbed"}', + some_first_instance=json.dumps(first_instance), + some_second_instance=json.dumps(second_instance), + ), + validator=fake_validator(first_errors, second_errors), + + argv=[ + "-i", "some_first_instance", + "-i", "some_second_instance", + "some_schema", + ], + + exit_code=1, + stderr="""\ + 12: An error + 12: Another error + foo: BOOM + """, + ) + + def test_multiple_invalid_instances_pretty_output(self): + first_instance = 12 + first_errors = [ + ValidationError("An error", instance=first_instance), + ValidationError("Another error", instance=first_instance), + ] + second_instance = "foo" + second_errors = [ValidationError("BOOM", instance=second_instance)] + + self.assertOutputs( + files=dict( + some_schema='{"does not": "matter since it is stubbed"}', + some_first_instance=json.dumps(first_instance), + some_second_instance=json.dumps(second_instance), + ), + validator=fake_validator(first_errors, second_errors), + + argv=[ + "--output", "pretty", + "-i", "some_first_instance", + "-i", "some_second_instance", + "some_schema", + ], + + exit_code=1, + stderr="""\ + ===[ValidationError]===(some_first_instance)=== + + An error + ----------------------------- + ===[ValidationError]===(some_first_instance)=== + + Another error + ----------------------------- + ===[ValidationError]===(some_second_instance)=== + + BOOM + ----------------------------- + """, + ) + + def test_custom_error_format(self): + first_instance = 12 + first_errors = [ + ValidationError("An error", instance=first_instance), + ValidationError("Another error", instance=first_instance), + ] + second_instance = "foo" + second_errors = [ValidationError("BOOM", instance=second_instance)] + + self.assertOutputs( + files=dict( + some_schema='{"does not": "matter since it is stubbed"}', + some_first_instance=json.dumps(first_instance), + some_second_instance=json.dumps(second_instance), + ), + validator=fake_validator(first_errors, 
second_errors), + + argv=[ + "--error-format", ":{error.message}._-_.{error.instance}:", + "-i", "some_first_instance", + "-i", "some_second_instance", + "some_schema", + ], + + exit_code=1, + stderr=":An error._-_.12::Another error._-_.12::BOOM._-_.foo:", + ) + + def test_invalid_schema(self): + self.assertOutputs( + files=dict(some_schema='{"type": 12}'), + argv=["some_schema"], + + exit_code=1, + stderr="""\ + 12: 12 is not valid under any of the given schemas + """, + ) + + def test_invalid_schema_pretty_output(self): + schema = {"type": 12} + + with self.assertRaises(SchemaError) as e: + validate(schema=schema, instance="") + error = str(e.exception) + + self.assertOutputs( + files=dict(some_schema=json.dumps(schema)), + argv=["--output", "pretty", "some_schema"], + + exit_code=1, + stderr=( + "===[SchemaError]===(some_schema)===\n\n" + + str(error) + + "\n-----------------------------\n" + ), + ) + + def test_invalid_schema_multiple_errors(self): + self.assertOutputs( + files=dict(some_schema='{"type": 12, "items": 57}'), + argv=["some_schema"], + + exit_code=1, + stderr="""\ + 57: 57 is not of type 'object', 'boolean' + """, + ) + + def test_invalid_schema_multiple_errors_pretty_output(self): + schema = {"type": 12, "items": 57} + + with self.assertRaises(SchemaError) as e: + validate(schema=schema, instance="") + error = str(e.exception) + + self.assertOutputs( + files=dict(some_schema=json.dumps(schema)), + argv=["--output", "pretty", "some_schema"], + + exit_code=1, + stderr=( + "===[SchemaError]===(some_schema)===\n\n" + + str(error) + + "\n-----------------------------\n" + ), + ) + + def test_invalid_schema_with_invalid_instance(self): + """ + "Validating" an instance that's invalid under an invalid schema + just shows the schema error. 
+ """ + self.assertOutputs( + files=dict( + some_schema='{"type": 12, "minimum": 30}', + some_instance="13", + ), + argv=["-i", "some_instance", "some_schema"], + + exit_code=1, + stderr="""\ + 12: 12 is not valid under any of the given schemas + """, + ) + + def test_invalid_schema_with_invalid_instance_pretty_output(self): + instance, schema = 13, {"type": 12, "minimum": 30} + + with self.assertRaises(SchemaError) as e: + validate(schema=schema, instance=instance) + error = str(e.exception) + + self.assertOutputs( + files=dict( + some_schema=json.dumps(schema), + some_instance=json.dumps(instance), + ), + argv=["--output", "pretty", "-i", "some_instance", "some_schema"], + + exit_code=1, + stderr=( + "===[SchemaError]===(some_schema)===\n\n" + + str(error) + + "\n-----------------------------\n" + ), + ) + + def test_invalid_instance_continues_with_the_rest(self): + self.assertOutputs( + files=dict( + some_schema='{"minimum": 30}', + first_instance="not valid JSON!", + second_instance="12", + ), + argv=[ + "-i", "first_instance", + "-i", "second_instance", + "some_schema", + ], + + exit_code=1, + stderr="""\ + Failed to parse 'first_instance': {} + 12: 12 is less than the minimum of 30 + """.format(_message_for("not valid JSON!")), + ) + + def test_custom_error_format_applies_to_schema_errors(self): + instance, schema = 13, {"type": 12, "minimum": 30} + + with self.assertRaises(SchemaError): + validate(schema=schema, instance=instance) + + self.assertOutputs( + files=dict(some_schema=json.dumps(schema)), + + argv=[ + "--error-format", ":{error.message}._-_.{error.instance}:", + "some_schema", + ], + + exit_code=1, + stderr=":12 is not valid under any of the given schemas._-_.12:", + ) + + def test_instance_is_invalid_JSON(self): + instance = "not valid JSON!" 
+ + self.assertOutputs( + files=dict(some_schema="{}", some_instance=instance), + argv=["-i", "some_instance", "some_schema"], + + exit_code=1, + stderr="""\ + Failed to parse 'some_instance': {} + """.format(_message_for(instance)), + ) + + def test_instance_is_invalid_JSON_pretty_output(self): + stdout, stderr = self.run_cli( + files=dict( + some_schema="{}", + some_instance="not valid JSON!", + ), + + argv=["--output", "pretty", "-i", "some_instance", "some_schema"], + + exit_code=1, + ) + self.assertFalse(stdout) + self.assertIn( + "(some_instance)===\n\nTraceback (most recent call last):\n", + stderr, + ) + self.assertNotIn("some_schema", stderr) + + def test_instance_is_invalid_JSON_on_stdin(self): + instance = "not valid JSON!" + + self.assertOutputs( + files=dict(some_schema="{}"), + stdin=StringIO(instance), + + argv=["some_schema"], + + exit_code=1, + stderr="""\ + Failed to parse : {} + """.format(_message_for(instance)), + ) + + def test_instance_is_invalid_JSON_on_stdin_pretty_output(self): + stdout, stderr = self.run_cli( + files=dict(some_schema="{}"), + stdin=StringIO("not valid JSON!"), + + argv=["--output", "pretty", "some_schema"], + + exit_code=1, + ) + self.assertFalse(stdout) + self.assertIn( + "()===\n\nTraceback (most recent call last):\n", + stderr, + ) + self.assertNotIn("some_schema", stderr) + + def test_schema_is_invalid_JSON(self): + schema = "not valid JSON!" 
+ + self.assertOutputs( + files=dict(some_schema=schema), + + argv=["some_schema"], + + exit_code=1, + stderr="""\ + Failed to parse 'some_schema': {} + """.format(_message_for(schema)), + ) + + def test_schema_is_invalid_JSON_pretty_output(self): + stdout, stderr = self.run_cli( + files=dict(some_schema="not valid JSON!"), + + argv=["--output", "pretty", "some_schema"], + + exit_code=1, + ) + self.assertFalse(stdout) + self.assertIn( + "(some_schema)===\n\nTraceback (most recent call last):\n", + stderr, + ) + + def test_schema_and_instance_are_both_invalid_JSON(self): + """ + Only the schema error is reported, as we abort immediately. + """ + schema, instance = "not valid JSON!", "also not valid JSON!" + self.assertOutputs( + files=dict(some_schema=schema, some_instance=instance), + + argv=["some_schema"], + + exit_code=1, + stderr="""\ + Failed to parse 'some_schema': {} + """.format(_message_for(schema)), + ) + + def test_schema_and_instance_are_both_invalid_JSON_pretty_output(self): + """ + Only the schema error is reported, as we abort immediately. + """ + stdout, stderr = self.run_cli( + files=dict( + some_schema="not valid JSON!", + some_instance="also not valid JSON!", + ), + + argv=["--output", "pretty", "-i", "some_instance", "some_schema"], + + exit_code=1, + ) + self.assertFalse(stdout) + self.assertIn( + "(some_schema)===\n\nTraceback (most recent call last):\n", + stderr, + ) + self.assertNotIn("some_instance", stderr) + + def test_instance_does_not_exist(self): + self.assertOutputs( + files=dict(some_schema="{}"), + argv=["-i", "nonexisting_instance", "some_schema"], + + exit_code=1, + stderr="""\ + 'nonexisting_instance' does not exist. 
+ """, + ) + + def test_instance_does_not_exist_pretty_output(self): + self.assertOutputs( + files=dict(some_schema="{}"), + argv=[ + "--output", "pretty", + "-i", "nonexisting_instance", + "some_schema", + ], + + exit_code=1, + stderr="""\ + ===[FileNotFoundError]===(nonexisting_instance)=== + + 'nonexisting_instance' does not exist. + ----------------------------- + """, + ) + + def test_schema_does_not_exist(self): + self.assertOutputs( + argv=["nonexisting_schema"], + + exit_code=1, + stderr="'nonexisting_schema' does not exist.\n", + ) + + def test_schema_does_not_exist_pretty_output(self): + self.assertOutputs( + argv=["--output", "pretty", "nonexisting_schema"], + + exit_code=1, + stderr="""\ + ===[FileNotFoundError]===(nonexisting_schema)=== + + 'nonexisting_schema' does not exist. + ----------------------------- + """, + ) + + def test_neither_instance_nor_schema_exist(self): + self.assertOutputs( + argv=["-i", "nonexisting_instance", "nonexisting_schema"], + + exit_code=1, + stderr="'nonexisting_schema' does not exist.\n", + ) + + def test_neither_instance_nor_schema_exist_pretty_output(self): + self.assertOutputs( + argv=[ + "--output", "pretty", + "-i", "nonexisting_instance", + "nonexisting_schema", + ], + + exit_code=1, + stderr="""\ + ===[FileNotFoundError]===(nonexisting_schema)=== + + 'nonexisting_schema' does not exist. 
+ ----------------------------- + """, + ) + + def test_successful_validation(self): + self.assertOutputs( + files=dict(some_schema="{}", some_instance="{}"), + argv=["-i", "some_instance", "some_schema"], + stdout="", + stderr="", + ) + + def test_successful_validation_pretty_output(self): + self.assertOutputs( + files=dict(some_schema="{}", some_instance="{}"), + argv=["--output", "pretty", "-i", "some_instance", "some_schema"], + stdout="===[SUCCESS]===(some_instance)===\n", + stderr="", + ) + + def test_successful_validation_of_stdin(self): + self.assertOutputs( + files=dict(some_schema="{}"), + stdin=StringIO("{}"), + argv=["some_schema"], + stdout="", + stderr="", + ) + + def test_successful_validation_of_stdin_pretty_output(self): + self.assertOutputs( + files=dict(some_schema="{}"), + stdin=StringIO("{}"), + argv=["--output", "pretty", "some_schema"], + stdout="===[SUCCESS]===()===\n", + stderr="", + ) + + def test_successful_validation_of_just_the_schema(self): + self.assertOutputs( + files=dict(some_schema="{}", some_instance="{}"), + argv=["-i", "some_instance", "some_schema"], + stdout="", + stderr="", + ) + + def test_successful_validation_of_just_the_schema_pretty_output(self): + self.assertOutputs( + files=dict(some_schema="{}", some_instance="{}"), + argv=["--output", "pretty", "-i", "some_instance", "some_schema"], + stdout="===[SUCCESS]===(some_instance)===\n", + stderr="", + ) + + def test_successful_validation_via_explicit_base_uri(self): + ref_schema_file = tempfile.NamedTemporaryFile(delete=False) + self.addCleanup(os.remove, ref_schema_file.name) + + ref_path = Path(ref_schema_file.name) + ref_path.write_text('{"definitions": {"num": {"type": "integer"}}}') + + schema = f'{{"$ref": "{ref_path.name}#definitions/num"}}' + + self.assertOutputs( + files=dict(some_schema=schema, some_instance="1"), + argv=[ + "-i", "some_instance", + "--base-uri", ref_path.parent.as_uri() + "/", + "some_schema", + ], + stdout="", + stderr="", + ) + + def 
test_unsuccessful_validation_via_explicit_base_uri(self): + ref_schema_file = tempfile.NamedTemporaryFile(delete=False) + self.addCleanup(os.remove, ref_schema_file.name) + + ref_path = Path(ref_schema_file.name) + ref_path.write_text('{"definitions": {"num": {"type": "integer"}}}') + + schema = f'{{"$ref": "{ref_path.name}#definitions/num"}}' + + self.assertOutputs( + files=dict(some_schema=schema, some_instance='"1"'), + argv=[ + "-i", "some_instance", + "--base-uri", ref_path.parent.as_uri() + "/", + "some_schema", + ], + exit_code=1, + stdout="", + stderr="1: '1' is not of type 'integer'\n", + ) + + def test_nonexistent_file_with_explicit_base_uri(self): + schema = '{"$ref": "someNonexistentFile.json#definitions/num"}' + instance = "1" + + with self.assertRaises(RefResolutionError) as e: + self.assertOutputs( + files=dict( + some_schema=schema, + some_instance=instance, + ), + argv=[ + "-i", "some_instance", + "--base-uri", Path.cwd().as_uri(), + "some_schema", + ], + ) + error = str(e.exception) + self.assertIn(f"{os.sep}someNonexistentFile.json'", error) + + def test_invalid_exlicit_base_uri(self): + schema = '{"$ref": "foo.json#definitions/num"}' + instance = "1" + + with self.assertRaises(RefResolutionError) as e: + self.assertOutputs( + files=dict( + some_schema=schema, + some_instance=instance, + ), + argv=[ + "-i", "some_instance", + "--base-uri", "not@UR1", + "some_schema", + ], + ) + error = str(e.exception) + self.assertEqual( + error, "unknown url type: 'foo.json'", + ) + + def test_it_validates_using_the_latest_validator_when_unspecified(self): + # There isn't a better way now I can think of to ensure that the + # latest version was used, given that the call to validator_for + # is hidden inside the CLI, so guard that that's the case, and + # this test will have to be updated when versions change until + # we can think of a better way to ensure this behavior. 
+ self.assertIs(Draft202012Validator, _LATEST_VERSION) + + self.assertOutputs( + files=dict(some_schema='{"const": "check"}', some_instance='"a"'), + argv=["-i", "some_instance", "some_schema"], + exit_code=1, + stdout="", + stderr="a: 'check' was expected\n", + ) + + def test_it_validates_using_draft7_when_specified(self): + """ + Specifically, `const` validation applies for Draft 7. + """ + schema = """ + { + "$schema": "http://json-schema.org/draft-07/schema#", + "const": "check" + } + """ + instance = '"foo"' + self.assertOutputs( + files=dict(some_schema=schema, some_instance=instance), + argv=["-i", "some_instance", "some_schema"], + exit_code=1, + stdout="", + stderr="foo: 'check' was expected\n", + ) + + def test_it_validates_using_draft4_when_specified(self): + """ + Specifically, `const` validation *does not* apply for Draft 4. + """ + schema = """ + { + "$schema": "http://json-schema.org/draft-04/schema#", + "const": "check" + } + """ + instance = '"foo"' + self.assertOutputs( + files=dict(some_schema=schema, some_instance=instance), + argv=["-i", "some_instance", "some_schema"], + stdout="", + stderr="", + ) + + +class TestParser(TestCase): + + FakeValidator = fake_validator() + + def test_find_validator_by_fully_qualified_object_name(self): + arguments = cli.parse_args( + [ + "--validator", + "jsonschema.tests.test_cli.TestParser.FakeValidator", + "--instance", "mem://some/instance", + "mem://some/schema", + ], + ) + self.assertIs(arguments["validator"], self.FakeValidator) + + def test_find_validator_in_jsonschema(self): + arguments = cli.parse_args( + [ + "--validator", "Draft4Validator", + "--instance", "mem://some/instance", + "mem://some/schema", + ], + ) + self.assertIs(arguments["validator"], Draft4Validator) + + def cli_output_for(self, *argv): + stdout, stderr = StringIO(), StringIO() + with redirect_stdout(stdout), redirect_stderr(stderr): + with self.assertRaises(SystemExit): + cli.parse_args(argv) + return stdout.getvalue(), 
stderr.getvalue() + + def test_unknown_output(self): + stdout, stderr = self.cli_output_for( + "--output", "foo", + "mem://some/schema", + ) + self.assertIn("invalid choice: 'foo'", stderr) + self.assertFalse(stdout) + + def test_useless_error_format(self): + stdout, stderr = self.cli_output_for( + "--output", "pretty", + "--error-format", "foo", + "mem://some/schema", + ) + self.assertIn( + "--error-format can only be used with --output plain", + stderr, + ) + self.assertFalse(stdout) + + +class TestCLIIntegration(TestCase): + def test_license(self): + output = subprocess.check_output( + [sys.executable, "-m", "pip", "show", "jsonschema"], + stderr=subprocess.STDOUT, + ) + self.assertIn(b"License: MIT", output) + + def test_version(self): + version = subprocess.check_output( + [sys.executable, "-m", "jsonschema", "--version"], + stderr=subprocess.STDOUT, + ) + version = version.decode("utf-8").strip() + self.assertEqual(version, metadata.version("jsonschema")) + + def test_no_arguments_shows_usage_notes(self): + output = subprocess.check_output( + [sys.executable, "-m", "jsonschema"], + stderr=subprocess.STDOUT, + ) + output_for_help = subprocess.check_output( + [sys.executable, "-m", "jsonschema", "--help"], + stderr=subprocess.STDOUT, + ) + self.assertEqual(output, output_for_help) diff --git a/.venv/lib/python3.9/site-packages/jsonschema/tests/test_deprecations.py b/.venv/lib/python3.9/site-packages/jsonschema/tests/test_deprecations.py new file mode 100644 index 0000000..58fd050 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonschema/tests/test_deprecations.py @@ -0,0 +1,123 @@ +from unittest import TestCase + +from jsonschema import validators + + +class TestDeprecations(TestCase): + def test_version(self): + """ + As of v4.0.0, __version__ is deprecated in favor of importlib.metadata. 
+ """ + + with self.assertWarns(DeprecationWarning) as w: + from jsonschema import __version__ # noqa + + self.assertEqual(w.filename, __file__) + self.assertTrue( + str(w.warning).startswith( + "Accessing jsonschema.__version__ is deprecated", + ), + ) + + def test_validators_ErrorTree(self): + """ + As of v4.0.0, importing ErrorTree from jsonschema.validators is + deprecated in favor of doing so from jsonschema.exceptions. + """ + + with self.assertWarns(DeprecationWarning) as w: + from jsonschema.validators import ErrorTree # noqa + + self.assertEqual(w.filename, __file__) + self.assertTrue( + str(w.warning).startswith( + "Importing ErrorTree from jsonschema.validators is deprecated", + ), + ) + + def test_validators_validators(self): + """ + As of v4.0.0, accessing jsonschema.validators.validators is + deprecated. + """ + + with self.assertWarns(DeprecationWarning) as w: + value = validators.validators + self.assertEqual(value, validators._VALIDATORS) + + self.assertEqual(w.filename, __file__) + self.assertTrue( + str(w.warning).startswith( + "Accessing jsonschema.validators.validators is deprecated", + ), + ) + + def test_validators_meta_schemas(self): + """ + As of v4.0.0, accessing jsonschema.validators.meta_schemas is + deprecated. + """ + + with self.assertWarns(DeprecationWarning) as w: + value = validators.meta_schemas + self.assertEqual(value, validators._META_SCHEMAS) + + self.assertEqual(w.filename, __file__) + self.assertTrue( + str(w.warning).startswith( + "Accessing jsonschema.validators.meta_schemas is deprecated", + ), + ) + + def test_RefResolver_in_scope(self): + """ + As of v4.0.0, RefResolver.in_scope is deprecated. 
+ """ + + resolver = validators.RefResolver.from_schema({}) + with self.assertWarns(DeprecationWarning) as w: + with resolver.in_scope("foo"): + pass + + self.assertEqual(w.filename, __file__) + self.assertTrue( + str(w.warning).startswith( + "jsonschema.RefResolver.in_scope is deprecated ", + ), + ) + + def test_Validator_is_valid_two_arguments(self): + """ + As of v4.0.0, calling is_valid with two arguments (to provide a + different schema) is deprecated. + """ + + validator = validators.Draft7Validator({}) + with self.assertWarns(DeprecationWarning) as w: + result = validator.is_valid("foo", {"type": "number"}) + + self.assertFalse(result) + self.assertEqual(w.filename, __file__) + self.assertTrue( + str(w.warning).startswith( + "Passing a schema to Validator.is_valid is deprecated ", + ), + ) + + def test_Validator_iter_errors_two_arguments(self): + """ + As of v4.0.0, calling iter_errors with two arguments (to provide a + different schema) is deprecated. + """ + + validator = validators.Draft7Validator({}) + with self.assertWarns(DeprecationWarning) as w: + error, = validator.iter_errors("foo", {"type": "number"}) + + self.assertEqual(error.validator, "type") + self.assertEqual(w.filename, __file__) + self.assertTrue( + str(w.warning).startswith( + "Passing a schema to Validator.iter_errors is deprecated ", + ), + ) diff --git a/.venv/lib/python3.9/site-packages/jsonschema/tests/test_exceptions.py b/.venv/lib/python3.9/site-packages/jsonschema/tests/test_exceptions.py new file mode 100644 index 0000000..153b72e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonschema/tests/test_exceptions.py @@ -0,0 +1,475 @@ +from unittest import TestCase +import textwrap + +from jsonschema import Draft4Validator, exceptions + + +class TestBestMatch(TestCase): + def best_match(self, errors): + errors = list(errors) + best = exceptions.best_match(errors) + reversed_best = exceptions.best_match(reversed(errors)) + msg = "Didn't return a consistent best match!\nGot: 
{0}\n\nThen: {1}" + self.assertEqual( + best._contents(), reversed_best._contents(), + msg=msg.format(best, reversed_best), + ) + return best + + def test_shallower_errors_are_better_matches(self): + validator = Draft4Validator( + { + "properties": { + "foo": { + "minProperties": 2, + "properties": {"bar": {"type": "object"}}, + }, + }, + }, + ) + best = self.best_match(validator.iter_errors({"foo": {"bar": []}})) + self.assertEqual(best.validator, "minProperties") + + def test_oneOf_and_anyOf_are_weak_matches(self): + """ + A property you *must* match is probably better than one you have to + match a part of. + """ + + validator = Draft4Validator( + { + "minProperties": 2, + "anyOf": [{"type": "string"}, {"type": "number"}], + "oneOf": [{"type": "string"}, {"type": "number"}], + }, + ) + best = self.best_match(validator.iter_errors({})) + self.assertEqual(best.validator, "minProperties") + + def test_if_the_most_relevant_error_is_anyOf_it_is_traversed(self): + """ + If the most relevant error is an anyOf, then we traverse its context + and select the otherwise *least* relevant error, since in this case + that means the most specific, deep, error inside the instance. + + I.e. since only one of the schemas must match, we look for the most + relevant one. + """ + + validator = Draft4Validator( + { + "properties": { + "foo": { + "anyOf": [ + {"type": "string"}, + {"properties": {"bar": {"type": "array"}}}, + ], + }, + }, + }, + ) + best = self.best_match(validator.iter_errors({"foo": {"bar": 12}})) + self.assertEqual(best.validator_value, "array") + + def test_if_the_most_relevant_error_is_oneOf_it_is_traversed(self): + """ + If the most relevant error is an oneOf, then we traverse its context + and select the otherwise *least* relevant error, since in this case + that means the most specific, deep, error inside the instance. + + I.e. since only one of the schemas must match, we look for the most + relevant one. 
+ """ + + validator = Draft4Validator( + { + "properties": { + "foo": { + "oneOf": [ + {"type": "string"}, + {"properties": {"bar": {"type": "array"}}}, + ], + }, + }, + }, + ) + best = self.best_match(validator.iter_errors({"foo": {"bar": 12}})) + self.assertEqual(best.validator_value, "array") + + def test_if_the_most_relevant_error_is_allOf_it_is_traversed(self): + """ + Now, if the error is allOf, we traverse but select the *most* relevant + error from the context, because all schemas here must match anyways. + """ + + validator = Draft4Validator( + { + "properties": { + "foo": { + "allOf": [ + {"type": "string"}, + {"properties": {"bar": {"type": "array"}}}, + ], + }, + }, + }, + ) + best = self.best_match(validator.iter_errors({"foo": {"bar": 12}})) + self.assertEqual(best.validator_value, "string") + + def test_nested_context_for_oneOf(self): + validator = Draft4Validator( + { + "properties": { + "foo": { + "oneOf": [ + {"type": "string"}, + { + "oneOf": [ + {"type": "string"}, + { + "properties": { + "bar": {"type": "array"}, + }, + }, + ], + }, + ], + }, + }, + }, + ) + best = self.best_match(validator.iter_errors({"foo": {"bar": 12}})) + self.assertEqual(best.validator_value, "array") + + def test_one_error(self): + validator = Draft4Validator({"minProperties": 2}) + error, = validator.iter_errors({}) + self.assertEqual( + exceptions.best_match(validator.iter_errors({})).validator, + "minProperties", + ) + + def test_no_errors(self): + validator = Draft4Validator({}) + self.assertIsNone(exceptions.best_match(validator.iter_errors({}))) + + +class TestByRelevance(TestCase): + def test_short_paths_are_better_matches(self): + shallow = exceptions.ValidationError("Oh no!", path=["baz"]) + deep = exceptions.ValidationError("Oh yes!", path=["foo", "bar"]) + match = max([shallow, deep], key=exceptions.relevance) + self.assertIs(match, shallow) + + match = max([deep, shallow], key=exceptions.relevance) + self.assertIs(match, shallow) + + def 
test_global_errors_are_even_better_matches(self): + shallow = exceptions.ValidationError("Oh no!", path=[]) + deep = exceptions.ValidationError("Oh yes!", path=["foo"]) + + errors = sorted([shallow, deep], key=exceptions.relevance) + self.assertEqual( + [list(error.path) for error in errors], + [["foo"], []], + ) + + errors = sorted([deep, shallow], key=exceptions.relevance) + self.assertEqual( + [list(error.path) for error in errors], + [["foo"], []], + ) + + def test_weak_validators_are_lower_priority(self): + weak = exceptions.ValidationError("Oh no!", path=[], validator="a") + normal = exceptions.ValidationError("Oh yes!", path=[], validator="b") + + best_match = exceptions.by_relevance(weak="a") + + match = max([weak, normal], key=best_match) + self.assertIs(match, normal) + + match = max([normal, weak], key=best_match) + self.assertIs(match, normal) + + def test_strong_validators_are_higher_priority(self): + weak = exceptions.ValidationError("Oh no!", path=[], validator="a") + normal = exceptions.ValidationError("Oh yes!", path=[], validator="b") + strong = exceptions.ValidationError("Oh fine!", path=[], validator="c") + + best_match = exceptions.by_relevance(weak="a", strong="c") + + match = max([weak, normal, strong], key=best_match) + self.assertIs(match, strong) + + match = max([strong, normal, weak], key=best_match) + self.assertIs(match, strong) + + +class TestErrorTree(TestCase): + def test_it_knows_how_many_total_errors_it_contains(self): + # FIXME: https://github.com/Julian/jsonschema/issues/442 + errors = [ + exceptions.ValidationError("Something", validator=i) + for i in range(8) + ] + tree = exceptions.ErrorTree(errors) + self.assertEqual(tree.total_errors, 8) + + def test_it_contains_an_item_if_the_item_had_an_error(self): + errors = [exceptions.ValidationError("a message", path=["bar"])] + tree = exceptions.ErrorTree(errors) + self.assertIn("bar", tree) + + def test_it_does_not_contain_an_item_if_the_item_had_no_error(self): + errors = 
[exceptions.ValidationError("a message", path=["bar"])] + tree = exceptions.ErrorTree(errors) + self.assertNotIn("foo", tree) + + def test_validators_that_failed_appear_in_errors_dict(self): + error = exceptions.ValidationError("a message", validator="foo") + tree = exceptions.ErrorTree([error]) + self.assertEqual(tree.errors, {"foo": error}) + + def test_it_creates_a_child_tree_for_each_nested_path(self): + errors = [ + exceptions.ValidationError("a bar message", path=["bar"]), + exceptions.ValidationError("a bar -> 0 message", path=["bar", 0]), + ] + tree = exceptions.ErrorTree(errors) + self.assertIn(0, tree["bar"]) + self.assertNotIn(1, tree["bar"]) + + def test_children_have_their_errors_dicts_built(self): + e1, e2 = ( + exceptions.ValidationError("1", validator="foo", path=["bar", 0]), + exceptions.ValidationError("2", validator="quux", path=["bar", 0]), + ) + tree = exceptions.ErrorTree([e1, e2]) + self.assertEqual(tree["bar"][0].errors, {"foo": e1, "quux": e2}) + + def test_multiple_errors_with_instance(self): + e1, e2 = ( + exceptions.ValidationError( + "1", + validator="foo", + path=["bar", "bar2"], + instance="i1"), + exceptions.ValidationError( + "2", + validator="quux", + path=["foobar", 2], + instance="i2"), + ) + exceptions.ErrorTree([e1, e2]) + + def test_it_does_not_contain_subtrees_that_are_not_in_the_instance(self): + error = exceptions.ValidationError("123", validator="foo", instance=[]) + tree = exceptions.ErrorTree([error]) + + with self.assertRaises(IndexError): + tree[0] + + def test_if_its_in_the_tree_anyhow_it_does_not_raise_an_error(self): + """ + If a validator is dumb (like :validator:`required` in draft 3) and + refers to a path that isn't in the instance, the tree still properly + returns a subtree for that path. 
+ """ + + error = exceptions.ValidationError( + "a message", validator="foo", instance={}, path=["foo"], + ) + tree = exceptions.ErrorTree([error]) + self.assertIsInstance(tree["foo"], exceptions.ErrorTree) + + def test_repr(self): + e1, e2 = ( + exceptions.ValidationError( + "1", + validator="foo", + path=["bar", "bar2"], + instance="i1"), + exceptions.ValidationError( + "2", + validator="quux", + path=["foobar", 2], + instance="i2"), + ) + tree = exceptions.ErrorTree([e1, e2]) + self.assertEqual(repr(tree), "") + + +class TestErrorInitReprStr(TestCase): + def make_error(self, **kwargs): + defaults = dict( + message="hello", + validator="type", + validator_value="string", + instance=5, + schema={"type": "string"}, + ) + defaults.update(kwargs) + return exceptions.ValidationError(**defaults) + + def assertShows(self, expected, **kwargs): + expected = textwrap.dedent(expected).rstrip("\n") + + error = self.make_error(**kwargs) + message_line, _, rest = str(error).partition("\n") + self.assertEqual(message_line, error.message) + self.assertEqual(rest, expected) + + def test_it_calls_super_and_sets_args(self): + error = self.make_error() + self.assertGreater(len(error.args), 1) + + def test_repr(self): + self.assertEqual( + repr(exceptions.ValidationError(message="Hello!")), + "", + ) + + def test_unset_error(self): + error = exceptions.ValidationError("message") + self.assertEqual(str(error), "message") + + kwargs = { + "validator": "type", + "validator_value": "string", + "instance": 5, + "schema": {"type": "string"}, + } + # Just the message should show if any of the attributes are unset + for attr in kwargs: + k = dict(kwargs) + del k[attr] + error = exceptions.ValidationError("message", **k) + self.assertEqual(str(error), "message") + + def test_empty_paths(self): + self.assertShows( + """ + Failed validating 'type' in schema: + {'type': 'string'} + + On instance: + 5 + """, + path=[], + schema_path=[], + ) + + def test_one_item_paths(self): + self.assertShows( + 
""" + Failed validating 'type' in schema: + {'type': 'string'} + + On instance[0]: + 5 + """, + path=[0], + schema_path=["items"], + ) + + def test_multiple_item_paths(self): + self.assertShows( + """ + Failed validating 'type' in schema['items'][0]: + {'type': 'string'} + + On instance[0]['a']: + 5 + """, + path=[0, "a"], + schema_path=["items", 0, 1], + ) + + def test_uses_pprint(self): + self.assertShows( + """ + Failed validating 'maxLength' in schema: + {0: 0, + 1: 1, + 2: 2, + 3: 3, + 4: 4, + 5: 5, + 6: 6, + 7: 7, + 8: 8, + 9: 9, + 10: 10, + 11: 11, + 12: 12, + 13: 13, + 14: 14, + 15: 15, + 16: 16, + 17: 17, + 18: 18, + 19: 19} + + On instance: + [0, + 1, + 2, + 3, + 4, + 5, + 6, + 7, + 8, + 9, + 10, + 11, + 12, + 13, + 14, + 15, + 16, + 17, + 18, + 19, + 20, + 21, + 22, + 23, + 24] + """, + instance=list(range(25)), + schema=dict(zip(range(20), range(20))), + validator="maxLength", + ) + + def test_str_works_with_instances_having_overriden_eq_operator(self): + """ + Check for https://github.com/Julian/jsonschema/issues/164 which + rendered exceptions unusable when a `ValidationError` involved + instances with an `__eq__` method that returned truthy values. 
+ """ + + class DontEQMeBro(object): + def __eq__(this, other): # pragma: no cover + self.fail("Don't!") + + def __ne__(this, other): # pragma: no cover + self.fail("Don't!") + + instance = DontEQMeBro() + error = exceptions.ValidationError( + "a message", + validator="foo", + instance=instance, + validator_value="some", + schema="schema", + ) + self.assertIn(repr(instance), str(error)) + + +class TestHashable(TestCase): + def test_hashable(self): + set([exceptions.ValidationError("")]) + set([exceptions.SchemaError("")]) diff --git a/.venv/lib/python3.9/site-packages/jsonschema/tests/test_format.py b/.venv/lib/python3.9/site-packages/jsonschema/tests/test_format.py new file mode 100644 index 0000000..1846cb2 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonschema/tests/test_format.py @@ -0,0 +1,107 @@ +""" +Tests for the parts of jsonschema related to the :validator:`format` property. +""" + +from unittest import TestCase + +from jsonschema import FormatChecker, FormatError, ValidationError +from jsonschema.validators import Draft4Validator + +BOOM = ValueError("Boom!") +BANG = ZeroDivisionError("Bang!") + + +def boom(thing): + if thing == "bang": + raise BANG + raise BOOM + + +class TestFormatChecker(TestCase): + def test_it_can_validate_no_formats(self): + checker = FormatChecker(formats=()) + self.assertFalse(checker.checkers) + + def test_it_raises_a_key_error_for_unknown_formats(self): + with self.assertRaises(KeyError): + FormatChecker(formats=["o noes"]) + + def test_it_can_register_cls_checkers(self): + original = dict(FormatChecker.checkers) + self.addCleanup(FormatChecker.checkers.pop, "boom") + FormatChecker.cls_checks("boom")(boom) + self.assertEqual( + FormatChecker.checkers, + dict(original, boom=(boom, ())), + ) + + def test_it_can_register_checkers(self): + checker = FormatChecker() + checker.checks("boom")(boom) + self.assertEqual( + checker.checkers, + dict(FormatChecker.checkers, boom=(boom, ())), + ) + + def 
test_it_catches_registered_errors(self): + checker = FormatChecker() + checker.checks("boom", raises=type(BOOM))(boom) + + with self.assertRaises(FormatError) as cm: + checker.check(instance=12, format="boom") + + self.assertIs(cm.exception.cause, BOOM) + self.assertIs(cm.exception.__cause__, BOOM) + + # Unregistered errors should not be caught + with self.assertRaises(type(BANG)): + checker.check(instance="bang", format="boom") + + def test_format_error_causes_become_validation_error_causes(self): + checker = FormatChecker() + checker.checks("boom", raises=ValueError)(boom) + validator = Draft4Validator({"format": "boom"}, format_checker=checker) + + with self.assertRaises(ValidationError) as cm: + validator.validate("BOOM") + + self.assertIs(cm.exception.cause, BOOM) + self.assertIs(cm.exception.__cause__, BOOM) + + def test_format_checkers_come_with_defaults(self): + # This is bad :/ but relied upon. + # The docs for quite awhile recommended people do things like + # validate(..., format_checker=FormatChecker()) + # We should change that, but we can't without deprecation... 
+ checker = FormatChecker() + with self.assertRaises(FormatError): + checker.check(instance="not-an-ipv4", format="ipv4") + + def test_repr(self): + checker = FormatChecker(formats=()) + checker.checks("foo")(lambda thing: True) # pragma: no cover + checker.checks("bar")(lambda thing: True) # pragma: no cover + checker.checks("baz")(lambda thing: True) # pragma: no cover + self.assertEqual( + repr(checker), + "", + ) + + def test_duration_format(self): + try: + from jsonschema._format import is_duration # noqa: F401 + except ImportError: # pragma: no cover + pass + else: + checker = FormatChecker() + self.assertTrue(checker.conforms(1, "duration")) + self.assertTrue(checker.conforms("P4Y", "duration")) + self.assertFalse(checker.conforms("test", "duration")) + + def test_uuid_format(self): + checker = FormatChecker() + self.assertTrue(checker.conforms(1, "uuid")) + self.assertTrue( + checker.conforms("6e6659ec-4503-4428-9f03-2e2ea4d6c278", "uuid"), + ) + self.assertFalse(checker.conforms("test", "uuid")) diff --git a/.venv/lib/python3.9/site-packages/jsonschema/tests/test_jsonschema_test_suite.py b/.venv/lib/python3.9/site-packages/jsonschema/tests/test_jsonschema_test_suite.py new file mode 100644 index 0000000..c26da0a --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonschema/tests/test_jsonschema_test_suite.py @@ -0,0 +1,447 @@ +""" +Test runner for the JSON Schema official test suite + +Tests comprehensive correctness of each draft's validator. + +See https://github.com/json-schema-org/JSON-Schema-Test-Suite for details. 
+""" + +import sys + +from jsonschema import ( + Draft3Validator, + Draft4Validator, + Draft6Validator, + Draft7Validator, + Draft201909Validator, + Draft202012Validator, + draft3_format_checker, + draft4_format_checker, + draft6_format_checker, + draft7_format_checker, + draft201909_format_checker, + draft202012_format_checker, +) +from jsonschema.tests._helpers import bug +from jsonschema.tests._suite import Suite + +SUITE = Suite() +DRAFT3 = SUITE.version(name="draft3") +DRAFT4 = SUITE.version(name="draft4") +DRAFT6 = SUITE.version(name="draft6") +DRAFT7 = SUITE.version(name="draft7") +DRAFT201909 = SUITE.version(name="draft2019-09") +DRAFT202012 = SUITE.version(name="draft2020-12") + + +def skip(message, **kwargs): + def skipper(test): + if all(value == getattr(test, attr) for attr, value in kwargs.items()): + return message + return skipper + + +def missing_format(checker): + def missing_format(test): # pragma: no cover + schema = test.schema + if ( + schema is True + or schema is False + or "format" not in schema + or schema["format"] in checker.checkers + or test.valid + ): + return + + return "Format checker {0!r} not found.".format(schema["format"]) + return missing_format + + +def complex_email_validation(test): + if test.subject != "email": + return + + message = "Complex email validation is (intentionally) unsupported." + return skip( + message=message, + description="dot after local part is not valid", + )(test) or skip( + message=message, + description="dot before local part is not valid", + )(test) or skip( + message=message, + description="two subsequent dots inside local part are not valid", + )(test) + + +is_narrow_build = sys.maxunicode == 2 ** 16 - 1 +if is_narrow_build: # pragma: no cover + message = "Not running surrogate Unicode case, this Python is narrow." 
+ + def narrow_unicode_build(test): # pragma: no cover + return skip( + message=message, + description=( + "one supplementary Unicode code point is not long enough" + ), + )(test) or skip( + message=message, + description="two supplementary Unicode code points is long enough", + )(test) +else: + def narrow_unicode_build(test): # pragma: no cover + return + + +if sys.version_info < (3, 9): # pragma: no cover + message = "Rejecting leading zeros is 3.9+" + allowed_leading_zeros = skip( + message=message, + subject="ipv4", + description=( + "leading zeroes should be rejected, as they are treated as octals" + ), + ) +else: + def allowed_leading_zeros(test): # pragma: no cover + return + + +def leap_second(test): + message = "Leap seconds are unsupported." + return skip( + message=message, + subject="time", + description="a valid time string with leap second", + )(test) or skip( + message=message, + subject="time", + description="a valid time string with leap second, Zulu", + )(test) or skip( + message=message, + subject="time", + description="a valid time string with leap second with offset", + )(test) or skip( + message=message, + subject="time", + description="valid leap second, positive time-offset", + )(test) or skip( + message=message, + subject="time", + description="valid leap second, negative time-offset", + )(test) or skip( + message=message, + subject="time", + description="valid leap second, large positive time-offset", + )(test) or skip( + message=message, + subject="time", + description="valid leap second, large negative time-offset", + )(test) or skip( + message=message, + subject="time", + description="valid leap second, zero time-offset", + )(test) or skip( + message=message, + subject="date-time", + description="a valid date-time with a leap second, UTC", + )(test) or skip( + message=message, + subject="date-time", + description="a valid date-time with a leap second, with minus offset", + )(test) + + +TestDraft3 = DRAFT3.to_unittest_testcase( + 
DRAFT3.tests(), + DRAFT3.format_tests(), + DRAFT3.optional_tests_of(name="bignum"), + DRAFT3.optional_tests_of(name="non-bmp-regex"), + DRAFT3.optional_tests_of(name="zeroTerminatedFloats"), + Validator=Draft3Validator, + format_checker=draft3_format_checker, + skip=lambda test: ( + narrow_unicode_build(test) + or missing_format(draft3_format_checker)(test) + or complex_email_validation(test) + or skip( + message=bug(371), + subject="ref", + case_description=( + "$ref prevents a sibling id from changing the base uri" + ), + )(test) + ), +) + + +TestDraft4 = DRAFT4.to_unittest_testcase( + DRAFT4.tests(), + DRAFT4.format_tests(), + DRAFT4.optional_tests_of(name="bignum"), + DRAFT4.optional_tests_of(name="float-overflow"), + DRAFT4.optional_tests_of(name="non-bmp-regex"), + DRAFT4.optional_tests_of(name="zeroTerminatedFloats"), + Validator=Draft4Validator, + format_checker=draft4_format_checker, + skip=lambda test: ( + narrow_unicode_build(test) + or allowed_leading_zeros(test) + or leap_second(test) + or missing_format(draft4_format_checker)(test) + or complex_email_validation(test) + or skip( + message=bug(), + subject="ref", + case_description="Recursive references between schemas", + )(test) + or skip( + message=bug(371), + subject="ref", + case_description=( + "Location-independent identifier with " + "base URI change in subschema" + ), + )(test) + or skip( + message=bug(371), + subject="ref", + case_description=( + "$ref prevents a sibling id from changing the base uri" + ), + )(test) + or skip( + message=bug(371), + subject="id", + description="match $ref to id", + )(test) + or skip( + message=bug(371), + subject="id", + description="no match on enum or $ref to id", + )(test) + or skip( + message=bug(), + subject="refRemote", + case_description="base URI change - change folder in subschema", + )(test) + or skip( + message=bug(), + subject="ref", + case_description=( + "id must be resolved against nearest parent, " + "not just immediate parent" + ), + )(test) + 
# Suite bindings for each supported draft.  Each ``to_unittest_testcase`` call
# materializes cases from the official JSON-Schema-Test-Suite into a
# ``unittest.TestCase`` subclass; the ``skip`` lambda returns a reason string
# (truthy) for known-broken cases, or a falsy value to run the case.
#
# FIX: ``complex_email_validation(test)`` was listed twice (first and last) in
# the skip chains of ``TestDraft201909Format`` and ``TestDraft202012Format``;
# the redundant trailing call is removed.
TestDraft6 = DRAFT6.to_unittest_testcase(
    DRAFT6.tests(),
    DRAFT6.format_tests(),
    DRAFT6.optional_tests_of(name="bignum"),
    DRAFT6.optional_tests_of(name="float-overflow"),
    DRAFT6.optional_tests_of(name="non-bmp-regex"),
    Validator=Draft6Validator,
    format_checker=draft6_format_checker,
    skip=lambda test: (
        narrow_unicode_build(test)
        or allowed_leading_zeros(test)
        or leap_second(test)
        or missing_format(draft6_format_checker)(test)
        or complex_email_validation(test)
        or skip(
            message=bug(371),
            subject="ref",
            case_description=(
                "Location-independent identifier with "
                "base URI change in subschema"
            ),
        )(test)
        or skip(
            message=bug(),
            subject="refRemote",
            case_description="base URI change - change folder in subschema",
        )(test)
        or skip(
            message=bug(371),
            subject="ref",
            case_description=(
                "$ref prevents a sibling $id from changing the base uri"
            ),
        )(test)
    ),
)


TestDraft7 = DRAFT7.to_unittest_testcase(
    DRAFT7.tests(),
    DRAFT7.format_tests(),
    DRAFT7.optional_tests_of(name="bignum"),
    DRAFT7.optional_tests_of(name="content"),
    DRAFT7.optional_tests_of(name="float-overflow"),
    DRAFT7.optional_tests_of(name="non-bmp-regex"),
    Validator=Draft7Validator,
    format_checker=draft7_format_checker,
    skip=lambda test: (
        narrow_unicode_build(test)
        or allowed_leading_zeros(test)
        or leap_second(test)
        or missing_format(draft7_format_checker)(test)
        or complex_email_validation(test)
        or skip(
            message=bug(371),
            subject="ref",
            case_description=(
                "Location-independent identifier with "
                "base URI change in subschema"
            ),
        )(test)
        or skip(
            message=bug(),
            subject="refRemote",
            case_description="base URI change - change folder in subschema",
        )(test)
        or skip(
            message=bug(371),
            subject="ref",
            case_description=(
                "$ref prevents a sibling $id from changing the base uri"
            ),
        )(test)
        or skip(
            message=bug(),
            subject="ref",
            case_description=(
                "$id must be resolved against nearest parent, "
                "not just immediate parent"
            ),
        )(test)
        or skip(
            message=bug(593),
            subject="content",
            valid=False,
            case_description=(
                "validation of string-encoded content based on media type"
            ),
        )(test)
        or skip(
            message=bug(593),
            subject="content",
            valid=False,
            case_description="validation of binary string-encoding",
        )(test)
        or skip(
            message=bug(593),
            subject="content",
            valid=False,
            case_description=(
                "validation of binary-encoded media type documents"
            ),
        )(test)
    ),
)


TestDraft201909 = DRAFT201909.to_unittest_testcase(
    DRAFT201909.tests(),
    DRAFT201909.optional_tests_of(name="bignum"),
    DRAFT201909.optional_tests_of(name="float-overflow"),
    DRAFT201909.optional_tests_of(name="non-bmp-regex"),
    DRAFT201909.optional_tests_of(name="refOfUnknownKeyword"),
    Validator=Draft201909Validator,
    skip=lambda test: (
        skip(
            message="unevaluatedItems is different in 2019-09 (needs work).",
            subject="unevaluatedItems",
        )(test)
        or skip(
            message="dynamicRef support isn't working yet.",
            subject="recursiveRef",
        )(test)
        or skip(
            message="These tests depends on dynamicRef working.",
            subject="anchor",
            case_description="same $anchor with different base uri",
        )(test)
        or skip(
            message="Vocabulary support is not yet present.",
            subject="vocabulary",
        )(test)
        or skip(
            message=bug(),
            subject="ref",
            case_description=(
                "$id must be resolved against nearest parent, "
                "not just immediate parent"
            ),
        )(test)
    ),
)


TestDraft201909Format = DRAFT201909.to_unittest_testcase(
    DRAFT201909.format_tests(),
    Validator=Draft201909Validator,
    format_checker=draft201909_format_checker,
    skip=lambda test: (
        complex_email_validation(test)
        or allowed_leading_zeros(test)
        or leap_second(test)
        or missing_format(draft201909_format_checker)(test)
    ),
)


TestDraft202012 = DRAFT202012.to_unittest_testcase(
    DRAFT202012.tests(),
    DRAFT202012.optional_tests_of(name="bignum"),
    DRAFT202012.optional_tests_of(name="float-overflow"),
    DRAFT202012.optional_tests_of(name="non-bmp-regex"),
    DRAFT202012.optional_tests_of(name="refOfUnknownKeyword"),
    Validator=Draft202012Validator,
    skip=lambda test: (
        narrow_unicode_build(test)
        or skip(
            message="dynamicRef support isn't working yet.",
            subject="dynamicRef",
        )(test)
        or skip(
            message="These tests depends on dynamicRef working.",
            subject="defs",
        )(test)
        or skip(
            message="These tests depends on dynamicRef working.",
            subject="anchor",
            case_description="same $anchor with different base uri",
        )(test)
        or skip(
            message="Vocabulary support is not yet present.",
            subject="vocabulary",
        )(test)
        or skip(
            message=bug(),
            subject="ref",
            case_description=(
                "$id must be resolved against nearest parent, "
                "not just immediate parent"
            ),
        )(test)
    ),
)


TestDraft202012Format = DRAFT202012.to_unittest_testcase(
    DRAFT202012.format_tests(),
    Validator=Draft202012Validator,
    format_checker=draft202012_format_checker,
    skip=lambda test: (
        complex_email_validation(test)
        or allowed_leading_zeros(test)
        or leap_second(test)
        or missing_format(draft202012_format_checker)(test)
    ),
)
+""" +from collections import namedtuple +from unittest import TestCase + +from jsonschema import ValidationError, _validators +from jsonschema._types import TypeChecker +from jsonschema.exceptions import UndefinedTypeCheck, UnknownType +from jsonschema.validators import Draft202012Validator, extend + + +def equals_2(checker, instance): + return instance == 2 + + +def is_namedtuple(instance): + return isinstance(instance, tuple) and getattr(instance, "_fields", None) + + +def is_object_or_named_tuple(checker, instance): + if Draft202012Validator.TYPE_CHECKER.is_type(instance, "object"): + return True + return is_namedtuple(instance) + + +class TestTypeChecker(TestCase): + def test_is_type(self): + checker = TypeChecker({"two": equals_2}) + self.assertEqual( + ( + checker.is_type(instance=2, type="two"), + checker.is_type(instance="bar", type="two"), + ), + (True, False), + ) + + def test_is_unknown_type(self): + with self.assertRaises(UndefinedTypeCheck) as e: + TypeChecker().is_type(4, "foobar") + self.assertIn( + "'foobar' is unknown to this type checker", + str(e.exception), + ) + self.assertTrue( + e.exception.__suppress_context__, + msg="Expected the internal KeyError to be hidden.", + ) + + def test_checks_can_be_added_at_init(self): + checker = TypeChecker({"two": equals_2}) + self.assertEqual(checker, TypeChecker().redefine("two", equals_2)) + + def test_redefine_existing_type(self): + self.assertEqual( + TypeChecker().redefine("two", object()).redefine("two", equals_2), + TypeChecker().redefine("two", equals_2), + ) + + def test_remove(self): + self.assertEqual( + TypeChecker({"two": equals_2}).remove("two"), + TypeChecker(), + ) + + def test_remove_unknown_type(self): + with self.assertRaises(UndefinedTypeCheck) as context: + TypeChecker().remove("foobar") + self.assertIn("foobar", str(context.exception)) + + def test_redefine_many(self): + self.assertEqual( + TypeChecker().redefine_many({"foo": int, "bar": str}), + TypeChecker().redefine("foo", 
int).redefine("bar", str), + ) + + def test_remove_multiple(self): + self.assertEqual( + TypeChecker({"foo": int, "bar": str}).remove("foo", "bar"), + TypeChecker(), + ) + + def test_type_check_can_raise_key_error(self): + """ + Make sure no one writes: + + try: + self._type_checkers[type](...) + except KeyError: + + ignoring the fact that the function itself can raise that. + """ + + error = KeyError("Stuff") + + def raises_keyerror(checker, instance): + raise error + + with self.assertRaises(KeyError) as context: + TypeChecker({"foo": raises_keyerror}).is_type(4, "foo") + + self.assertIs(context.exception, error) + + +class TestCustomTypes(TestCase): + def test_simple_type_can_be_extended(self): + def int_or_str_int(checker, instance): + if not isinstance(instance, (int, str)): + return False + try: + int(instance) + except ValueError: + return False + return True + + CustomValidator = extend( + Draft202012Validator, + type_checker=Draft202012Validator.TYPE_CHECKER.redefine( + "integer", int_or_str_int, + ), + ) + validator = CustomValidator({"type": "integer"}) + + validator.validate(4) + validator.validate("4") + + with self.assertRaises(ValidationError): + validator.validate(4.4) + + with self.assertRaises(ValidationError): + validator.validate("foo") + + def test_object_can_be_extended(self): + schema = {"type": "object"} + + Point = namedtuple("Point", ["x", "y"]) + + type_checker = Draft202012Validator.TYPE_CHECKER.redefine( + "object", is_object_or_named_tuple, + ) + + CustomValidator = extend( + Draft202012Validator, + type_checker=type_checker, + ) + validator = CustomValidator(schema) + + validator.validate(Point(x=4, y=5)) + + def test_object_extensions_require_custom_validators(self): + schema = {"type": "object", "required": ["x"]} + + type_checker = Draft202012Validator.TYPE_CHECKER.redefine( + "object", is_object_or_named_tuple, + ) + + CustomValidator = extend( + Draft202012Validator, + type_checker=type_checker, + ) + validator = 
CustomValidator(schema) + + Point = namedtuple("Point", ["x", "y"]) + # Cannot handle required + with self.assertRaises(ValidationError): + validator.validate(Point(x=4, y=5)) + + def test_object_extensions_can_handle_custom_validators(self): + schema = { + "type": "object", + "required": ["x"], + "properties": {"x": {"type": "integer"}}, + } + + type_checker = Draft202012Validator.TYPE_CHECKER.redefine( + "object", is_object_or_named_tuple, + ) + + def coerce_named_tuple(fn): + def coerced(validator, value, instance, schema): + if is_namedtuple(instance): + instance = instance._asdict() + return fn(validator, value, instance, schema) + return coerced + + required = coerce_named_tuple(_validators.required) + properties = coerce_named_tuple(_validators.properties) + + CustomValidator = extend( + Draft202012Validator, + type_checker=type_checker, + validators={"required": required, "properties": properties}, + ) + + validator = CustomValidator(schema) + + Point = namedtuple("Point", ["x", "y"]) + # Can now process required and properties + validator.validate(Point(x=4, y=5)) + + with self.assertRaises(ValidationError): + validator.validate(Point(x="not an integer", y=5)) + + # As well as still handle objects. 
class TestEqual(TestCase):
    def test_none(self):
        self.assertTrue(equal(None, None))


class TestDictEqual(TestCase):
    """Behavior of ``equal`` on (possibly nested) dictionaries."""

    def test_equal_dictionaries(self):
        self.assertTrue(equal({"a": "b", "c": "d"}, {"c": "d", "a": "b"}))

    def test_missing_key(self):
        self.assertFalse(equal({"a": "b", "c": "d"}, {"c": "d", "x": "b"}))

    def test_additional_key(self):
        self.assertFalse(
            equal({"a": "b", "c": "d"}, {"c": "d", "a": "b", "x": "x"}),
        )

    def test_missing_value(self):
        self.assertFalse(equal({"a": "b", "c": "d"}, {"c": "d", "a": "x"}))

    def test_empty_dictionaries(self):
        self.assertTrue(equal({}, {}))

    def test_one_none(self):
        self.assertFalse(equal(None, {"a": "b", "c": "d"}))

    def test_same_item(self):
        same = {"a": "b", "c": "d"}
        self.assertTrue(equal(same, same))

    def test_nested_equal(self):
        self.assertTrue(
            equal(
                {"a": {"a": "b", "c": "d"}, "c": "d"},
                {"c": "d", "a": {"a": "b", "c": "d"}},
            ),
        )

    def test_nested_dict_unequal(self):
        self.assertFalse(
            equal(
                {"a": {"a": "b", "c": "d"}, "c": "d"},
                {"c": "d", "a": {"a": "b", "c": "x"}},
            ),
        )

    def test_mixed_nested_equal(self):
        self.assertTrue(
            equal(
                {"a": ["a", "b", "c", "d"], "c": "d"},
                {"c": "d", "a": ["a", "b", "c", "d"]},
            ),
        )

    def test_nested_list_unequal(self):
        self.assertFalse(
            equal(
                {"a": ["a", "b", "c", "d"], "c": "d"},
                {"c": "d", "a": ["b", "c", "d", "a"]},
            ),
        )


class TestListEqual(TestCase):
    """Behavior of ``equal`` on (possibly nested) lists."""

    def test_equal_lists(self):
        self.assertTrue(equal(["a", "b", "c"], ["a", "b", "c"]))

    def test_unsorted_lists(self):
        self.assertFalse(equal(["a", "b", "c"], ["b", "b", "a"]))

    def test_first_list_larger(self):
        self.assertFalse(equal(["a", "b", "c"], ["a", "b"]))

    def test_second_list_larger(self):
        self.assertFalse(equal(["a", "b"], ["a", "b", "c"]))

    def test_list_with_none_unequal(self):
        self.assertFalse(equal(["a", "b", None], ["a", "b", "c"]))
        self.assertFalse(equal(["a", "b", None], [None, "b", "c"]))

    def test_list_with_none_equal(self):
        self.assertTrue(equal(["a", None, "c"], ["a", None, "c"]))

    def test_empty_list(self):
        self.assertTrue(equal([], []))

    def test_one_none(self):
        self.assertFalse(equal(None, []))

    def test_same_list(self):
        same = ["a", "b", "c"]
        self.assertTrue(equal(same, same))

    def test_equal_nested_lists(self):
        self.assertTrue(
            equal(["a", ["b", "c"], "d"], ["a", ["b", "c"], "d"]),
        )

    def test_unequal_nested_lists(self):
        self.assertFalse(equal(["a", ["b", "c"], "d"], ["a", [], "c"]))
def fail(validator, errors, instance, schema):
    # Validator callback used by TestCreateAndExtend: yields one
    # ValidationError per mapping in *errors*, defaulting the message so a
    # test may specify only the fields it cares about.
    for each in errors:
        each.setdefault("message", "You told me to fail!")
        yield exceptions.ValidationError(**each)


class TestCreateAndExtend(TestCase):
    """
    Tests for ``validators.create`` and ``validators.extend``.

    NOTE(review): several tests touch the module-global meta-schema registry
    (``validators._META_SCHEMAS``); ``addCleanup`` is used both to pop test
    entries and to assert the registry was left unchanged.
    """

    def setUp(self):
        # Snapshot the registry now; at teardown assertEqual compares the
        # live dict against the snapshot, failing if anything leaked in.
        self.addCleanup(
            self.assertEqual,
            validators._META_SCHEMAS,
            dict(validators._META_SCHEMAS),
        )

        # A minimal validator class with a single custom keyword, "fail".
        self.meta_schema = {"$id": "some://meta/schema"}
        self.validators = {"fail": fail}
        self.type_checker = TypeChecker()
        self.Validator = validators.create(
            meta_schema=self.meta_schema,
            validators=self.validators,
            type_checker=self.type_checker,
        )

    def test_attrs(self):
        # create() exposes its arguments as class-level attributes.
        self.assertEqual(
            (
                self.Validator.VALIDATORS,
                self.Validator.META_SCHEMA,
                self.Validator.TYPE_CHECKER,
            ), (
                self.validators,
                self.meta_schema,
                self.type_checker,
            ),
        )

    def test_init(self):
        schema = {"fail": []}
        self.assertEqual(self.Validator(schema).schema, schema)

    def test_iter_errors_successful(self):
        # "fail" with no error specs produces no errors.
        schema = {"fail": []}
        validator = self.Validator(schema)

        errors = list(validator.iter_errors("hello"))
        self.assertEqual(errors, [])

    def test_iter_errors_one_error(self):
        schema = {"fail": [{"message": "Whoops!"}]}
        validator = self.Validator(schema)

        expected_error = exceptions.ValidationError(
            "Whoops!",
            instance="goodbye",
            schema=schema,
            validator="fail",
            validator_value=[{"message": "Whoops!"}],
            schema_path=deque(["fail"]),
        )

        errors = list(validator.iter_errors("goodbye"))
        self.assertEqual(len(errors), 1)
        # _contents() compares all error attributes at once.
        self.assertEqual(errors[0]._contents(), expected_error._contents())

    def test_iter_errors_multiple_errors(self):
        schema = {
            "fail": [
                {"message": "First"},
                {"message": "Second!", "validator": "asdf"},
                {"message": "Third"},
            ],
        }
        validator = self.Validator(schema)

        errors = list(validator.iter_errors("goodbye"))
        self.assertEqual(len(errors), 3)

    def test_if_a_version_is_provided_it_is_registered(self):
        # Passing version= both names the class and registers its meta-schema.
        Validator = validators.create(
            meta_schema={"$id": "something"},
            version="my version",
        )
        self.addCleanup(validators._META_SCHEMAS.pop, "something")
        self.assertEqual(Validator.__name__, "MyVersionValidator")
        self.assertEqual(Validator.__qualname__, "MyVersionValidator")

    def test_repr(self):
        Validator = validators.create(
            meta_schema={"$id": "something"},
            version="my version",
        )
        self.addCleanup(validators._META_SCHEMAS.pop, "something")
        self.assertEqual(
            repr(Validator({})),
            "MyVersionValidator(schema={}, format_checker=None)",
        )

    def test_long_repr(self):
        # Long schemas are elided with "..." in the repr.
        Validator = validators.create(
            meta_schema={"$id": "something"},
            version="my version",
        )
        self.addCleanup(validators._META_SCHEMAS.pop, "something")
        self.assertEqual(
            repr(Validator({"a": list(range(1000))})), (
                "MyVersionValidator(schema={'a': [0, 1, 2, 3, 4, 5, ...]}, "
                "format_checker=None)"
            ),
        )

    def test_repr_no_version(self):
        Validator = validators.create(meta_schema={})
        self.assertEqual(
            repr(Validator({})),
            "Validator(schema={}, format_checker=None)",
        )

    def test_dashes_are_stripped_from_validator_names(self):
        Validator = validators.create(
            meta_schema={"$id": "something"},
            version="foo-bar",
        )
        self.addCleanup(validators._META_SCHEMAS.pop, "something")
        self.assertEqual(Validator.__qualname__, "FooBarValidator")

    def test_if_a_version_is_not_provided_it_is_not_registered(self):
        original = dict(validators._META_SCHEMAS)
        validators.create(meta_schema={"id": "id"})
        self.assertEqual(validators._META_SCHEMAS, original)

    def test_validates_registers_meta_schema_id(self):
        # A custom id_of lets pre-draft-6 "id" keys act as the registry key.
        meta_schema_key = "meta schema id"
        my_meta_schema = {"id": meta_schema_key}

        validators.create(
            meta_schema=my_meta_schema,
            version="my version",
            id_of=lambda s: s.get("id", ""),
        )
        self.addCleanup(validators._META_SCHEMAS.pop, meta_schema_key)

        self.assertIn(meta_schema_key, validators._META_SCHEMAS)

    def test_validates_registers_meta_schema_draft6_id(self):
        # Draft-6 style "$id" is the default registry key.
        meta_schema_key = "meta schema $id"
        my_meta_schema = {"$id": meta_schema_key}

        validators.create(
            meta_schema=my_meta_schema,
            version="my version",
        )
        self.addCleanup(validators._META_SCHEMAS.pop, meta_schema_key)

        self.assertIn(meta_schema_key, validators._META_SCHEMAS)

    def test_create_default_types(self):
        # With no type_checker argument, the draft default types apply.
        Validator = validators.create(meta_schema={}, validators=())
        self.assertTrue(
            all(
                Validator({}).is_type(instance=instance, type=type)
                for type, instance in [
                    ("array", []),
                    ("boolean", True),
                    ("integer", 12),
                    ("null", None),
                    ("number", 12.0),
                    ("object", {}),
                    ("string", "foo"),
                ]
            ),
        )

    def test_extend(self):
        # extend() merges new validators without mutating the parent class.
        original = dict(self.Validator.VALIDATORS)
        new = object()

        Extended = validators.extend(
            self.Validator,
            validators={"new": new},
        )
        self.assertEqual(
            (
                Extended.VALIDATORS,
                Extended.META_SCHEMA,
                Extended.TYPE_CHECKER,
                self.Validator.VALIDATORS,
            ), (
                dict(original, new=new),
                self.Validator.META_SCHEMA,
                self.Validator.TYPE_CHECKER,
                original,
            ),
        )

    def test_extend_idof(self):
        """
        Extending a validator preserves its notion of schema IDs.
        """
        def id_of(schema):
            return schema.get("__test__", self.Validator.ID_OF(schema))
        correct_id = "the://correct/id/"
        meta_schema = {
            "$id": "the://wrong/id/",
            "__test__": correct_id,
        }
        Original = validators.create(
            meta_schema=meta_schema,
            validators=self.validators,
            type_checker=self.type_checker,
            id_of=id_of,
        )
        self.assertEqual(Original.ID_OF(Original.META_SCHEMA), correct_id)

        # The derived class must keep using the custom id_of.
        Derived = validators.extend(Original)
        self.assertEqual(Derived.ID_OF(Derived.META_SCHEMA), correct_id)
+ """ + def id_of(schema): + return schema.get("__test__", self.Validator.ID_OF(schema)) + correct_id = "the://correct/id/" + meta_schema = { + "$id": "the://wrong/id/", + "__test__": correct_id, + } + Original = validators.create( + meta_schema=meta_schema, + validators=self.validators, + type_checker=self.type_checker, + id_of=id_of, + ) + self.assertEqual(Original.ID_OF(Original.META_SCHEMA), correct_id) + + Derived = validators.extend(Original) + self.assertEqual(Derived.ID_OF(Derived.META_SCHEMA), correct_id) + + +class TestValidationErrorMessages(TestCase): + def message_for(self, instance, schema, *args, **kwargs): + cls = kwargs.pop("cls", validators._LATEST_VERSION) + cls.check_schema(schema) + validator = cls(schema, *args, **kwargs) + errors = list(validator.iter_errors(instance)) + self.assertTrue(errors, msg=f"No errors were raised for {instance!r}") + self.assertEqual( + len(errors), + 1, + msg=f"Expected exactly one error, found {errors!r}", + ) + return errors[0].message + + def test_single_type_failure(self): + message = self.message_for(instance=1, schema={"type": "string"}) + self.assertEqual(message, "1 is not of type 'string'") + + def test_single_type_list_failure(self): + message = self.message_for(instance=1, schema={"type": ["string"]}) + self.assertEqual(message, "1 is not of type 'string'") + + def test_multiple_type_failure(self): + types = "string", "object" + message = self.message_for(instance=1, schema={"type": list(types)}) + self.assertEqual(message, "1 is not of type 'string', 'object'") + + def test_object_without_title_type_failure(self): + type = {"type": [{"minimum": 3}]} + message = self.message_for( + instance=1, + schema={"type": [type]}, + cls=validators.Draft3Validator, + ) + self.assertEqual( + message, + "1 is not of type {'type': [{'minimum': 3}]}", + ) + + def test_object_with_named_type_failure(self): + schema = {"type": [{"name": "Foo", "minimum": 3}]} + message = self.message_for( + instance=1, + schema=schema, + 
cls=validators.Draft3Validator, + ) + self.assertEqual(message, "1 is not of type 'Foo'") + + def test_minimum(self): + message = self.message_for(instance=1, schema={"minimum": 2}) + self.assertEqual(message, "1 is less than the minimum of 2") + + def test_maximum(self): + message = self.message_for(instance=1, schema={"maximum": 0}) + self.assertEqual(message, "1 is greater than the maximum of 0") + + def test_dependencies_single_element(self): + depend, on = "bar", "foo" + schema = {"dependencies": {depend: on}} + message = self.message_for( + instance={"bar": 2}, + schema=schema, + cls=validators.Draft3Validator, + ) + self.assertEqual(message, "'foo' is a dependency of 'bar'") + + def test_dependencies_list_draft3(self): + depend, on = "bar", "foo" + schema = {"dependencies": {depend: [on]}} + message = self.message_for( + instance={"bar": 2}, + schema=schema, + cls=validators.Draft3Validator, + ) + self.assertEqual(message, "'foo' is a dependency of 'bar'") + + def test_dependencies_list_draft7(self): + depend, on = "bar", "foo" + schema = {"dependencies": {depend: [on]}} + message = self.message_for( + instance={"bar": 2}, + schema=schema, + cls=validators.Draft7Validator, + ) + self.assertEqual(message, "'foo' is a dependency of 'bar'") + + def test_additionalItems_single_failure(self): + message = self.message_for( + instance=[2], + schema={"items": [], "additionalItems": False}, + cls=validators.Draft3Validator, + ) + self.assertIn("(2 was unexpected)", message) + + def test_additionalItems_multiple_failures(self): + message = self.message_for( + instance=[1, 2, 3], + schema={"items": [], "additionalItems": False}, + cls=validators.Draft3Validator, + ) + self.assertIn("(1, 2, 3 were unexpected)", message) + + def test_additionalProperties_single_failure(self): + additional = "foo" + schema = {"additionalProperties": False} + message = self.message_for(instance={additional: 2}, schema=schema) + self.assertIn("('foo' was unexpected)", message) + + def 
test_additionalProperties_multiple_failures(self): + schema = {"additionalProperties": False} + message = self.message_for( + instance=dict.fromkeys(["foo", "bar"]), + schema=schema, + ) + + self.assertIn(repr("foo"), message) + self.assertIn(repr("bar"), message) + self.assertIn("were unexpected)", message) + + def test_const(self): + schema = {"const": 12} + message = self.message_for( + instance={"foo": "bar"}, + schema=schema, + ) + self.assertIn("12 was expected", message) + + def test_contains_draft_6(self): + schema = {"contains": {"const": 12}} + message = self.message_for( + instance=[2, {}, []], + schema=schema, + cls=validators.Draft6Validator, + ) + self.assertEqual( + message, + "None of [2, {}, []] are valid under the given schema", + ) + + def test_invalid_format_default_message(self): + checker = FormatChecker(formats=()) + checker.checks("thing")(lambda value: False) + + schema = {"format": "thing"} + message = self.message_for( + instance="bla", + schema=schema, + format_checker=checker, + ) + + self.assertIn(repr("bla"), message) + self.assertIn(repr("thing"), message) + self.assertIn("is not a", message) + + def test_additionalProperties_false_patternProperties(self): + schema = {"type": "object", + "additionalProperties": False, + "patternProperties": { + "^abc$": {"type": "string"}, + "^def$": {"type": "string"}, + }} + message = self.message_for( + instance={"zebra": 123}, + schema=schema, + cls=validators.Draft4Validator, + ) + self.assertEqual( + message, + "{} does not match any of the regexes: {}, {}".format( + repr("zebra"), repr("^abc$"), repr("^def$"), + ), + ) + message = self.message_for( + instance={"zebra": 123, "fish": 456}, + schema=schema, + cls=validators.Draft4Validator, + ) + self.assertEqual( + message, + "{}, {} do not match any of the regexes: {}, {}".format( + repr("fish"), repr("zebra"), repr("^abc$"), repr("^def$"), + ), + ) + + def test_False_schema(self): + message = self.message_for( + instance="something", + 
schema=False, + ) + self.assertEqual(message, "False schema does not allow 'something'") + + def test_multipleOf(self): + message = self.message_for( + instance=3, + schema={"multipleOf": 2}, + ) + self.assertEqual(message, "3 is not a multiple of 2") + + def test_minItems(self): + message = self.message_for(instance=[], schema={"minItems": 2}) + self.assertEqual(message, "[] is too short") + + def test_maxItems(self): + message = self.message_for(instance=[1, 2, 3], schema={"maxItems": 2}) + self.assertEqual(message, "[1, 2, 3] is too long") + + def test_prefixItems_with_items(self): + message = self.message_for( + instance=[1, 2, "foo", 5], + schema={"items": False, "prefixItems": [{}, {}]}, + ) + self.assertEqual(message, "Expected at most 2 items, but found 4") + + def test_minLength(self): + message = self.message_for( + instance="", + schema={"minLength": 2}, + ) + self.assertEqual(message, "'' is too short") + + def test_maxLength(self): + message = self.message_for( + instance="abc", + schema={"maxLength": 2}, + ) + self.assertEqual(message, "'abc' is too long") + + def test_pattern(self): + message = self.message_for( + instance="bbb", + schema={"pattern": "^a*$"}, + ) + self.assertEqual(message, "'bbb' does not match '^a*$'") + + def test_does_not_contain(self): + message = self.message_for( + instance=[], + schema={"contains": {"type": "string"}}, + ) + self.assertEqual( + message, + "[] does not contain items matching the given schema", + ) + + def test_contains_too_few(self): + message = self.message_for( + instance=["foo", 1], + schema={"contains": {"type": "string"}, "minContains": 2}, + ) + self.assertEqual( + message, + "Too few items match the given schema " + "(expected at least 2 but only 1 matched)", + ) + + def test_contains_too_few_both_constrained(self): + message = self.message_for( + instance=["foo", 1], + schema={ + "contains": {"type": "string"}, + "minContains": 2, + "maxContains": 4, + }, + ) + self.assertEqual( + message, + "Too few 
items match the given schema (expected at least 2 but " + "only 1 matched)", + ) + + def test_contains_too_many(self): + message = self.message_for( + instance=["foo", "bar", "baz"], + schema={"contains": {"type": "string"}, "maxContains": 2}, + ) + self.assertEqual( + message, + "Too many items match the given schema (expected at most 2)", + ) + + def test_contains_too_many_both_constrained(self): + message = self.message_for( + instance=["foo"] * 5, + schema={ + "contains": {"type": "string"}, + "minContains": 2, + "maxContains": 4, + }, + ) + self.assertEqual( + message, + "Too many items match the given schema (expected at most 4)", + ) + + def test_exclusiveMinimum(self): + message = self.message_for( + instance=3, + schema={"exclusiveMinimum": 5}, + ) + self.assertEqual( + message, + "3 is less than or equal to the minimum of 5", + ) + + def test_exclusiveMaximum(self): + message = self.message_for(instance=3, schema={"exclusiveMaximum": 2}) + self.assertEqual( + message, + "3 is greater than or equal to the maximum of 2", + ) + + def test_required(self): + message = self.message_for(instance={}, schema={"required": ["foo"]}) + self.assertEqual(message, "'foo' is a required property") + + def test_dependentRequired(self): + message = self.message_for( + instance={"foo": {}}, + schema={"dependentRequired": {"foo": ["bar"]}}, + ) + self.assertEqual(message, "'bar' is a dependency of 'foo'") + + def test_minProperties(self): + message = self.message_for(instance={}, schema={"minProperties": 2}) + self.assertEqual(message, "{} does not have enough properties") + + def test_maxProperties(self): + message = self.message_for( + instance={"a": {}, "b": {}, "c": {}}, + schema={"maxProperties": 2}, + ) + self.assertEqual( + message, + "{'a': {}, 'b': {}, 'c': {}} has too many properties", + ) + + def test_oneOf_matches_none(self): + message = self.message_for(instance={}, schema={"oneOf": [False]}) + self.assertEqual( + message, + "{} is not valid under any of the 
given schemas", + ) + + def test_oneOf_matches_too_many(self): + message = self.message_for(instance={}, schema={"oneOf": [True, True]}) + self.assertEqual(message, "{} is valid under each of True, True") + + def test_unevaluated_items(self): + schema = {"type": "array", "unevaluatedItems": False} + message = self.message_for(instance=["foo", "bar"], schema=schema) + self.assertIn( + message, + "Unevaluated items are not allowed ('foo', 'bar' were unexpected)", + ) + + def test_unevaluated_properties(self): + schema = {"type": "object", "unevaluatedProperties": False} + message = self.message_for( + instance={ + "foo": "foo", + "bar": "bar", + }, + schema=schema, + ) + self.assertEqual( + message, + "Unevaluated properties are not allowed " + "('foo', 'bar' were unexpected)", + ) + + +class TestValidationErrorDetails(TestCase): + # TODO: These really need unit tests for each individual validator, rather + # than just these higher level tests. + def test_anyOf(self): + instance = 5 + schema = { + "anyOf": [ + {"minimum": 20}, + {"type": "string"}, + ], + } + + validator = validators.Draft4Validator(schema) + errors = list(validator.iter_errors(instance)) + self.assertEqual(len(errors), 1) + e = errors[0] + + self.assertEqual(e.validator, "anyOf") + self.assertEqual(e.validator_value, schema["anyOf"]) + self.assertEqual(e.instance, instance) + self.assertEqual(e.schema, schema) + self.assertIsNone(e.parent) + + self.assertEqual(e.path, deque([])) + self.assertEqual(e.relative_path, deque([])) + self.assertEqual(e.absolute_path, deque([])) + self.assertEqual(e.json_path, "$") + + self.assertEqual(e.schema_path, deque(["anyOf"])) + self.assertEqual(e.relative_schema_path, deque(["anyOf"])) + self.assertEqual(e.absolute_schema_path, deque(["anyOf"])) + + self.assertEqual(len(e.context), 2) + + e1, e2 = sorted_errors(e.context) + + self.assertEqual(e1.validator, "minimum") + self.assertEqual(e1.validator_value, schema["anyOf"][0]["minimum"]) + 
self.assertEqual(e1.instance, instance) + self.assertEqual(e1.schema, schema["anyOf"][0]) + self.assertIs(e1.parent, e) + + self.assertEqual(e1.path, deque([])) + self.assertEqual(e1.absolute_path, deque([])) + self.assertEqual(e1.relative_path, deque([])) + self.assertEqual(e1.json_path, "$") + + self.assertEqual(e1.schema_path, deque([0, "minimum"])) + self.assertEqual(e1.relative_schema_path, deque([0, "minimum"])) + self.assertEqual( + e1.absolute_schema_path, deque(["anyOf", 0, "minimum"]), + ) + + self.assertFalse(e1.context) + + self.assertEqual(e2.validator, "type") + self.assertEqual(e2.validator_value, schema["anyOf"][1]["type"]) + self.assertEqual(e2.instance, instance) + self.assertEqual(e2.schema, schema["anyOf"][1]) + self.assertIs(e2.parent, e) + + self.assertEqual(e2.path, deque([])) + self.assertEqual(e2.relative_path, deque([])) + self.assertEqual(e2.absolute_path, deque([])) + self.assertEqual(e2.json_path, "$") + + self.assertEqual(e2.schema_path, deque([1, "type"])) + self.assertEqual(e2.relative_schema_path, deque([1, "type"])) + self.assertEqual(e2.absolute_schema_path, deque(["anyOf", 1, "type"])) + + self.assertEqual(len(e2.context), 0) + + def test_type(self): + instance = {"foo": 1} + schema = { + "type": [ + {"type": "integer"}, + { + "type": "object", + "properties": {"foo": {"enum": [2]}}, + }, + ], + } + + validator = validators.Draft3Validator(schema) + errors = list(validator.iter_errors(instance)) + self.assertEqual(len(errors), 1) + e = errors[0] + + self.assertEqual(e.validator, "type") + self.assertEqual(e.validator_value, schema["type"]) + self.assertEqual(e.instance, instance) + self.assertEqual(e.schema, schema) + self.assertIsNone(e.parent) + + self.assertEqual(e.path, deque([])) + self.assertEqual(e.relative_path, deque([])) + self.assertEqual(e.absolute_path, deque([])) + self.assertEqual(e.json_path, "$") + + self.assertEqual(e.schema_path, deque(["type"])) + self.assertEqual(e.relative_schema_path, deque(["type"])) + 
self.assertEqual(e.absolute_schema_path, deque(["type"])) + + self.assertEqual(len(e.context), 2) + + e1, e2 = sorted_errors(e.context) + + self.assertEqual(e1.validator, "type") + self.assertEqual(e1.validator_value, schema["type"][0]["type"]) + self.assertEqual(e1.instance, instance) + self.assertEqual(e1.schema, schema["type"][0]) + self.assertIs(e1.parent, e) + + self.assertEqual(e1.path, deque([])) + self.assertEqual(e1.relative_path, deque([])) + self.assertEqual(e1.absolute_path, deque([])) + self.assertEqual(e1.json_path, "$") + + self.assertEqual(e1.schema_path, deque([0, "type"])) + self.assertEqual(e1.relative_schema_path, deque([0, "type"])) + self.assertEqual(e1.absolute_schema_path, deque(["type", 0, "type"])) + + self.assertFalse(e1.context) + + self.assertEqual(e2.validator, "enum") + self.assertEqual(e2.validator_value, [2]) + self.assertEqual(e2.instance, 1) + self.assertEqual(e2.schema, {"enum": [2]}) + self.assertIs(e2.parent, e) + + self.assertEqual(e2.path, deque(["foo"])) + self.assertEqual(e2.relative_path, deque(["foo"])) + self.assertEqual(e2.absolute_path, deque(["foo"])) + self.assertEqual(e2.json_path, "$.foo") + + self.assertEqual( + e2.schema_path, deque([1, "properties", "foo", "enum"]), + ) + self.assertEqual( + e2.relative_schema_path, deque([1, "properties", "foo", "enum"]), + ) + self.assertEqual( + e2.absolute_schema_path, + deque(["type", 1, "properties", "foo", "enum"]), + ) + + self.assertFalse(e2.context) + + def test_single_nesting(self): + instance = {"foo": 2, "bar": [1], "baz": 15, "quux": "spam"} + schema = { + "properties": { + "foo": {"type": "string"}, + "bar": {"minItems": 2}, + "baz": {"maximum": 10, "enum": [2, 4, 6, 8]}, + }, + } + + validator = validators.Draft3Validator(schema) + errors = validator.iter_errors(instance) + e1, e2, e3, e4 = sorted_errors(errors) + + self.assertEqual(e1.path, deque(["bar"])) + self.assertEqual(e2.path, deque(["baz"])) + self.assertEqual(e3.path, deque(["baz"])) + 
self.assertEqual(e4.path, deque(["foo"])) + + self.assertEqual(e1.relative_path, deque(["bar"])) + self.assertEqual(e2.relative_path, deque(["baz"])) + self.assertEqual(e3.relative_path, deque(["baz"])) + self.assertEqual(e4.relative_path, deque(["foo"])) + + self.assertEqual(e1.absolute_path, deque(["bar"])) + self.assertEqual(e2.absolute_path, deque(["baz"])) + self.assertEqual(e3.absolute_path, deque(["baz"])) + self.assertEqual(e4.absolute_path, deque(["foo"])) + + self.assertEqual(e1.json_path, "$.bar") + self.assertEqual(e2.json_path, "$.baz") + self.assertEqual(e3.json_path, "$.baz") + self.assertEqual(e4.json_path, "$.foo") + + self.assertEqual(e1.validator, "minItems") + self.assertEqual(e2.validator, "enum") + self.assertEqual(e3.validator, "maximum") + self.assertEqual(e4.validator, "type") + + def test_multiple_nesting(self): + instance = [1, {"foo": 2, "bar": {"baz": [1]}}, "quux"] + schema = { + "type": "string", + "items": { + "type": ["string", "object"], + "properties": { + "foo": {"enum": [1, 3]}, + "bar": { + "type": "array", + "properties": { + "bar": {"required": True}, + "baz": {"minItems": 2}, + }, + }, + }, + }, + } + + validator = validators.Draft3Validator(schema) + errors = validator.iter_errors(instance) + e1, e2, e3, e4, e5, e6 = sorted_errors(errors) + + self.assertEqual(e1.path, deque([])) + self.assertEqual(e2.path, deque([0])) + self.assertEqual(e3.path, deque([1, "bar"])) + self.assertEqual(e4.path, deque([1, "bar", "bar"])) + self.assertEqual(e5.path, deque([1, "bar", "baz"])) + self.assertEqual(e6.path, deque([1, "foo"])) + + self.assertEqual(e1.json_path, "$") + self.assertEqual(e2.json_path, "$[0]") + self.assertEqual(e3.json_path, "$[1].bar") + self.assertEqual(e4.json_path, "$[1].bar.bar") + self.assertEqual(e5.json_path, "$[1].bar.baz") + self.assertEqual(e6.json_path, "$[1].foo") + + self.assertEqual(e1.schema_path, deque(["type"])) + self.assertEqual(e2.schema_path, deque(["items", "type"])) + self.assertEqual( + 
list(e3.schema_path), ["items", "properties", "bar", "type"], + ) + self.assertEqual( + list(e4.schema_path), + ["items", "properties", "bar", "properties", "bar", "required"], + ) + self.assertEqual( + list(e5.schema_path), + ["items", "properties", "bar", "properties", "baz", "minItems"], + ) + self.assertEqual( + list(e6.schema_path), ["items", "properties", "foo", "enum"], + ) + + self.assertEqual(e1.validator, "type") + self.assertEqual(e2.validator, "type") + self.assertEqual(e3.validator, "type") + self.assertEqual(e4.validator, "required") + self.assertEqual(e5.validator, "minItems") + self.assertEqual(e6.validator, "enum") + + def test_recursive(self): + schema = { + "definitions": { + "node": { + "anyOf": [{ + "type": "object", + "required": ["name", "children"], + "properties": { + "name": { + "type": "string", + }, + "children": { + "type": "object", + "patternProperties": { + "^.*$": { + "$ref": "#/definitions/node", + }, + }, + }, + }, + }], + }, + }, + "type": "object", + "required": ["root"], + "properties": {"root": {"$ref": "#/definitions/node"}}, + } + + instance = { + "root": { + "name": "root", + "children": { + "a": { + "name": "a", + "children": { + "ab": { + "name": "ab", + # missing "children" + }, + }, + }, + }, + }, + } + validator = validators.Draft4Validator(schema) + + e, = validator.iter_errors(instance) + self.assertEqual(e.absolute_path, deque(["root"])) + self.assertEqual( + e.absolute_schema_path, deque(["properties", "root", "anyOf"]), + ) + self.assertEqual(e.json_path, "$.root") + + e1, = e.context + self.assertEqual(e1.absolute_path, deque(["root", "children", "a"])) + self.assertEqual( + e1.absolute_schema_path, deque( + [ + "properties", + "root", + "anyOf", + 0, + "properties", + "children", + "patternProperties", + "^.*$", + "anyOf", + ], + ), + ) + self.assertEqual(e1.json_path, "$.root.children.a") + + e2, = e1.context + self.assertEqual( + e2.absolute_path, deque( + ["root", "children", "a", "children", "ab"], + ), + ) 
+ self.assertEqual( + e2.absolute_schema_path, deque( + [ + "properties", + "root", + "anyOf", + 0, + "properties", + "children", + "patternProperties", + "^.*$", + "anyOf", + 0, + "properties", + "children", + "patternProperties", + "^.*$", + "anyOf", + ], + ), + ) + self.assertEqual(e2.json_path, "$.root.children.a.children.ab") + + def test_additionalProperties(self): + instance = {"bar": "bar", "foo": 2} + schema = {"additionalProperties": {"type": "integer", "minimum": 5}} + + validator = validators.Draft3Validator(schema) + errors = validator.iter_errors(instance) + e1, e2 = sorted_errors(errors) + + self.assertEqual(e1.path, deque(["bar"])) + self.assertEqual(e2.path, deque(["foo"])) + + self.assertEqual(e1.json_path, "$.bar") + self.assertEqual(e2.json_path, "$.foo") + + self.assertEqual(e1.validator, "type") + self.assertEqual(e2.validator, "minimum") + + def test_patternProperties(self): + instance = {"bar": 1, "foo": 2} + schema = { + "patternProperties": { + "bar": {"type": "string"}, + "foo": {"minimum": 5}, + }, + } + + validator = validators.Draft3Validator(schema) + errors = validator.iter_errors(instance) + e1, e2 = sorted_errors(errors) + + self.assertEqual(e1.path, deque(["bar"])) + self.assertEqual(e2.path, deque(["foo"])) + + self.assertEqual(e1.json_path, "$.bar") + self.assertEqual(e2.json_path, "$.foo") + + self.assertEqual(e1.validator, "type") + self.assertEqual(e2.validator, "minimum") + + def test_additionalItems(self): + instance = ["foo", 1] + schema = { + "items": [], + "additionalItems": {"type": "integer", "minimum": 5}, + } + + validator = validators.Draft3Validator(schema) + errors = validator.iter_errors(instance) + e1, e2 = sorted_errors(errors) + + self.assertEqual(e1.path, deque([0])) + self.assertEqual(e2.path, deque([1])) + + self.assertEqual(e1.json_path, "$[0]") + self.assertEqual(e2.json_path, "$[1]") + + self.assertEqual(e1.validator, "type") + self.assertEqual(e2.validator, "minimum") + + def 
test_additionalItems_with_items(self): + instance = ["foo", "bar", 1] + schema = { + "items": [{}], + "additionalItems": {"type": "integer", "minimum": 5}, + } + + validator = validators.Draft3Validator(schema) + errors = validator.iter_errors(instance) + e1, e2 = sorted_errors(errors) + + self.assertEqual(e1.path, deque([1])) + self.assertEqual(e2.path, deque([2])) + + self.assertEqual(e1.json_path, "$[1]") + self.assertEqual(e2.json_path, "$[2]") + + self.assertEqual(e1.validator, "type") + self.assertEqual(e2.validator, "minimum") + + def test_propertyNames(self): + instance = {"foo": 12} + schema = {"propertyNames": {"not": {"const": "foo"}}} + + validator = validators.Draft7Validator(schema) + error, = validator.iter_errors(instance) + + self.assertEqual(error.validator, "not") + self.assertEqual( + error.message, + "'foo' should not be valid under {'const': 'foo'}", + ) + self.assertEqual(error.path, deque([])) + self.assertEqual(error.json_path, "$") + self.assertEqual(error.schema_path, deque(["propertyNames", "not"])) + + def test_if_then(self): + schema = { + "if": {"const": 12}, + "then": {"const": 13}, + } + + validator = validators.Draft7Validator(schema) + error, = validator.iter_errors(12) + + self.assertEqual(error.validator, "const") + self.assertEqual(error.message, "13 was expected") + self.assertEqual(error.path, deque([])) + self.assertEqual(error.json_path, "$") + self.assertEqual(error.schema_path, deque(["then", "const"])) + + def test_if_else(self): + schema = { + "if": {"const": 12}, + "else": {"const": 13}, + } + + validator = validators.Draft7Validator(schema) + error, = validator.iter_errors(15) + + self.assertEqual(error.validator, "const") + self.assertEqual(error.message, "13 was expected") + self.assertEqual(error.path, deque([])) + self.assertEqual(error.json_path, "$") + self.assertEqual(error.schema_path, deque(["else", "const"])) + + def test_boolean_schema_False(self): + validator = validators.Draft7Validator(False) + error, = 
validator.iter_errors(12) + + self.assertEqual( + ( + error.message, + error.validator, + error.validator_value, + error.instance, + error.schema, + error.schema_path, + error.json_path, + ), + ( + "False schema does not allow 12", + None, + None, + 12, + False, + deque([]), + "$", + ), + ) + + def test_ref(self): + ref, schema = "someRef", {"additionalProperties": {"type": "integer"}} + validator = validators.Draft7Validator( + {"$ref": ref}, + resolver=validators.RefResolver("", {}, store={ref: schema}), + ) + error, = validator.iter_errors({"foo": "notAnInteger"}) + + self.assertEqual( + ( + error.message, + error.validator, + error.validator_value, + error.instance, + error.absolute_path, + error.schema, + error.schema_path, + error.json_path, + ), + ( + "'notAnInteger' is not of type 'integer'", + "type", + "integer", + "notAnInteger", + deque(["foo"]), + {"type": "integer"}, + deque(["additionalProperties", "type"]), + "$.foo", + ), + ) + + def test_prefixItems(self): + schema = {"prefixItems": [{"type": "string"}, {}, {}, {"maximum": 3}]} + validator = validators.Draft202012Validator(schema) + type_error, min_error = validator.iter_errors([1, 2, "foo", 5]) + self.assertEqual( + ( + type_error.message, + type_error.validator, + type_error.validator_value, + type_error.instance, + type_error.absolute_path, + type_error.schema, + type_error.schema_path, + type_error.json_path, + ), + ( + "1 is not of type 'string'", + "type", + "string", + 1, + deque([0]), + {"type": "string"}, + deque(["prefixItems", 0, "type"]), + "$[0]", + ), + ) + self.assertEqual( + ( + min_error.message, + min_error.validator, + min_error.validator_value, + min_error.instance, + min_error.absolute_path, + min_error.schema, + min_error.schema_path, + min_error.json_path, + ), + ( + "5 is greater than the maximum of 3", + "maximum", + 3, + 5, + deque([3]), + {"maximum": 3}, + deque(["prefixItems", 3, "maximum"]), + "$[3]", + ), + ) + + def test_prefixItems_with_items(self): + schema = { + 
"items": {"type": "string"}, + "prefixItems": [{}], + } + validator = validators.Draft202012Validator(schema) + e1, e2 = validator.iter_errors(["foo", 2, "bar", 4, "baz"]) + self.assertEqual( + ( + e1.message, + e1.validator, + e1.validator_value, + e1.instance, + e1.absolute_path, + e1.schema, + e1.schema_path, + e1.json_path, + ), + ( + "2 is not of type 'string'", + "type", + "string", + 2, + deque([1]), + {"type": "string"}, + deque(["items", "type"]), + "$[1]", + ), + ) + self.assertEqual( + ( + e2.message, + e2.validator, + e2.validator_value, + e2.instance, + e2.absolute_path, + e2.schema, + e2.schema_path, + e2.json_path, + ), + ( + "4 is not of type 'string'", + "type", + "string", + 4, + deque([3]), + {"type": "string"}, + deque(["items", "type"]), + "$[3]", + ), + ) + + def test_contains_too_many(self): + """ + `contains` + `maxContains` produces only one error, even if there are + many more incorrectly matching elements. + """ + schema = {"contains": {"type": "string"}, "maxContains": 2} + validator = validators.Draft202012Validator(schema) + error, = validator.iter_errors(["foo", 2, "bar", 4, "baz", "quux"]) + self.assertEqual( + ( + error.message, + error.validator, + error.validator_value, + error.instance, + error.absolute_path, + error.schema, + error.schema_path, + error.json_path, + ), + ( + "Too many items match the given schema (expected at most 2)", + "maxContains", + 2, + ["foo", 2, "bar", 4, "baz", "quux"], + deque([]), + {"contains": {"type": "string"}, "maxContains": 2}, + deque(["contains"]), + "$", + ), + ) + + def test_contains_too_few(self): + schema = {"contains": {"type": "string"}, "minContains": 2} + validator = validators.Draft202012Validator(schema) + error, = validator.iter_errors(["foo", 2, 4]) + self.assertEqual( + ( + error.message, + error.validator, + error.validator_value, + error.instance, + error.absolute_path, + error.schema, + error.schema_path, + error.json_path, + ), + ( + ( + "Too few items match the given schema " 
+ "(expected at least 2 but only 1 matched)" + ), + "minContains", + 2, + ["foo", 2, 4], + deque([]), + {"contains": {"type": "string"}, "minContains": 2}, + deque(["contains"]), + "$", + ), + ) + + def test_contains_none(self): + schema = {"contains": {"type": "string"}, "minContains": 2} + validator = validators.Draft202012Validator(schema) + error, = validator.iter_errors([2, 4]) + self.assertEqual( + ( + error.message, + error.validator, + error.validator_value, + error.instance, + error.absolute_path, + error.schema, + error.schema_path, + error.json_path, + ), + ( + "[2, 4] does not contain items matching the given schema", + "contains", + {"type": "string"}, + [2, 4], + deque([]), + {"contains": {"type": "string"}, "minContains": 2}, + deque(["contains"]), + "$", + ), + ) + + def test_ref_sibling(self): + schema = { + "$defs": {"foo": {"required": ["bar"]}}, + "properties": { + "aprop": { + "$ref": "#/$defs/foo", + "required": ["baz"], + }, + }, + } + + validator = validators.Draft202012Validator(schema) + e1, e2 = validator.iter_errors({"aprop": {}}) + self.assertEqual( + ( + e1.message, + e1.validator, + e1.validator_value, + e1.instance, + e1.absolute_path, + e1.schema, + e1.schema_path, + e1.relative_schema_path, + e1.json_path, + ), + ( + "'bar' is a required property", + "required", + ["bar"], + {}, + deque(["aprop"]), + {"required": ["bar"]}, + deque(["properties", "aprop", "required"]), + deque(["properties", "aprop", "required"]), + "$.aprop", + ), + ) + self.assertEqual( + ( + e2.message, + e2.validator, + e2.validator_value, + e2.instance, + e2.absolute_path, + e2.schema, + e2.schema_path, + e2.relative_schema_path, + e2.json_path, + ), + ( + "'baz' is a required property", + "required", + ["baz"], + {}, + deque(["aprop"]), + {"$ref": "#/$defs/foo", "required": ["baz"]}, + deque(["properties", "aprop", "required"]), + deque(["properties", "aprop", "required"]), + "$.aprop", + ), + ) + + +class MetaSchemaTestsMixin(object): + # TODO: These all 
belong upstream + def test_invalid_properties(self): + with self.assertRaises(exceptions.SchemaError): + self.Validator.check_schema({"properties": {"test": object()}}) + + def test_minItems_invalid_string(self): + with self.assertRaises(exceptions.SchemaError): + # needs to be an integer + self.Validator.check_schema({"minItems": "1"}) + + def test_enum_allows_empty_arrays(self): + """ + Technically, all the spec says is they SHOULD have elements, not MUST. + + See https://github.com/Julian/jsonschema/issues/529. + """ + self.Validator.check_schema({"enum": []}) + + def test_enum_allows_non_unique_items(self): + """ + Technically, all the spec says is they SHOULD be unique, not MUST. + + See https://github.com/Julian/jsonschema/issues/529. + """ + self.Validator.check_schema({"enum": [12, 12]}) + + +class ValidatorTestMixin(MetaSchemaTestsMixin, object): + def test_it_implements_the_validator_protocol(self): + self.assertIsInstance(self.Validator({}), protocols.Validator) + + def test_valid_instances_are_valid(self): + schema, instance = self.valid + self.assertTrue(self.Validator(schema).is_valid(instance)) + + def test_invalid_instances_are_not_valid(self): + schema, instance = self.invalid + self.assertFalse(self.Validator(schema).is_valid(instance)) + + def test_non_existent_properties_are_ignored(self): + self.Validator({object(): object()}).validate(instance=object()) + + def test_it_creates_a_ref_resolver_if_not_provided(self): + self.assertIsInstance( + self.Validator({}).resolver, + validators.RefResolver, + ) + + def test_it_delegates_to_a_ref_resolver(self): + ref, schema = "someCoolRef", {"type": "integer"} + resolver = validators.RefResolver("", {}, store={ref: schema}) + validator = self.Validator({"$ref": ref}, resolver=resolver) + + with self.assertRaises(exceptions.ValidationError): + validator.validate(None) + + def test_it_delegates_to_a_legacy_ref_resolver(self): + """ + Legacy RefResolvers support only the context manager form of + resolution. 
+ """ + + class LegacyRefResolver(object): + @contextmanager + def resolving(this, ref): + self.assertEqual(ref, "the ref") + yield {"type": "integer"} + + resolver = LegacyRefResolver() + schema = {"$ref": "the ref"} + + with self.assertRaises(exceptions.ValidationError): + self.Validator(schema, resolver=resolver).validate(None) + + def test_is_type_is_true_for_valid_type(self): + self.assertTrue(self.Validator({}).is_type("foo", "string")) + + def test_is_type_is_false_for_invalid_type(self): + self.assertFalse(self.Validator({}).is_type("foo", "array")) + + def test_is_type_evades_bool_inheriting_from_int(self): + self.assertFalse(self.Validator({}).is_type(True, "integer")) + self.assertFalse(self.Validator({}).is_type(True, "number")) + + def test_it_can_validate_with_decimals(self): + schema = {"items": {"type": "number"}} + Validator = validators.extend( + self.Validator, + type_checker=self.Validator.TYPE_CHECKER.redefine( + "number", + lambda checker, thing: isinstance( + thing, (int, float, Decimal), + ) and not isinstance(thing, bool), + ), + ) + + validator = Validator(schema) + validator.validate([1, 1.1, Decimal(1) / Decimal(8)]) + + invalid = ["foo", {}, [], True, None] + self.assertEqual( + [error.instance for error in validator.iter_errors(invalid)], + invalid, + ) + + def test_it_returns_true_for_formats_it_does_not_know_about(self): + validator = self.Validator( + {"format": "carrot"}, format_checker=FormatChecker(), + ) + validator.validate("bugs") + + def test_it_does_not_validate_formats_by_default(self): + validator = self.Validator({}) + self.assertIsNone(validator.format_checker) + + def test_it_validates_formats_if_a_checker_is_provided(self): + checker = FormatChecker() + bad = ValueError("Bad!") + + @checker.checks("foo", raises=ValueError) + def check(value): + if value == "good": + return True + elif value == "bad": + raise bad + else: # pragma: no cover + self.fail("What is {}? 
[Baby Don't Hurt Me]".format(value)) + + validator = self.Validator( + {"format": "foo"}, format_checker=checker, + ) + + validator.validate("good") + with self.assertRaises(exceptions.ValidationError) as cm: + validator.validate("bad") + + # Make sure original cause is attached + self.assertIs(cm.exception.cause, bad) + + def test_non_string_custom_type(self): + non_string_type = object() + schema = {"type": [non_string_type]} + Crazy = validators.extend( + self.Validator, + type_checker=self.Validator.TYPE_CHECKER.redefine( + non_string_type, + lambda checker, thing: isinstance(thing, int), + ), + ) + Crazy(schema).validate(15) + + def test_it_properly_formats_tuples_in_errors(self): + """ + A tuple instance properly formats validation errors for uniqueItems. + + See https://github.com/Julian/jsonschema/pull/224 + """ + TupleValidator = validators.extend( + self.Validator, + type_checker=self.Validator.TYPE_CHECKER.redefine( + "array", + lambda checker, thing: isinstance(thing, tuple), + ), + ) + with self.assertRaises(exceptions.ValidationError) as e: + TupleValidator({"uniqueItems": True}).validate((1, 1)) + self.assertIn("(1, 1) has non-unique elements", str(e.exception)) + + def test_check_redefined_sequence(self): + """ + Allow array to validate against another defined sequence type + """ + schema = {"type": "array", "uniqueItems": True} + MyMapping = namedtuple("MyMapping", "a, b") + Validator = validators.extend( + self.Validator, + type_checker=self.Validator.TYPE_CHECKER.redefine_many( + { + "array": lambda checker, thing: isinstance( + thing, (list, deque), + ), + "object": lambda checker, thing: isinstance( + thing, (dict, MyMapping), + ), + }, + ), + ) + validator = Validator(schema) + + valid_instances = [ + deque(["a", None, "1", "", True]), + deque([[False], [0]]), + [deque([False]), deque([0])], + [[deque([False])], [deque([0])]], + [[[[[deque([False])]]]], [[[[deque([0])]]]]], + [deque([deque([False])]), deque([deque([0])])], + [MyMapping("a", 
0), MyMapping("a", False)], + [ + MyMapping("a", [deque([0])]), + MyMapping("a", [deque([False])]), + ], + [ + MyMapping("a", [MyMapping("a", deque([0]))]), + MyMapping("a", [MyMapping("a", deque([False]))]), + ], + [deque(deque(deque([False]))), deque(deque(deque([0])))], + ] + + for instance in valid_instances: + validator.validate(instance) + + invalid_instances = [ + deque(["a", "b", "a"]), + deque([[False], [False]]), + [deque([False]), deque([False])], + [[deque([False])], [deque([False])]], + [[[[[deque([False])]]]], [[[[deque([False])]]]]], + [deque([deque([False])]), deque([deque([False])])], + [MyMapping("a", False), MyMapping("a", False)], + [ + MyMapping("a", [deque([False])]), + MyMapping("a", [deque([False])]), + ], + [ + MyMapping("a", [MyMapping("a", deque([False]))]), + MyMapping("a", [MyMapping("a", deque([False]))]), + ], + [deque(deque(deque([False]))), deque(deque(deque([False])))], + ] + + for instance in invalid_instances: + with self.assertRaises(exceptions.ValidationError): + validator.validate(instance) + + +class AntiDraft6LeakMixin(object): + """ + Make sure functionality from draft 6 doesn't leak backwards in time. 
+ """ + + def test_True_is_not_a_schema(self): + with self.assertRaises(exceptions.SchemaError) as e: + self.Validator.check_schema(True) + self.assertIn("True is not of type", str(e.exception)) + + def test_False_is_not_a_schema(self): + with self.assertRaises(exceptions.SchemaError) as e: + self.Validator.check_schema(False) + self.assertIn("False is not of type", str(e.exception)) + + @unittest.skip(bug(523)) + def test_True_is_not_a_schema_even_if_you_forget_to_check(self): + resolver = validators.RefResolver("", {}) + with self.assertRaises(Exception) as e: + self.Validator(True, resolver=resolver).validate(12) + self.assertNotIsInstance(e.exception, exceptions.ValidationError) + + @unittest.skip(bug(523)) + def test_False_is_not_a_schema_even_if_you_forget_to_check(self): + resolver = validators.RefResolver("", {}) + with self.assertRaises(Exception) as e: + self.Validator(False, resolver=resolver).validate(12) + self.assertNotIsInstance(e.exception, exceptions.ValidationError) + + +class TestDraft3Validator(AntiDraft6LeakMixin, ValidatorTestMixin, TestCase): + Validator = validators.Draft3Validator + valid: tuple[dict, dict] = ({}, {}) + invalid = {"type": "integer"}, "foo" + + def test_any_type_is_valid_for_type_any(self): + validator = self.Validator({"type": "any"}) + validator.validate(object()) + + def test_any_type_is_redefinable(self): + """ + Sigh, because why not. 
+ """ + Crazy = validators.extend( + self.Validator, + type_checker=self.Validator.TYPE_CHECKER.redefine( + "any", lambda checker, thing: isinstance(thing, int), + ), + ) + validator = Crazy({"type": "any"}) + validator.validate(12) + with self.assertRaises(exceptions.ValidationError): + validator.validate("foo") + + def test_is_type_is_true_for_any_type(self): + self.assertTrue(self.Validator({"type": "any"}).is_valid(object())) + + def test_is_type_does_not_evade_bool_if_it_is_being_tested(self): + self.assertTrue(self.Validator({}).is_type(True, "boolean")) + self.assertTrue(self.Validator({"type": "any"}).is_valid(True)) + + +class TestDraft4Validator(AntiDraft6LeakMixin, ValidatorTestMixin, TestCase): + Validator = validators.Draft4Validator + valid: tuple[dict, dict] = ({}, {}) + invalid = {"type": "integer"}, "foo" + + +class TestDraft6Validator(ValidatorTestMixin, TestCase): + Validator = validators.Draft6Validator + valid: tuple[dict, dict] = ({}, {}) + invalid = {"type": "integer"}, "foo" + + +class TestDraft7Validator(ValidatorTestMixin, TestCase): + Validator = validators.Draft7Validator + valid: tuple[dict, dict] = ({}, {}) + invalid = {"type": "integer"}, "foo" + + +class TestDraft201909Validator(ValidatorTestMixin, TestCase): + Validator = validators.Draft201909Validator + valid: tuple[dict, dict] = ({}, {}) + invalid = {"type": "integer"}, "foo" + + +class TestDraft202012Validator(ValidatorTestMixin, TestCase): + Validator = validators.Draft202012Validator + valid: tuple[dict, dict] = ({}, {}) + invalid = {"type": "integer"}, "foo" + + +class TestValidatorFor(TestCase): + def test_draft_3(self): + schema = {"$schema": "http://json-schema.org/draft-03/schema"} + self.assertIs( + validators.validator_for(schema), + validators.Draft3Validator, + ) + + schema = {"$schema": "http://json-schema.org/draft-03/schema#"} + self.assertIs( + validators.validator_for(schema), + validators.Draft3Validator, + ) + + def test_draft_4(self): + schema = {"$schema": 
"http://json-schema.org/draft-04/schema"} + self.assertIs( + validators.validator_for(schema), + validators.Draft4Validator, + ) + + schema = {"$schema": "http://json-schema.org/draft-04/schema#"} + self.assertIs( + validators.validator_for(schema), + validators.Draft4Validator, + ) + + def test_draft_6(self): + schema = {"$schema": "http://json-schema.org/draft-06/schema"} + self.assertIs( + validators.validator_for(schema), + validators.Draft6Validator, + ) + + schema = {"$schema": "http://json-schema.org/draft-06/schema#"} + self.assertIs( + validators.validator_for(schema), + validators.Draft6Validator, + ) + + def test_draft_7(self): + schema = {"$schema": "http://json-schema.org/draft-07/schema"} + self.assertIs( + validators.validator_for(schema), + validators.Draft7Validator, + ) + + schema = {"$schema": "http://json-schema.org/draft-07/schema#"} + self.assertIs( + validators.validator_for(schema), + validators.Draft7Validator, + ) + + def test_draft_201909(self): + schema = {"$schema": "https://json-schema.org/draft/2019-09/schema"} + self.assertIs( + validators.validator_for(schema), + validators.Draft201909Validator, + ) + + schema = {"$schema": "https://json-schema.org/draft/2019-09/schema#"} + self.assertIs( + validators.validator_for(schema), + validators.Draft201909Validator, + ) + + def test_draft_202012(self): + schema = {"$schema": "https://json-schema.org/draft/2020-12/schema"} + self.assertIs( + validators.validator_for(schema), + validators.Draft202012Validator, + ) + + schema = {"$schema": "https://json-schema.org/draft/2020-12/schema#"} + self.assertIs( + validators.validator_for(schema), + validators.Draft202012Validator, + ) + + def test_True(self): + self.assertIs( + validators.validator_for(True), + validators._LATEST_VERSION, + ) + + def test_False(self): + self.assertIs( + validators.validator_for(False), + validators._LATEST_VERSION, + ) + + def test_custom_validator(self): + Validator = validators.create( + meta_schema={"id": "meta 
schema id"}, + version="12", + id_of=lambda s: s.get("id", ""), + ) + schema = {"$schema": "meta schema id"} + self.assertIs( + validators.validator_for(schema), + Validator, + ) + + def test_custom_validator_draft6(self): + Validator = validators.create( + meta_schema={"$id": "meta schema $id"}, + version="13", + ) + schema = {"$schema": "meta schema $id"} + self.assertIs( + validators.validator_for(schema), + Validator, + ) + + def test_validator_for_jsonschema_default(self): + self.assertIs(validators.validator_for({}), validators._LATEST_VERSION) + + def test_validator_for_custom_default(self): + self.assertIs(validators.validator_for({}, default=None), None) + + def test_warns_if_meta_schema_specified_was_not_found(self): + with self.assertWarns(DeprecationWarning) as cm: + validators.validator_for(schema={"$schema": "unknownSchema"}) + + self.assertEqual(cm.filename, __file__) + self.assertEqual( + str(cm.warning), + "The metaschema specified by $schema was not found. " + "Using the latest draft to validate, but this will raise " + "an error in the future.", + ) + + def test_does_not_warn_if_meta_schema_is_unspecified(self): + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") + validators.validator_for(schema={}, default={}) + self.assertFalse(w) + + +class TestValidate(TestCase): + def assertUses(self, schema, Validator): + result = [] + with mock.patch.object(Validator, "check_schema", result.append): + validators.validate({}, schema) + self.assertEqual(result, [schema]) + + def test_draft3_validator_is_chosen(self): + self.assertUses( + schema={"$schema": "http://json-schema.org/draft-03/schema#"}, + Validator=validators.Draft3Validator, + ) + # Make sure it works without the empty fragment + self.assertUses( + schema={"$schema": "http://json-schema.org/draft-03/schema"}, + Validator=validators.Draft3Validator, + ) + + def test_draft4_validator_is_chosen(self): + self.assertUses( + schema={"$schema": 
"http://json-schema.org/draft-04/schema#"}, + Validator=validators.Draft4Validator, + ) + # Make sure it works without the empty fragment + self.assertUses( + schema={"$schema": "http://json-schema.org/draft-04/schema"}, + Validator=validators.Draft4Validator, + ) + + def test_draft6_validator_is_chosen(self): + self.assertUses( + schema={"$schema": "http://json-schema.org/draft-06/schema#"}, + Validator=validators.Draft6Validator, + ) + # Make sure it works without the empty fragment + self.assertUses( + schema={"$schema": "http://json-schema.org/draft-06/schema"}, + Validator=validators.Draft6Validator, + ) + + def test_draft7_validator_is_chosen(self): + self.assertUses( + schema={"$schema": "http://json-schema.org/draft-07/schema#"}, + Validator=validators.Draft7Validator, + ) + # Make sure it works without the empty fragment + self.assertUses( + schema={"$schema": "http://json-schema.org/draft-07/schema"}, + Validator=validators.Draft7Validator, + ) + + def test_draft202012_validator_is_chosen(self): + self.assertUses( + schema={ + "$schema": "https://json-schema.org/draft/2020-12/schema#", + }, + Validator=validators.Draft202012Validator, + ) + # Make sure it works without the empty fragment + self.assertUses( + schema={ + "$schema": "https://json-schema.org/draft/2020-12/schema", + }, + Validator=validators.Draft202012Validator, + ) + + def test_draft202012_validator_is_the_default(self): + self.assertUses(schema={}, Validator=validators.Draft202012Validator) + + def test_validation_error_message(self): + with self.assertRaises(exceptions.ValidationError) as e: + validators.validate(12, {"type": "string"}) + self.assertRegex( + str(e.exception), + "(?s)Failed validating '.*' in schema.*On instance", + ) + + def test_schema_error_message(self): + with self.assertRaises(exceptions.SchemaError) as e: + validators.validate(12, {"type": 12}) + self.assertRegex( + str(e.exception), + "(?s)Failed validating '.*' in metaschema.*On schema", + ) + + def 
test_it_uses_best_match(self): + # This is a schema that best_match will recurse into + schema = {"oneOf": [{"type": "string"}, {"type": "array"}]} + with self.assertRaises(exceptions.ValidationError) as e: + validators.validate(12, schema) + self.assertIn("12 is not of type", str(e.exception)) + + +class TestRefResolver(TestCase): + + base_uri = "" + stored_uri = "foo://stored" + stored_schema = {"stored": "schema"} + + def setUp(self): + self.referrer = {} + self.store = {self.stored_uri: self.stored_schema} + self.resolver = validators.RefResolver( + self.base_uri, self.referrer, self.store, + ) + + def test_it_does_not_retrieve_schema_urls_from_the_network(self): + ref = validators.Draft3Validator.META_SCHEMA["id"] + with mock.patch.object(self.resolver, "resolve_remote") as patched: + with self.resolver.resolving(ref) as resolved: + pass + self.assertEqual(resolved, validators.Draft3Validator.META_SCHEMA) + self.assertFalse(patched.called) + + def test_it_resolves_local_refs(self): + ref = "#/properties/foo" + self.referrer["properties"] = {"foo": object()} + with self.resolver.resolving(ref) as resolved: + self.assertEqual(resolved, self.referrer["properties"]["foo"]) + + def test_it_resolves_local_refs_with_id(self): + schema = {"id": "http://bar/schema#", "a": {"foo": "bar"}} + resolver = validators.RefResolver.from_schema( + schema, + id_of=lambda schema: schema.get("id", ""), + ) + with resolver.resolving("#/a") as resolved: + self.assertEqual(resolved, schema["a"]) + with resolver.resolving("http://bar/schema#/a") as resolved: + self.assertEqual(resolved, schema["a"]) + + def test_it_retrieves_stored_refs(self): + with self.resolver.resolving(self.stored_uri) as resolved: + self.assertIs(resolved, self.stored_schema) + + self.resolver.store["cached_ref"] = {"foo": 12} + with self.resolver.resolving("cached_ref#/foo") as resolved: + self.assertEqual(resolved, 12) + + def test_it_retrieves_unstored_refs_via_requests(self): + ref = "http://bar#baz" + schema 
= {"baz": 12} + + if "requests" in sys.modules: + self.addCleanup( + sys.modules.__setitem__, "requests", sys.modules["requests"], + ) + sys.modules["requests"] = ReallyFakeRequests({"http://bar": schema}) + + with self.resolver.resolving(ref) as resolved: + self.assertEqual(resolved, 12) + + def test_it_retrieves_unstored_refs_via_urlopen(self): + ref = "http://bar#baz" + schema = {"baz": 12} + + if "requests" in sys.modules: + self.addCleanup( + sys.modules.__setitem__, "requests", sys.modules["requests"], + ) + sys.modules["requests"] = None + + @contextmanager + def fake_urlopen(url): + self.assertEqual(url, "http://bar") + yield BytesIO(json.dumps(schema).encode("utf8")) + + self.addCleanup(setattr, validators, "urlopen", validators.urlopen) + validators.urlopen = fake_urlopen + + with self.resolver.resolving(ref) as resolved: + pass + self.assertEqual(resolved, 12) + + def test_it_retrieves_local_refs_via_urlopen(self): + with tempfile.NamedTemporaryFile(delete=False, mode="wt") as tempf: + self.addCleanup(os.remove, tempf.name) + json.dump({"foo": "bar"}, tempf) + + ref = "file://{}#foo".format(pathname2url(tempf.name)) + with self.resolver.resolving(ref) as resolved: + self.assertEqual(resolved, "bar") + + def test_it_can_construct_a_base_uri_from_a_schema(self): + schema = {"id": "foo"} + resolver = validators.RefResolver.from_schema( + schema, + id_of=lambda schema: schema.get("id", ""), + ) + self.assertEqual(resolver.base_uri, "foo") + self.assertEqual(resolver.resolution_scope, "foo") + with resolver.resolving("") as resolved: + self.assertEqual(resolved, schema) + with resolver.resolving("#") as resolved: + self.assertEqual(resolved, schema) + with resolver.resolving("foo") as resolved: + self.assertEqual(resolved, schema) + with resolver.resolving("foo#") as resolved: + self.assertEqual(resolved, schema) + + def test_it_can_construct_a_base_uri_from_a_schema_without_id(self): + schema = {} + resolver = validators.RefResolver.from_schema(schema) + 
self.assertEqual(resolver.base_uri, "") + self.assertEqual(resolver.resolution_scope, "") + with resolver.resolving("") as resolved: + self.assertEqual(resolved, schema) + with resolver.resolving("#") as resolved: + self.assertEqual(resolved, schema) + + def test_custom_uri_scheme_handlers(self): + def handler(url): + self.assertEqual(url, ref) + return schema + + schema = {"foo": "bar"} + ref = "foo://bar" + resolver = validators.RefResolver("", {}, handlers={"foo": handler}) + with resolver.resolving(ref) as resolved: + self.assertEqual(resolved, schema) + + def test_cache_remote_on(self): + response = [object()] + + def handler(url): + try: + return response.pop() + except IndexError: # pragma: no cover + self.fail("Response must not have been cached!") + + ref = "foo://bar" + resolver = validators.RefResolver( + "", {}, cache_remote=True, handlers={"foo": handler}, + ) + with resolver.resolving(ref): + pass + with resolver.resolving(ref): + pass + + def test_cache_remote_off(self): + response = [object()] + + def handler(url): + try: + return response.pop() + except IndexError: # pragma: no cover + self.fail("Handler called twice!") + + ref = "foo://bar" + resolver = validators.RefResolver( + "", {}, cache_remote=False, handlers={"foo": handler}, + ) + with resolver.resolving(ref): + pass + + def test_if_you_give_it_junk_you_get_a_resolution_error(self): + error = ValueError("Oh no! 
What's this?") + + def handler(url): + raise error + + ref = "foo://bar" + resolver = validators.RefResolver("", {}, handlers={"foo": handler}) + with self.assertRaises(exceptions.RefResolutionError) as err: + with resolver.resolving(ref): + self.fail("Shouldn't get this far!") # pragma: no cover + self.assertEqual(err.exception, exceptions.RefResolutionError(error)) + + def test_helpful_error_message_on_failed_pop_scope(self): + resolver = validators.RefResolver("", {}) + resolver.pop_scope() + with self.assertRaises(exceptions.RefResolutionError) as exc: + resolver.pop_scope() + self.assertIn("Failed to pop the scope", str(exc.exception)) + + +def sorted_errors(errors): + def key(error): + return ( + [str(e) for e in error.path], + [str(e) for e in error.schema_path], + ) + return sorted(errors, key=key) + + +@attr.s +class ReallyFakeRequests(object): + + _responses = attr.ib() + + def get(self, url): + response = self._responses.get(url) + if url is None: # pragma: no cover + raise ValueError("Unknown URL: " + repr(url)) + return _ReallyFakeJSONResponse(json.dumps(response)) + + +@attr.s +class _ReallyFakeJSONResponse(object): + + _response = attr.ib() + + def json(self): + return json.loads(self._response) diff --git a/.venv/lib/python3.9/site-packages/jsonschema/validators.py b/.venv/lib/python3.9/site-packages/jsonschema/validators.py new file mode 100644 index 0000000..a689e51 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/jsonschema/validators.py @@ -0,0 +1,1058 @@ +""" +Creation and extension of validators, with implementations for existing drafts. 
+""" +from __future__ import annotations + +from collections import deque +from collections.abc import Sequence +from functools import lru_cache +from urllib.parse import unquote, urldefrag, urljoin, urlsplit +from urllib.request import urlopen +from warnings import warn +import contextlib +import json +import reprlib +import typing +import warnings + +import attr + +from jsonschema import ( + _legacy_validators, + _types, + _utils, + _validators, + exceptions, +) + +_VALIDATORS: dict[str, typing.Any] = {} +_META_SCHEMAS = _utils.URIDict() +_VOCABULARIES: list[tuple[str, typing.Any]] = [] + + +def __getattr__(name): + if name == "ErrorTree": + warnings.warn( + "Importing ErrorTree from jsonschema.validators is deprecated. " + "Instead import it from jsonschema.exceptions.", + DeprecationWarning, + stacklevel=2, + ) + from jsonschema.exceptions import ErrorTree + return ErrorTree + elif name == "validators": + warnings.warn( + "Accessing jsonschema.validators.validators is deprecated. " + "Use jsonschema.validators.validator_for with a given schema.", + DeprecationWarning, + stacklevel=2, + ) + return _VALIDATORS + elif name == "meta_schemas": + warnings.warn( + "Accessing jsonschema.validators.meta_schemas is deprecated. " + "Use jsonschema.validators.validator_for with a given schema.", + DeprecationWarning, + stacklevel=2, + ) + return _META_SCHEMAS + raise AttributeError(f"module {__name__} has no attribute {name}") + + +def validates(version): + """ + Register the decorated validator for a ``version`` of the specification. + + Registered validators and their meta schemas will be considered when + parsing ``$schema`` properties' URIs. 
+ + Arguments: + + version (str): + + An identifier to use as the version's name + + Returns: + + collections.abc.Callable: + + a class decorator to decorate the validator with the version + """ + + def _validates(cls): + _VALIDATORS[version] = cls + meta_schema_id = cls.ID_OF(cls.META_SCHEMA) + _META_SCHEMAS[meta_schema_id] = cls + return cls + return _validates + + +def _id_of(schema): + """ + Return the ID of a schema for recent JSON Schema drafts. + """ + if schema is True or schema is False: + return "" + return schema.get("$id", "") + + +def _store_schema_list(): + if not _VOCABULARIES: + _VOCABULARIES.extend(_utils.load_schema("vocabularies").items()) + return [ + (id, validator.META_SCHEMA) for id, validator in _META_SCHEMAS.items() + ] + _VOCABULARIES + + +def create( + meta_schema, + validators=(), + version=None, + type_checker=_types.draft7_type_checker, + id_of=_id_of, + applicable_validators=lambda schema: schema.items(), +): + """ + Create a new validator class. + + Arguments: + + meta_schema (collections.abc.Mapping): + + the meta schema for the new validator class + + validators (collections.abc.Mapping): + + a mapping from names to callables, where each callable will + validate the schema property with the given name. + + Each callable should take 4 arguments: + + 1. a validator instance, + 2. the value of the property being validated within the + instance + 3. the instance + 4. the schema + + version (str): + + an identifier for the version that this validator class will + validate. If provided, the returned validator class will + have its ``__name__`` set to include the version, and also + will have `jsonschema.validators.validates` automatically + called for the given version. + + type_checker (jsonschema.TypeChecker): + + a type checker, used when applying the :validator:`type` validator. + + If unprovided, a `jsonschema.TypeChecker` will be created + with a set of default types typical of JSON Schema drafts. 
+ + id_of (collections.abc.Callable): + + A function that given a schema, returns its ID. + + applicable_validators (collections.abc.Callable): + + A function that given a schema, returns the list of applicable + validators (names and callables) which will be called to + validate the instance. + + Returns: + + a new `jsonschema.protocols.Validator` class + """ + + @attr.s + class Validator: + + VALIDATORS = dict(validators) + META_SCHEMA = dict(meta_schema) + TYPE_CHECKER = type_checker + ID_OF = staticmethod(id_of) + + schema = attr.ib(repr=reprlib.repr) + resolver = attr.ib(default=None, repr=False) + format_checker = attr.ib(default=None) + evolve = attr.evolve + + def __attrs_post_init__(self): + if self.resolver is None: + self.resolver = RefResolver.from_schema( + self.schema, + id_of=id_of, + ) + + @classmethod + def check_schema(cls, schema): + for error in cls(cls.META_SCHEMA).iter_errors(schema): + raise exceptions.SchemaError.create_from(error) + + def iter_errors(self, instance, _schema=None): + if _schema is not None: + warnings.warn( + ( + "Passing a schema to Validator.iter_errors " + "is deprecated and will be removed in a future " + "release. Call validator.evolve(schema=new_schema)." + "iter_errors(...) instead." 
+ ), + DeprecationWarning, + stacklevel=2, + ) + else: + _schema = self.schema + + if _schema is True: + return + elif _schema is False: + yield exceptions.ValidationError( + f"False schema does not allow {instance!r}", + validator=None, + validator_value=None, + instance=instance, + schema=_schema, + ) + return + + scope = id_of(_schema) + if scope: + self.resolver.push_scope(scope) + try: + for k, v in applicable_validators(_schema): + validator = self.VALIDATORS.get(k) + if validator is None: + continue + + errors = validator(self, v, instance, _schema) or () + for error in errors: + # set details if not already set by the called fn + error._set( + validator=k, + validator_value=v, + instance=instance, + schema=_schema, + ) + if k not in {"if", "$ref"}: + error.schema_path.appendleft(k) + yield error + finally: + if scope: + self.resolver.pop_scope() + + def descend(self, instance, schema, path=None, schema_path=None): + for error in self.evolve(schema=schema).iter_errors(instance): + if path is not None: + error.path.appendleft(path) + if schema_path is not None: + error.schema_path.appendleft(schema_path) + yield error + + def validate(self, *args, **kwargs): + for error in self.iter_errors(*args, **kwargs): + raise error + + def is_type(self, instance, type): + try: + return self.TYPE_CHECKER.is_type(instance, type) + except exceptions.UndefinedTypeCheck: + raise exceptions.UnknownType(type, instance, self.schema) + + def is_valid(self, instance, _schema=None): + if _schema is not None: + warnings.warn( + ( + "Passing a schema to Validator.is_valid is deprecated " + "and will be removed in a future release. Call " + "validator.evolve(schema=new_schema).is_valid(...) " + "instead." 
+ ), + DeprecationWarning, + stacklevel=2, + ) + self = self.evolve(schema=_schema) + + error = next(self.iter_errors(instance), None) + return error is None + + if version is not None: + safe = version.title().replace(" ", "").replace("-", "") + Validator.__name__ = Validator.__qualname__ = f"{safe}Validator" + Validator = validates(version)(Validator) + + return Validator + + +def extend(validator, validators=(), version=None, type_checker=None): + """ + Create a new validator class by extending an existing one. + + Arguments: + + validator (jsonschema.protocols.Validator): + + an existing validator class + + validators (collections.abc.Mapping): + + a mapping of new validator callables to extend with, whose + structure is as in `create`. + + .. note:: + + Any validator callables with the same name as an + existing one will (silently) replace the old validator + callable entirely, effectively overriding any validation + done in the "parent" validator class. + + If you wish to instead extend the behavior of a parent's + validator callable, delegate and call it directly in + the new validator function by retrieving it using + ``OldValidator.VALIDATORS["validator_name"]``. + + version (str): + + a version for the new validator class + + type_checker (jsonschema.TypeChecker): + + a type checker, used when applying the :validator:`type` validator. + + If unprovided, the type checker of the extended + `jsonschema.protocols.Validator` will be carried along. + + Returns: + + a new `jsonschema.protocols.Validator` class extending the one + provided + + .. note:: Meta Schemas + + The new validator class will have its parent's meta schema. + + If you wish to change or extend the meta schema in the new + validator class, modify ``META_SCHEMA`` directly on the returned + class. Note that no implicit copying is done, so a copy should + likely be made before modifying it, in order to not affect the + old validator. 
+ """ + + all_validators = dict(validator.VALIDATORS) + all_validators.update(validators) + + if type_checker is None: + type_checker = validator.TYPE_CHECKER + return create( + meta_schema=validator.META_SCHEMA, + validators=all_validators, + version=version, + type_checker=type_checker, + id_of=validator.ID_OF, + ) + + +Draft3Validator = create( + meta_schema=_utils.load_schema("draft3"), + validators={ + "$ref": _validators.ref, + "additionalItems": _validators.additionalItems, + "additionalProperties": _validators.additionalProperties, + "dependencies": _legacy_validators.dependencies_draft3, + "disallow": _legacy_validators.disallow_draft3, + "divisibleBy": _validators.multipleOf, + "enum": _validators.enum, + "extends": _legacy_validators.extends_draft3, + "format": _validators.format, + "items": _legacy_validators.items_draft3_draft4, + "maxItems": _validators.maxItems, + "maxLength": _validators.maxLength, + "maximum": _legacy_validators.maximum_draft3_draft4, + "minItems": _validators.minItems, + "minLength": _validators.minLength, + "minimum": _legacy_validators.minimum_draft3_draft4, + "pattern": _validators.pattern, + "patternProperties": _validators.patternProperties, + "properties": _legacy_validators.properties_draft3, + "type": _legacy_validators.type_draft3, + "uniqueItems": _validators.uniqueItems, + }, + type_checker=_types.draft3_type_checker, + version="draft3", + id_of=lambda schema: schema.get("id", ""), + applicable_validators=_legacy_validators.ignore_ref_siblings, +) + +Draft4Validator = create( + meta_schema=_utils.load_schema("draft4"), + validators={ + "$ref": _validators.ref, + "additionalItems": _validators.additionalItems, + "additionalProperties": _validators.additionalProperties, + "allOf": _validators.allOf, + "anyOf": _validators.anyOf, + "dependencies": _legacy_validators.dependencies_draft4_draft6_draft7, + "enum": _validators.enum, + "format": _validators.format, + "items": _legacy_validators.items_draft3_draft4, + "maxItems": 
_validators.maxItems, + "maxLength": _validators.maxLength, + "maxProperties": _validators.maxProperties, + "maximum": _legacy_validators.maximum_draft3_draft4, + "minItems": _validators.minItems, + "minLength": _validators.minLength, + "minProperties": _validators.minProperties, + "minimum": _legacy_validators.minimum_draft3_draft4, + "multipleOf": _validators.multipleOf, + "not": _validators.not_, + "oneOf": _validators.oneOf, + "pattern": _validators.pattern, + "patternProperties": _validators.patternProperties, + "properties": _validators.properties, + "required": _validators.required, + "type": _validators.type, + "uniqueItems": _validators.uniqueItems, + }, + type_checker=_types.draft4_type_checker, + version="draft4", + id_of=lambda schema: schema.get("id", ""), + applicable_validators=_legacy_validators.ignore_ref_siblings, +) + +Draft6Validator = create( + meta_schema=_utils.load_schema("draft6"), + validators={ + "$ref": _validators.ref, + "additionalItems": _validators.additionalItems, + "additionalProperties": _validators.additionalProperties, + "allOf": _validators.allOf, + "anyOf": _validators.anyOf, + "const": _validators.const, + "contains": _legacy_validators.contains_draft6_draft7, + "dependencies": _legacy_validators.dependencies_draft4_draft6_draft7, + "enum": _validators.enum, + "exclusiveMaximum": _validators.exclusiveMaximum, + "exclusiveMinimum": _validators.exclusiveMinimum, + "format": _validators.format, + "items": _legacy_validators.items_draft6_draft7_draft201909, + "maxItems": _validators.maxItems, + "maxLength": _validators.maxLength, + "maxProperties": _validators.maxProperties, + "maximum": _validators.maximum, + "minItems": _validators.minItems, + "minLength": _validators.minLength, + "minProperties": _validators.minProperties, + "minimum": _validators.minimum, + "multipleOf": _validators.multipleOf, + "not": _validators.not_, + "oneOf": _validators.oneOf, + "pattern": _validators.pattern, + "patternProperties": 
_validators.patternProperties, + "properties": _validators.properties, + "propertyNames": _validators.propertyNames, + "required": _validators.required, + "type": _validators.type, + "uniqueItems": _validators.uniqueItems, + }, + type_checker=_types.draft6_type_checker, + version="draft6", + applicable_validators=_legacy_validators.ignore_ref_siblings, +) + +Draft7Validator = create( + meta_schema=_utils.load_schema("draft7"), + validators={ + "$ref": _validators.ref, + "additionalItems": _validators.additionalItems, + "additionalProperties": _validators.additionalProperties, + "allOf": _validators.allOf, + "anyOf": _validators.anyOf, + "const": _validators.const, + "contains": _legacy_validators.contains_draft6_draft7, + "dependencies": _legacy_validators.dependencies_draft4_draft6_draft7, + "enum": _validators.enum, + "exclusiveMaximum": _validators.exclusiveMaximum, + "exclusiveMinimum": _validators.exclusiveMinimum, + "format": _validators.format, + "if": _validators.if_, + "items": _legacy_validators.items_draft6_draft7_draft201909, + "maxItems": _validators.maxItems, + "maxLength": _validators.maxLength, + "maxProperties": _validators.maxProperties, + "maximum": _validators.maximum, + "minItems": _validators.minItems, + "minLength": _validators.minLength, + "minProperties": _validators.minProperties, + "minimum": _validators.minimum, + "multipleOf": _validators.multipleOf, + "not": _validators.not_, + "oneOf": _validators.oneOf, + "pattern": _validators.pattern, + "patternProperties": _validators.patternProperties, + "properties": _validators.properties, + "propertyNames": _validators.propertyNames, + "required": _validators.required, + "type": _validators.type, + "uniqueItems": _validators.uniqueItems, + }, + type_checker=_types.draft7_type_checker, + version="draft7", + applicable_validators=_legacy_validators.ignore_ref_siblings, +) + +Draft201909Validator = create( + meta_schema=_utils.load_schema("draft2019-09"), + validators={ + "$recursiveRef": 
_legacy_validators.recursiveRef, + "$ref": _validators.ref, + "additionalItems": _validators.additionalItems, + "additionalProperties": _validators.additionalProperties, + "allOf": _validators.allOf, + "anyOf": _validators.anyOf, + "const": _validators.const, + "contains": _validators.contains, + "dependentRequired": _validators.dependentRequired, + "dependentSchemas": _validators.dependentSchemas, + "enum": _validators.enum, + "exclusiveMaximum": _validators.exclusiveMaximum, + "exclusiveMinimum": _validators.exclusiveMinimum, + "format": _validators.format, + "if": _validators.if_, + "items": _legacy_validators.items_draft6_draft7_draft201909, + "maxItems": _validators.maxItems, + "maxLength": _validators.maxLength, + "maxProperties": _validators.maxProperties, + "maximum": _validators.maximum, + "minItems": _validators.minItems, + "minLength": _validators.minLength, + "minProperties": _validators.minProperties, + "minimum": _validators.minimum, + "multipleOf": _validators.multipleOf, + "not": _validators.not_, + "oneOf": _validators.oneOf, + "pattern": _validators.pattern, + "patternProperties": _validators.patternProperties, + "properties": _validators.properties, + "propertyNames": _validators.propertyNames, + "required": _validators.required, + "type": _validators.type, + "unevaluatedItems": _validators.unevaluatedItems, + "unevaluatedProperties": _validators.unevaluatedProperties, + "uniqueItems": _validators.uniqueItems, + }, + type_checker=_types.draft201909_type_checker, + version="draft2019-09", +) + +Draft202012Validator = create( + meta_schema=_utils.load_schema("draft2020-12"), + validators={ + "$dynamicRef": _validators.dynamicRef, + "$ref": _validators.ref, + "additionalItems": _validators.additionalItems, + "additionalProperties": _validators.additionalProperties, + "allOf": _validators.allOf, + "anyOf": _validators.anyOf, + "const": _validators.const, + "contains": _validators.contains, + "dependentRequired": _validators.dependentRequired, + 
"dependentSchemas": _validators.dependentSchemas, + "enum": _validators.enum, + "exclusiveMaximum": _validators.exclusiveMaximum, + "exclusiveMinimum": _validators.exclusiveMinimum, + "format": _validators.format, + "if": _validators.if_, + "items": _validators.items, + "maxItems": _validators.maxItems, + "maxLength": _validators.maxLength, + "maxProperties": _validators.maxProperties, + "maximum": _validators.maximum, + "minItems": _validators.minItems, + "minLength": _validators.minLength, + "minProperties": _validators.minProperties, + "minimum": _validators.minimum, + "multipleOf": _validators.multipleOf, + "not": _validators.not_, + "oneOf": _validators.oneOf, + "pattern": _validators.pattern, + "patternProperties": _validators.patternProperties, + "prefixItems": _validators.prefixItems, + "properties": _validators.properties, + "propertyNames": _validators.propertyNames, + "required": _validators.required, + "type": _validators.type, + "unevaluatedItems": _validators.unevaluatedItems, + "unevaluatedProperties": _validators.unevaluatedProperties, + "uniqueItems": _validators.uniqueItems, + }, + type_checker=_types.draft202012_type_checker, + version="draft2020-12", +) + +_LATEST_VERSION = Draft202012Validator + + +class RefResolver(object): + """ + Resolve JSON References. + + Arguments: + + base_uri (str): + + The URI of the referring document + + referrer: + + The actual referring document + + store (dict): + + A mapping from URIs to documents to cache + + cache_remote (bool): + + Whether remote refs should be cached after first resolution + + handlers (dict): + + A mapping from URI schemes to functions that should be used + to retrieve them + + urljoin_cache (:func:`functools.lru_cache`): + + A cache that will be used for caching the results of joining + the resolution scope to subscopes. + + remote_cache (:func:`functools.lru_cache`): + + A cache that will be used for caching the results of + resolved remote URLs. 
+ + Attributes: + + cache_remote (bool): + + Whether remote refs should be cached after first resolution + """ + + def __init__( + self, + base_uri, + referrer, + store=(), + cache_remote=True, + handlers=(), + urljoin_cache=None, + remote_cache=None, + ): + if urljoin_cache is None: + urljoin_cache = lru_cache(1024)(urljoin) + if remote_cache is None: + remote_cache = lru_cache(1024)(self.resolve_from_url) + + self.referrer = referrer + self.cache_remote = cache_remote + self.handlers = dict(handlers) + + self._scopes_stack = [base_uri] + self.store = _utils.URIDict(_store_schema_list()) + self.store.update(store) + self.store[base_uri] = referrer + + self._urljoin_cache = urljoin_cache + self._remote_cache = remote_cache + + @classmethod + def from_schema(cls, schema, id_of=_id_of, *args, **kwargs): + """ + Construct a resolver from a JSON schema object. + + Arguments: + + schema: + + the referring schema + + Returns: + + `RefResolver` + """ + + return cls(base_uri=id_of(schema), referrer=schema, *args, **kwargs) + + def push_scope(self, scope): + """ + Enter a given sub-scope. + + Treats further dereferences as being performed underneath the + given scope. + """ + self._scopes_stack.append( + self._urljoin_cache(self.resolution_scope, scope), + ) + + def pop_scope(self): + """ + Exit the most recent entered scope. + + Treats further dereferences as being performed underneath the + original scope. + + Don't call this method more times than `push_scope` has been + called. + """ + try: + self._scopes_stack.pop() + except IndexError: + raise exceptions.RefResolutionError( + "Failed to pop the scope from an empty stack. " + "`pop_scope()` should only be called once for every " + "`push_scope()`", + ) + + @property + def resolution_scope(self): + """ + Retrieve the current resolution scope. + """ + return self._scopes_stack[-1] + + @property + def base_uri(self): + """ + Retrieve the current base URI, not including any fragment. 
+ """ + uri, _ = urldefrag(self.resolution_scope) + return uri + + @contextlib.contextmanager + def in_scope(self, scope): + """ + Temporarily enter the given scope for the duration of the context. + """ + warnings.warn( + "jsonschema.RefResolver.in_scope is deprecated and will be " + "removed in a future release.", + DeprecationWarning, + stacklevel=3, + ) + self.push_scope(scope) + try: + yield + finally: + self.pop_scope() + + @contextlib.contextmanager + def resolving(self, ref): + """ + Resolve the given ``ref`` and enter its resolution scope. + + Exits the scope on exit of this context manager. + + Arguments: + + ref (str): + + The reference to resolve + """ + + url, resolved = self.resolve(ref) + self.push_scope(url) + try: + yield resolved + finally: + self.pop_scope() + + def _find_in_referrer(self, key): + return self._get_subschemas_cache()[key] + + @lru_cache() + def _get_subschemas_cache(self): + cache = {key: [] for key in _SUBSCHEMAS_KEYWORDS} + for keyword, subschema in _search_schema( + self.referrer, _match_subschema_keywords, + ): + cache[keyword].append(subschema) + return cache + + @lru_cache() + def _find_in_subschemas(self, url): + subschemas = self._get_subschemas_cache()["$id"] + if not subschemas: + return None + uri, fragment = urldefrag(url) + for subschema in subschemas: + target_uri = self._urljoin_cache( + self.resolution_scope, subschema["$id"], + ) + if target_uri.rstrip("/") == uri.rstrip("/"): + if fragment: + subschema = self.resolve_fragment(subschema, fragment) + return url, subschema + return None + + def resolve(self, ref): + """ + Resolve the given reference. + """ + url = self._urljoin_cache(self.resolution_scope, ref).rstrip("/") + + match = self._find_in_subschemas(url) + if match is not None: + return match + + return url, self._remote_cache(url) + + def resolve_from_url(self, url): + """ + Resolve the given remote URL. 
+ """ + url, fragment = urldefrag(url) + try: + document = self.store[url] + except KeyError: + try: + document = self.resolve_remote(url) + except Exception as exc: + raise exceptions.RefResolutionError(exc) + + return self.resolve_fragment(document, fragment) + + def resolve_fragment(self, document, fragment): + """ + Resolve a ``fragment`` within the referenced ``document``. + + Arguments: + + document: + + The referent document + + fragment (str): + + a URI fragment to resolve within it + """ + + fragment = fragment.lstrip("/") + + if not fragment: + return document + + if document is self.referrer: + find = self._find_in_referrer + else: + + def find(key): + yield from _search_schema(document, _match_keyword(key)) + + for keyword in ["$anchor", "$dynamicAnchor"]: + for subschema in find(keyword): + if fragment == subschema[keyword]: + return subschema + for keyword in ["id", "$id"]: + for subschema in find(keyword): + if "#" + fragment == subschema[keyword]: + return subschema + + # Resolve via path + parts = unquote(fragment).split("/") if fragment else [] + for part in parts: + part = part.replace("~1", "/").replace("~0", "~") + + if isinstance(document, Sequence): + # Array indexes should be turned into integers + try: + part = int(part) + except ValueError: + pass + try: + document = document[part] + except (TypeError, LookupError): + raise exceptions.RefResolutionError( + f"Unresolvable JSON pointer: {fragment!r}", + ) + + return document + + def resolve_remote(self, uri): + """ + Resolve a remote ``uri``. + + If called directly, does not check the store first, but after + retrieving the document at the specified URI it will be saved in + the store if :attr:`cache_remote` is True. + + .. note:: + + If the requests_ library is present, ``jsonschema`` will use it to + request the remote ``uri``, so that the correct encoding is + detected and used. + + If it isn't, or if the scheme of the ``uri`` is not ``http`` or + ``https``, UTF-8 is assumed. 
+ + Arguments: + + uri (str): + + The URI to resolve + + Returns: + + The retrieved document + + .. _requests: https://pypi.org/project/requests/ + """ + try: + import requests + except ImportError: + requests = None + + scheme = urlsplit(uri).scheme + + if scheme in self.handlers: + result = self.handlers[scheme](uri) + elif scheme in ["http", "https"] and requests: + # Requests has support for detecting the correct encoding of + # json over http + result = requests.get(uri).json() + else: + # Otherwise, pass off to urllib and assume utf-8 + with urlopen(uri) as url: + result = json.loads(url.read().decode("utf-8")) + + if self.cache_remote: + self.store[uri] = result + return result + + +_SUBSCHEMAS_KEYWORDS = ("$id", "id", "$anchor", "$dynamicAnchor") + + +def _match_keyword(keyword): + + def matcher(value): + if keyword in value: + yield value + + return matcher + + +def _match_subschema_keywords(value): + for keyword in _SUBSCHEMAS_KEYWORDS: + if keyword in value: + yield keyword, value + + +def _search_schema(schema, matcher): + """Breadth-first search routine.""" + values = deque([schema]) + while values: + value = values.pop() + if not isinstance(value, dict): + continue + yield from matcher(value) + values.extendleft(value.values()) + + +def validate(instance, schema, cls=None, *args, **kwargs): + """ + Validate an instance under the given schema. + + >>> validate([2, 3, 4], {"maxItems": 2}) + Traceback (most recent call last): + ... + ValidationError: [2, 3, 4] is too long + + :func:`validate` will first verify that the provided schema is + itself valid, since not doing so can lead to less obvious error + messages and fail in less obvious or consistent ways. + + If you know you have a valid schema already, especially if you + intend to validate multiple instances with the same schema, you + likely would prefer using the `Validator.validate` method directly + on a specific validator (e.g. ``Draft7Validator.validate``). 
+ + + Arguments: + + instance: + + The instance to validate + + schema: + + The schema to validate with + + cls (Validator): + + The class that will be used to validate the instance. + + If the ``cls`` argument is not provided, two things will happen + in accordance with the specification. First, if the schema has a + :validator:`$schema` property containing a known meta-schema [#]_ + then the proper validator will be used. The specification recommends + that all schemas contain :validator:`$schema` properties for this + reason. If no :validator:`$schema` property is found, the default + validator class is the latest released draft. + + Any other provided positional and keyword arguments will be passed + on when instantiating the ``cls``. + + Raises: + + `jsonschema.exceptions.ValidationError` if the instance + is invalid + + `jsonschema.exceptions.SchemaError` if the schema itself + is invalid + + .. rubric:: Footnotes + .. [#] known by a validator registered with + `jsonschema.validators.validates` + """ + if cls is None: + cls = validator_for(schema) + + cls.check_schema(schema) + validator = cls(schema, *args, **kwargs) + error = exceptions.best_match(validator.iter_errors(instance)) + if error is not None: + raise error + + +def validator_for(schema, default=_LATEST_VERSION): + """ + Retrieve the validator class appropriate for validating the given schema. + + Uses the :validator:`$schema` property that should be present in the + given schema to look up the appropriate validator class. + + Arguments: + + schema (collections.abc.Mapping or bool): + + the schema to look at + + default: + + the default to return if the appropriate validator class + cannot be determined. + + If unprovided, the default is to return the latest supported + draft. + """ + if schema is True or schema is False or "$schema" not in schema: + return default + if schema["$schema"] not in _META_SCHEMAS: + warn( + ( + "The metaschema specified by $schema was not found. 
" + "Using the latest draft to validate, but this will raise " + "an error in the future." + ), + DeprecationWarning, + stacklevel=2, + ) + return _META_SCHEMAS.get(schema["$schema"], _LATEST_VERSION) diff --git a/.venv/lib/python3.9/site-packages/nacl/__init__.py b/.venv/lib/python3.9/site-packages/nacl/__init__.py new file mode 100644 index 0000000..cc8b7ea --- /dev/null +++ b/.venv/lib/python3.9/site-packages/nacl/__init__.py @@ -0,0 +1,39 @@ +# Copyright 2013 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +__all__ = [ + "__title__", + "__summary__", + "__uri__", + "__version__", + "__author__", + "__email__", + "__license__", + "__copyright__", +] + +__title__ = "PyNaCl" +__summary__ = ( + "Python binding to the Networking and Cryptography (NaCl) library" +) +__uri__ = "https://github.com/pyca/pynacl/" + +__version__ = "1.5.0" + +__author__ = "The PyNaCl developers" +__email__ = "cryptography-dev@python.org" + +__license__ = "Apache License 2.0" +__copyright__ = "Copyright 2013-2018 {}".format(__author__) diff --git a/.venv/lib/python3.9/site-packages/nacl/_sodium.abi3.so b/.venv/lib/python3.9/site-packages/nacl/_sodium.abi3.so new file mode 100755 index 0000000000000000000000000000000000000000..5759d60caf6179526c26a8d1d108a9cc8103ff29 GIT binary patch literal 1021039 zcmeFad3Y5?_68he6@j1_Tu}sBbVNlaxJKDcG^mIyDv<#M)B!gV#03XT0=b4L{%~Pb z927xxL{S8R5sjk2D6S~(+n~@GP!t7M^1bglr*3!OE!Xe+{=7bqj@A_`mU;SK;3lyv+QU8G+0QWJVw} z0+|uWj6h}tG9!=~fy@YGMj$f+nGwj0KxPCoBaj(^%m`#gATt7)5y*@{W&|=LkQsr@ z2xLYeGXj|r$c#W{1TrI#8G+0QWJVw}0+|uWj6h}tG9!=~fy@YGMj$f+nGwj0KxPCo zBaj(^%m`#gATt7)5y*@{W&|=LkQsr@2xLYeGXj|r$c(`Mza#MGuiN+F!T)-A@LwMD z=L7%M4YRV!t(k-HxAR_ES;L24(0Awsb~E^A3W$UM*){L-|3+3ee0cHbNyR>4EWep~ zS~h4_1Np&g{bh9;WKFSO@@M$)3B@HNCq!?>^4t8D)^38A81O${+fTV}V*Gr3c1yvDHj|Ha`OHKF+0%dZ@36tVo4Z%}?W9;O8D+A$3A#?9fw zM_qO01b+Zy`Stig`OTfC9C+>e=6DAG4j*1}<#m@|dFk-WuN=b{JH+z)Vv^2lGrko9 z|M5B&la}bezMS;wc|p%WiIa~Wb}#0da>xH2*5Sji95;Nz_2WlgHE#IT#n%E?Vt#xj zh5QG8tndCKpJUtw{th2L_9}nWcpc`~#PA$wb$A`iuS%kO>nb^Z27;$BPA zH*Jc4nq?jBu6Ymt(e9pv|5=wx|H{eCx&r-__LMC1Z)OBCBaj(^%m`#gATt7)5y*@{W&|=LkQsr@ z2xLYeGXj|r$c#W{1TrI#8G+0QWJVw}0+|uWj6h}tG9!=~fy@YGMj$f+nGwj0KxPCo zBaj(^%m`#gATt7)5y*@{W&|=LkQsr@2xLYeGXj|r$c#W{1TrI#8G+0QWJVw}0{z+e;4j!0ax~+45Wy5j38fN8JX5~lvevx0*u+iNO__I2HZQphE@aP|I)rf$T%BdhbPCRclh+l{jeB5U)ja^C<^epSw9 z{)v2^A9=BBwYL{Y3L8r(08)OsuMSE!#L^}l&l*>FaHbV1FAne2c5 z+Qxj;vukzH>i<{Q7(MS$utz^?bGl)CynbElp^K|)bGqQFFtXd*eTVcJeJI*mAEaZl z4=^yMDq)!Ajs@O)=}W!``yHoTP8^Q#8UuFaV{1~)Lz-3lVF{nL* 
z^6l9b=b;r4)(pc)DOEw$tk=12>UvqsYY#JpILJTu&#?@ zeG|V>`jC|h*6U`NXg$rsAl7w0)=Np`W4&X9V*LlIc+F5I{-8bX=2uRh4OyC-UpZ)D zer10Md;jIM;f0kVbG0SwYD=nXmvVxNjsS{(@=$ z@~?{9W*iO7^1{ts4Eo#XX1h zEK*PEy@WdIe>Qfg+y9(z-01s5{-+3zXB9;b{)ft2Sha|1RaoWx&xcUKbp6jux1y_I z|MT`>b|c__+G1EBvuXLCJ5f0;|MPbc2K`S7flx8(@;{f5&^IuZa5$7$^FPmt#4h#y z&nP-1@{L->)c)s25zAG{{m(@{fmp@oA_hE_|9O-|KIo0`VFYdd=bk|_;=ccR0_9=Q z$xS)^&tNb}>3?#40s-CFfllRreyj+AKEgn&|1ncOgTYF`ln;Q_Sa*SRhq9@j>2nfs z4jzUE`PMu)ku`_1=1m8K!b*v%YR>mM5C&?Ow}#uP`4?=7cq%dI^~F==4fZRDP48?~ z$~MK?bb2bY={X<@*z{fi$vIgZwCRfwB_*)wn+LLo&RJ!-?$KHKcJYKnb`j31%RGde4kFz+-H`F8Sh^no3F zUv2u)irxHCA9j#alZH*N2AXZ>3-$`J>DB0O$p5@yrGjRl3fA4_hBcM{`HDn7)_o1D_@5al6aO<0u|?4T z6jt_KEcUyyit$ciWfk3yxuBBe-fsu_{^u&jCGbBdvmyAO-pVwQ|LGw`VgJ+iuWUNG z|LKJ~>VKx8(Lnx#d}@7};opQ1!kgCK@~KZ5Pyo>*C`Q*MpIU<7G5OSB&!?2g zn)%e*{RK1`B+jQ6`vd~|mZ>((Op#B$U&x-1NA8Av>T#n_nNR%_L;>Hs2H^>N70Rc&GAfGCr>;O_ z=$QGhG7`Ct)!}=q;S-J#sFF{;{txR|@_cH~+3eWl3h@yej6S75t0s}}Beo+jVaLp$ zJpq2cKa)6#*wgU6^k-Kqnk4@0d@B{KogCIw{%rp+)}Dq{{MmCin?Kkf;liKDrw;Y~ z*}oXNze4I9kd5*|n%+`P3nG`=7mxTb%!?GoPvlyOFN{ zxpp$T8umX^3)qc7KJ^x(nl$}SA5=~&pE?ADLI2Z%Kqy>w`JWaf^bO3Z07r>6|1(Y` zHjq!XQt)Z}pTCG$rtm*aeFCwH<8CtGsr=7{B=SK|gAXHU^FL?i%ZU5_=Ms8R4o1{z zruILLz#ygn`TEQ#fq;JYMgyJ7|2#}0AN1RZO9(oYPo2hKC1A>#PfbE&5Fp<(-A*EQ zC(V57L_zEFsrPPBtSR!TMt%jc=^t2?l=;*vAPU&@9}p0+d!c-)2sabh^t=19hc2I5 zOd@tK&3tMoYFj?_%k|c`ZKl+hT1hJ=K z)2m_A??-KVn8f+i%~mQ{2Rf{&@~IwStm6TPlVJYmSjB4jRQ{j9O8>K?k6=yWf8McD z!TQJ~orkhi{^w2-`Q!Q`Vh&;z|8pA3B%d0HyMHF1`g^wTe-1}o`k$>ZCGsz-3jySO?3RW(-;HavnY1> zR^xXJzJ781W;lNH$gjKOH^=ym4e*-~&#zG&ztbGQfgyhVjo$$Qe(lrYx2?qbTc7+o zI(&berTtxtV%Ogd_#M;VUHiw*$Ni4q5^(Sqpj_r-k@34Oz^^o(-^4h6g^u4S@*Cs$ z6&b%y0e;=$`E`%u_j|GRw-x!_&7h3((%ksHk7C!~P52$t-!Y)atv;#tN1tx5+7TB| z)tS#1(NUzC&$lDca6HvjHf96y)D#9-X~t7Oo{?re^%@9+@zi@00EC9Uu6XKs68c`g z2B8G4+Aq4*cxW$$XAX7STAByt@~BcC6J+7dsV>-}++bu4*4f2>hUj8flqELA+UgG~Jx zrHysW;;GM073>yI{fr1mu%{9GF`k->+Q6P9p1Q?K1?v#&TgrH{O;;8W7|KM##ZSHu42sgv1_fd9$DuwdTPT2C%U<+Sqovq2d2KZ6K_ 
zicy#UIh};Qf%!We4yC~S&;25?0sqsR4vBoz^gkDiSf=nlr}zY774IKsz*G632#I{q z%izNZ+WgOTC&`HW{^vH7he0Pd4VzvKo4!6*uqVmqpRiKFT6($3SStT>HHmzza}bjdtNEWxz|UV# zp2UcU$ndI}OxEu68g>~$Y z-q+OkKj)$@{m(&c2>z$NGEL-v+DcK_|70D&rjz@hcBrHNrz08-~h;@tZ(?jU2!6#_z-czuxiuHpcO5=J>TI zziB8Jd~J;1mN7oQo%kJtZ*UyH=RhR$wE^bb>*x5rPUDXL-V@+AFP>lPFu$n(F5Nas z57(CD=v2^oSK;7l)gT!0U*N)DXlE&`!Z ziNAPuG=LDr9VqAN>)6w?3so_^xg<27@xR)f*Kx>;I&u-JA;`ZCY|U=sK<7f903%b? zXLt5j&)dbE{bj;(o`4!XMb;Ai)0}(c97*m8Rw{TKJG@`YIg*pahD%b212St3>yMX) zupVJppFkBHfG!#0X+m8XFcVBXD)zqUS85OGrNwH zUNO$c8P1m4D>pCh3}?b~knjl|mZLV1shzEc0PC5VF+7%8+2LL*6+3&?D3hZC7DfW| zQtjqtmp6<=l%dUnHG+Q>ylAHhbs3XrK6m4AtdmOO;0sD2C+OMT8m6xA<%!Lhu9h;+3 zUGAOmC1St&!l?D1xe|x!vsUmQGhSZ72z13v*SV_wLuH3jbO+l17~>G=&hh@K|DzKf zny2O;hMHoxMpxA2(`b!$r{v_w|2F6VsD3_%vo{(Vg7f|j***p1O%x`wSc}aHeET<> zL7ex8#X1DAC~dPv`SLyiPm4gH@4Fa}kZBqg3p1VjfFe@Ou#<0iMBy~WJH<)`?;ye; z>%&;7fHCJLHsxj?Qd04Db0;J(1sFNS*I|W7oszh^jomDOC zNs36(!jQQV-LFJmtF~$why%7Nf)25ljf8pLS!B+`&vIt_Quda^mahJ5cjm_E|6}L@ z_5Wz;>|E9VOKe7}SQwmi$I_pza?@PSE{hwI+4{;4w2z-@D4qu)goxoCA?u-@BoX>HA=`l~~_lf%!1dE6tRb zukVWyq`U;Dp-K+z@4s**)HrJe|ACTwmlXkT>B}uS_*bd2Tz6D-YM?CtWu&4C?p%l} zc+6t}e7+$MHT!nm4(**w1>Y_T?jIFxCJK{x0H{bJ({m)-XXD>NE@(qyLz3k(FT1?v zB;*v~*#;>1DS$sTW7(%P}L(xF*^l!xD&b)Ol96G)CQu3G25HNz#FCivQoiY zJk0Q(!NN#qKO}+pzhYQNkjTf%c!^k>Db{lpE2f5zD_oAkKOy>g`zo5nF`~~v3Rrh` zlAbIUtm`f|tg}S)16U#Y4(n19`B?vlFa@H2sW$@mMf4{zaH*UOu`huo8MMy(7rtiN zGqSceXADXx^T(5PRIPW4R)H}u?iB0qia`0DQbs-vJ&(|`DfuT?fD^#g@*OlBkbmSm z&s}67Hy|MJy5oU8%m?CCh6jxQQH;JxzB3QMWAcecpbzsK=lGS9Uu(y2vhh1Jz;9qY zzs<010e%60jG?D~cGz3HD)~h2Rq$=9my4m7%U41>vR3dPJOKIjg$*b&WzVU|eAR&! z5qgOA?+!TWs3b-EG1OF*Tk1tk5|mgRm^We;hz!nAC446%VWg+Q2B$q@09b>0m0K}5V1cPUBzNd^%gqU03uBYhEveOLKDPm06x zeSBk`^kmQ5oR2yx->uPVKz<5h_MAaFq;=-E z*R#pk{I;LfPB*{p40;H*%WnsvTbvimZ(D1xEWbUTefH<2E+^mNgSOJCB>C-1D;0}! 
z`voRPsq)*KNJJUZ{Ps~qHNMY3Rsjj%Wr>|T}F8V2_buH#hjMKVt)HP zY6DS{{C2LDip44!V0csIwjp+#(EUE-1l>PJYlidN z0dg~}?_KwxLJEB!h&rb4%YM%a>O13~b6os`c*5)c8&@3=-*3ki!90s%bW7s<)%YC~ z|1<)9V*8PA@y{QMP2(R#;e+5?a(lty2J>Y^G+8V7k1UFL-Px#R%yTqj?t5@_vY~$W zF!o6^W8McfQ)JBCPei!;^+BSbnlb;-AMMj*v_WB_@Z4q0ACr)it{HO^z@mU|*6{ql z;0Xb>jCm779x_d1Q?43_Diu*|#(b-$6XUH^1hkjKJD!EX@Vwg3LXQh$9R@hWdWvA3 zJQH>pz8{h*uPrzi>&HzMz9rE|I{5Q2O&a?%5JW=~qVPrJcZ5pmW z5Z#UR#pCxyi)#jYmlx`gmcNSXH4kDj%-#b~n_clNZedO7Ue*A0LDfH6dRG~J*}W`` zbOzVi*gGhIRImYb3lHC8+`XY7#7_wXA|KV}l%W?;yfI#V6aZ&!&cPDXxa_+By28j0 z-Z^`+kbU5-F%413b>cVA2KA^t-iQ3llfm1=3(8ne*8JlwAqeLmi+@GuQYhd%sH6GE zM(BECeKCJK+E^v^w+Fy^r}VeKoCPppf7?X%JKcKnMU0!$^tW%cPRHLa1Xa}EE@k_a z^Sb=)10?i)*lUNYK$^du0iKvtMxs#o1t<(CPs$xI`rC69Q4)W9yp`(ux5Jyt-~Lb# z!g{!24f)$!8Px>*?W?l2De)=RlfPo=pYgYqsLpj^_}gRHI)=1Pe>;RthSrliS=F@s zZ8H!D{O!qrOcP=L_SgNvJnC=v*Z!ovp8OnYqdQ6b?E_XS=4?W~Nl+?(`!^E#{P}=uaL;c`UBz!sOF{KHMlRTSs{^y^T=55KKFaNWBBTc3~QhUJxGeh zz_YYUiuG=xj9Kq)yAuFXC}=NmHGjM#B>&LQ^}P+`CWIqiYm|$Aeu^ssdj!Q!KbPQl zjDBwZE|%X-j^C^hzbfN5B*1S}o%{mpi>9BsKWJCO`Z);!M^gR#pckry^>a&$pnjeV zY;;xW=;tETtXn^)gDR?@ce4F-^m8%^eK~pzQA|KT|E`!Vx-MskL#Am)*Ig7*68+4! 
zQjz(udK%tT`uR4Ae5`vAVGyh7=L@m}DVqz}kNS|I4Y8)FpHopAqnAWKM_H+0J;`BB zrJo(bSkD)%(9gGfnCN>Fxdikxdry7U&y5U9payeE3jKUctEA9Rp^VYbDc@nRQt0PH z;Hvt0_aF6x{t5bdj04#lP`r)}IqY!ec?PU8ik*J8!tWUU+|@vC^@)9+sdU?c1rff{ z^6PIE8uh6%uQ7LfWe>3fhXgutjGy{+ltUPr`b1y7xK5Oz!yEfF%&O^w-!(;{ROdNO zb)H1Aq&XF5pgD?yW>xWfv+Go-ylY5E`NC4@54Aarmta$`_S)dS*wnvDbT)aNxB54m ztD_lHVXl5WUFT{K|7vQ}n)~IN&twS$&1cyE?b?5CrF4jYbs3Bbh^l>};*W*@7>jrM zq47XGf%&>gTgCH_tr(xwXE!ikRk$L!At-k9H449D=4&wM1N;)$KZ9cS59hngwuUt?Wem<}r6{;_?| z`8P`vcCZTUr9z&2ro3cbQ_PoCPT+P@)OK&Z^iwo!D zBKPsz-Tym9x4WO!8Tm=}CEa%Sne1>H5?>A3_z!BT#3$VD9;6b)eNWDr%*sP(TPFM@ z6eg1RY7w1XUL^^ADO#SbQiM$kO40R-*|xi9q43X0(W#0kcDsANN>Ljt6}&&3YFgzlT-Dt$VrYS%- zpk@jIvhxxbfGR3LBha?YbzSEr&LW{dDc2!3q8YOD5+^BU6QJ`Ml>G@a^lN=V6e~b> zUg8rg6#;tcM8livyuw=nf``^Z(iu&LE**@pJF8}*SE(rYzIvTJjOy+-U>T!15 z`ana)oVNd6gxWxq#Q)xIr6Nq15(e?6^1lPZSZ~IOFJd+Sdxm1Q^??gf_-6#ao}!7( zKg|Db%95U_rC}_s_}X-~8{=|0zSYpA8YQCGo!_yM_F3 zNiFL+|GSp)QJUuSRMbpiKF$B;fGTP}+o5fl@Vfl(J|y&K<|M$P6q*11WshJs^Vv!< zrya%p2epAH)_j`(onxgU@z67`B0ncMys6~p@G#aw!3z0#?KqR4|4Fo9?zd_u<>!3{C6J#7 zL6anYTP)S1_NVGI^x2o6X}nA!KMPUYmlwqUXx6_D%8y(BGlpUR`y#@ir2hAVV+l#m zqr3d~SD6_;D!}}g9Zy4==zkZXX5IdGI;bE`x;}p=+ov?u<$ouW(3hsi5Mj|8x%^i# zyZjf0!Zh9dR}m%gzu8tQ()3jq!<)+gzD*(@>mCFg#Om_jUr_4jzl?S`0BK78RMe&} z#vIgj`LC4<){`98RQ|U^80-0hmHF>6Ci$L(3jzOo`1h3l4;cqQ`saZzssCLpOnmwO z7(EZjKQB|sKWh8(UbJ<=j*$Fw{m+5y4K&^u0Mqq9!`2wZmvXRw`J%ql|T`_4M~hryu_T`6oZ{c|N#m zKGz)$2jn06^hsv}xf2nEx8^-}{vB5Y_AH8>KCQ-YR1WIX-JlQio8kD)Bfl|@-yGvN zHo$K}JiqR7{7!TH29jSR$FINfJ0QTXeLTO7?}qRN*3Zm;{s;g;yP$W5-R^%NGQaRYw6SrM}&qH0m^xI1hb&;Njx_<3I zt=i)?kQ#ER>n~1z-FjP=l-c@7bMHk|;oSXGcRDt{%O;X**3TgK<-cg_L_qMbgZ||* zU9T-Sh7#}NVRZjlVZ8D{-)ob*89|^*pFT32;C%=C6Q&*Vs)Tf6b+pw_ger%;Fk~f+ zj#LKSVX1@<4fACOAO6Vp@ToAoeH70WekIQoE~L|h9m=lg4E~^60xFWop{ciy6!zI= z`=F4|^v6C^xKJkZTWniLq`ro#igj-|_4rdw%dDqXO zR;NrhdH8*+R|-mB!Ews*cyJmgH^1Zet@Z{He#)5`>@#);?DA@WSeISV*6fDtkV+Vp$IwtB*jvtgUZ!bsb~ z$mDhXBj5SZt9|I_fR?=gSowt24G3spL&kZ(9AO5drGz;82nHT6#u(!<f+_z9Sh1GZob<`g(=n=9P 
z@W+MPUTw}O0P)7WVg9&CVD0ugp%~&S@ohK!j`7FcK_9R`;P-o55!l)s#x-6e$FI5Z zdmqJ)-zNNy;kWViSbmQ?eyd?`yagzi_haU(~-(-Nw{kP40eg>THBfF29OcST%WOVb%Ph@^gFz9DjF&$ZC40;5&`y zpC?orXuC?c@g&|zP+_S7#CXS_S&Dc-ZY7a^zvTA z&LkoPez0!bVT!ao4^7IK_{3U^)*p>qpq^vYqV?AreVM*Lvon0Zy{`q<8U|9n#5Wv7 z_~}mX%|Hj(D|>$?=d?CwH)9;L%ls0bIM~X(7#YA!(^ z*s*%*hA6kSIfun`jBW@LI0TKMWAr(&&DW49&5l*$0RBM=bv8P-@=)uT_?Pe}2F1Tr zcBt`W81xk|)ka@-u{fVV$6%mc#}<*u@7PD*YsWgPU%3zbfSqelI3utWg@Qee^&B{p z4n>M4_IUAnx{b;XXIQCB!u}5HlPpXUkJbxgJ=U<^geu;nhnPvNmhgrCWDq32FP`hv z_bFCZQP2u2t0X#*j|_UJowS!^4LtlZlc5QWLgJbsMhIl;0xMtXqCh1Qn!3 z*8}>n{dDBF3kiMsy$G-Z@_UG4w%wUr#hkYMeu{n2jBrp_{l?Fq^JCTlcVzcBme42$hh@@9a0vR-=}aH zkl!^g<1&T(dLZ}Z3-a4NB){?UR$>gp@>YPzKB>GNNJwFM>+~gIIeELD)-z3c+x|s5 z^0pRKQF(iJe_>y@y<9~?e-6Ka(GAGkzroYDm#?5ugg8xmIbIPZS${g$N=4p|ba+$A z+rD9}rwdlM{M z7lgk29t~Il`K|d>Fq^$>sF>5Xmy1w4xxKvIN=1GzB@E(CCBFm0Sa04Ow3j;&KKSzc zO-qyCdLRn=`xP@xeo0UHT}evF@5@pnvHVs`SyXk%Qg$F*&+bfbC!5N@dxtXC_;R%^QK5 z)S61X*#_&dyJFU1hij&MDyT>z>zes&pR&;lg`DDK>#$u(=*#jYfJIs6@+~lL`A9HZ zrhl?xPInzP?@`nSqKcSx*e4|c&a1Ri!8^N!;r%~JfRFbRU}W}d1mL$`If;C%Pa`lP z*5Q%>=S@(omb_OnYT*E+v1ZIQ)m%lRYsUVIChIHK1FTfAZfkC=`8a=JB!Csky2JV* ziF~X)Uq-BUMq%xTD2?XhUm^-h7i%@JMo=5$I$OrI8kwiAZ`&FDzgwwb?cuQM8HE7W zY9DKtFxHCzhgj{5!Xb*)^6y-SRsFG(-GpoLTxtIF1Hl@~zp-~v%oNf z#uY2!Zd|95$RF1y5a1yG8}4<%eF^sZNXX+}r!xq-x4tsc>sF}?xqkrxjo457fD@AJ zTKq*#d{MjjknF?pJpW##VSg}FIS?5Z?t}nJ)~bBb?kYUHMb_SFIWQM z`{?d6UbHV&xmyr{2mKX6f6d^lbJr_KI*<9iepBvLOL%NZQC^n=MuUDDZ$4bQ| z4j>HTO?AHd^f1=H0}e!}$opn*3Ig8!P>%rJ`ReB?==_ic?_{h1$yh})Y{{=xaxTs! 
zI$vEa#peHPeP-@*GEVOGUP2vRpXrBI1O5sAu27r9`af`Z9pMipUMn(GoluMk(Di@( zM&+Qt+Xdkd^ZUJt^|lrHZAQ7qf5z{96gz&K@H>XzvM|4Z|Frd~>ye$p*oU;{sf3ZK#-?A9ByNCw(7k32FmTlJhb{tW@Ja!XVyM z=VbQDQ!(o)nk46CHoX%tB+DBa>s04u=99=D*LM)m zKrt`%2H?IZ=4H48#e_|__o2R7X39C3!4H(PElHuAM{AW7=VgR4=DbYSQu0r(pY6a^ z^>Z*94(rq74GrXK#0uU5=&$J0Dnw7?-EFK;Gj6(DEDHxBo zM(UC>9{s%ms)yszecmQV7mpSJr|FjS=}F_!x8G7NP92Z_8&r^FjYpTWeadQG@#upj z^kv`;1RvBbi${lmCoo$)`Y0n4GEF1K%99N4+PgJb4)n*O& z6IiQ(^_MqfGeG{TnY?=^xwQ~xh$u>ON$XRkKl zcZ|K-SQ_FNuve2bzFqpHVw3om-)$L$Oj*8tfX(%Vg;n!qC#;IcIdW$r|Inxa-$$t8 zR&%n=^<1K|&Gn(0qV`$OhGc6Ko6&c_hpRNwYQ8PDDQfERrrG}SCjYq5Pm=oOe)SrK zdos2&@lAy9YjLcY()Ixgxqly@^A7?YB0bCNWz@2@et_%Q z9!YB3!r%IzLct***;;P^BK(w%u5W5{x&j`1^_pZRb`a`*1O*WM=-c8x(H>biw2J`; zd-ND|idmh#Elv(V#JtwN8fnr4tS!fa#xJZH?463oo>{$oD4LXBRf7-G-(A@ieF?m> z%Q$JTXrY8iJ&nGsv-S#SnvhF=DxH6h2ySyf8z&^P%l?Jig*cCdNTs4Srx-=ZAEbm|(DO?1>I#Ww z?jDzK zJ7SaF)?mo?G#=^*tYQ0dKUhjPmmtpY)*M`4PEkF93-)6Y8yxkUq$@Y$x8DclcQ@$6 z{0bevQRFwx_+4WBItTb2n}FZoIDWt5!E3?Siv0Q+zZS;t1C%;^pYh!l(Qn8ozSeR4 z9(Vj!Bg*g=v_O4M+$zQ#jQ_~c^0QfF7|zcofQL2Ke=k=hrBX zUo*$AJ^5|K2^zuI#`tad)yKCJzhm$%0DTzWb0CuO+kn`?=l41z6ZH3<0Ka+Z@Ehm& zm69LZ6MU16-K4rv!{yq}mw*)Hz@vAa^LjwFp#q-;RfH*uK$2fky$*;TP zmuLJM2l%y$=hr9>-zRv&RrdMlnxHG{cR+w&`*iqi+hzT&PkwAq`uhjNAI$e!6ubUz!0*`cYZ*I!_d9+| zz`>h_a^bhg_+1y^R~pZ6a2&rv$8Qw*HFErlj9;e!zi#pTHfG1-`~4^DZ!7X!fO5gt z-1xnZV%OhI_#M;ViE;cMcl=f(0P&VNeyecB@tYptH!Ggsv^ajl9KQ+V$8y0p-uRsu z;MY5z-{3fY%^bh>JJBK-b6Uc9w)93NV@5BJV-tqjV#qn$A__Zg$jt*ZN ziBgF@#|vzcK_h_w|+YO)}w;-chg#qpTqYluGrs40{oWXcg+08+t-^M zzggr*dnxi=W&DN&_>GF^NBbF;mt!2i-sIQF@yj!QjRX8z#q--3XKz0F-uk<94fri^ z_GUY-*xzSS?Br!Ne#i88VjRC2j^8};Tju!9F@9qM{3gWnqx}r`_cX_EAo)e@Yk%W+ zK!9KScz*Hrb=wZ>Z+-HkeUth4;}z}iS`@qfZou!D{`QNrH}^Y!OTYo&!!>@3jNf$u zex>pJHpkhQLdS0u`LSH^6&b%y0e;=$`7Mj%_xpF&-&W)|ZME@hZv5UyvFq<9{Eq4G zV9<8jAt^~>Pb(eYb_E3tn8ezW5F?ZP2}0Ke#Z%G7PMnPkeA;m8MQn{k0H!_sX{ z@~h@>sRS!4SP7n^k1edyM;7Mu?0#X@qI~(owUJy8Ye6`!FmvVJ;J1SRV5y}C%0o{t z^c?KH^DR1V-?M*NmsooKhYib0CYD&bp-1^xVyW}_1+K9)LRHy@!P8jL=ND{^g`Yb_ 
z1EBf>o1uJQOP^nuK?GbQ=!;Tj)rl8bHhU!`^cPtkLeR*B9a9astJpa_ha{1tK7n;M z7FbHqlr0xwfn_X9(U+cFvCyT6)37SAP(IJ&US7YZF!EzzgdZPlbD{vn&ijSl;_ZsE z{9jyPna^6mZ|>ps_0zc6gWITAyMi6Wo9g)r`^T+j*=VlAL^{K&`b#Z*iUI2@-akQv zpYk~#d~23xM@o**=0PmjuOs1-e)2u@?d0k86Ouxi9Y$T{Y5TE zye!KR4{3ouIq)tDBCF-Te$Ga}AQgQ!q8MGkz;R^?+git>0huE@Np!#l?;m|4qt_=~ z%fVs$6NE$Gxe(xrtq-D}H*TAaA)gM|dODDM7rI;dyw#Vt`ng-VymdBjb#u3P#(FFJ z%!X^Ob56xs;}Zh9&v%SfF8dl7SWnLS7gfNX{6@v|8xzOx7{{+S`E_^v@{C{O0KZo8 z{2Ima`(&&2cjxm#e%o=y{yvLh*WcCn9n;@u-JX+xVQ}`(u^%cP)xte>dQFOn(=EK8){v z$8QNZc$*!+MaJ*C0Kd|Be#_$c6*_*S$Zyww3}2D)>lEPEEuP;R(1-E;j;G@!o^3^b zcRPN~jo?pcK8Y@g=nNfajX>D3r)?3Q>Bl8`_3S(-NxKhV_j*)n+V z+rbl)K&z8a5 z&&Kmy2p8xa#j8g2_T1W>>*cm=kz|)m)!Peeb4JSTmu2TgsHdmTZ~Gr7m}uRg<2p!)&#{a3kf{^2<9`>$j|updD8<^9ht zTWu95v_UU}-s7uTgpu5PGy;EreJ)1d0`hFb z3ZD=E$CCu*WcZ&9Fw&IazNnc(hVA_thl46A!#{44A*=iOhEGZ8%WzY`q72*fV6QzP zm`#SiW^_UsPV>x76>0-ftPI=xHLkK!k>NfLubh-i@;qbLFxE=|$A6yYEga%9`7JK; zUi}{>3n$^e#O;7YdjEy-5?LfYx3cdgC1mkosgd}6_*^NAo(~^^OU|7=4>s`*Hj!M~ zW`o?f=S5rlh3zN& z!Ttn=sVx4JB=oiA6P&@Kw#-L0So}HQ>0A6~c|eOy)3o@*6;Z6V$hSo+^LkpTXiIB{ z_ZW62iMG@aV?Exm{-QPI$= zy7%R0GF>HIb$|FByKJZX!>jGG)z*vXEuPWK?6MKjboB1^8YTTmsszFV}l5%31| zDLZ|MBj1m<>H_(3L_`nOD0|B)(cC=-p3c2(!!ZEvAGLzfQz_m$!<9zQlin zaEcQD3aVjpvBWYIccM~65x{M%R`itHCPfSYj(3i*U*07V*NpA5l0FFFYktS~P<7 z9{tUvlk|)it{^3(vsh{*ju)<$vS_@}6_=blix>JuI7rFO&t)L@<*R6GM?mne>EE|~ zNVGWpUmu{nyMMOxaDOnOLjJZE#h4?B7dGHGDhK0*F`y6d3)nN0-}w~^(3f8bVeVqA z1greQ!^fV7&u5b+zjhve$b69nJ0A8ZBgr)6m*+{_pr*=is`K!>9~Sx5b+QI*pU$?f z^YGh0kg;(&+rfZEduHe1KL$_8uN?sUX^~(~%Y#<~_Z&r(U8PROjJ) zhOu4=IK*n_;g3_Sv=8fQa|#^RGQ&*r|m&B?vyd5BM=&pQp+Sf5kko_D(RArT)l*~|Vd;!~#!r|2Aw znkh`SJ@3>2RM8Y~U$iYERM+!PzrGimnofX4F|y~KyaxrdrFeTO=Co71$51;#iYHdN zGH-^JiWrR`3=USR=bg?8W4-Y)5u+mS*LO{f_Qf3+ReIxgn+Vm%U5e1_B!dXOujI*| zcX~^T%|6@nPK!&)IJx}1gE}fdyLN@-2ljZFHiz{OL}FfdppyJ;Jg$i9L=-!F+#A1R z>~SN|CzeOE$2kTqY>!7^WjLulUcLcMhVAj%f5}9fJw6E7FdKCWC+%?+YSwL!$ASu? 
zp!T?!?Q^p0vd2Xv^zFv22vRAWW{=NS%w~^AqfnTpkx@~pnk%9t_V~L81g{>xefb^3 zo5~(PMIs;TX9%;1)$H-Vz|S&P*y9z9-Z%hh+T#(5CdueFB} zoLK(N9_JVM)x4vp%|Gx9;%ql9$$;XKO;s5E27vC+U)V~_X*zRB1Uh&X?RoFLwL}&tI9f(jK$sj^!DtR({+(U}P_PFiMspY2^>Ztr| z{7)V7^WJ1-rSdZpi@(X_XX+av`ML96R&-0Yn-LVHD?b;ZX5I4hS5QUerzhG@OMZ?f zp+6@>04pFr2P$ThpA!^w+GgmZf2xGUgoP$Q&snL+&mFI)AwM^h$jAC`1TZ2$N8`T8 zPfrP%On#2ROv>}i-5BUVe*Rdm^3zz!lgUq(6o=*KiyKqR&t9mb^0RDZNPghYXRul3 zSMw0ScxzBD{(KI@8K4@AVsuLKs|olWjYl`6|?`+n?V9s;EDInC;V@)#cBtNa#=a%DF0>=Fe|b%;wMULZL8C z)1T)lq9p$Oa4Qwz{P9)8o64VmNg^L>3&U#u{LQ;%d@aA)%1Go-`13nZo2nh7NS0qs zuu{RA@35xw=ec35BLPS9t1tg+qWZIh9|3=U`!Z9e%?utOwu?y$W4uzUr10lL8RO4q zPU3JR_vcT8tLjs0G#v2f(5KTJ$bo?3O?#HZj{S}P2J8S7JAG=8-!b|$81!L&+tyj9 z>yzKc8n^y2M?1O}#g5+w{Ep$b0Q3QV0sXv8_pj$)r(N~+)AlsvnLsRstj9XFs;a)+ zJZ=m7WFoK)jdSIQS6=>|B2E=Z*Rg7PFQA&brhJ5%=h9lBCLfsQ+iCDhomx4%EbUwGGZIn-uoR1Dw{3f(l^DwK`T8jbo%AzSyQJmWH3PFsYa+nTJ_Faz-v6p&4EbkODk3LX*0ej0CNiWIh@H=)Y z<(bPf0NmdocuW?3I(%lOVw9G=ViHuv!r+YHnLnM8UEW+0QF`!}UtoP5p$Cl8C0-%! zi~k%Y!N}yrfE<0f4bG)4^loXO&)A*C2ne=me{#a9vBR`x|MJ@6gZX~)!@1;ogxvf* z`hFWe@>j2z1CiVuwMQNIJTck|=sWDu<1ZV&)d*<3W%bz$*rQbpZvbXGikzxL$U(eZ0z{I;z2@$JO#7<{|-ipBRF zh-5xC%m%;BD3|`ejw|-}o&djj@%)y>@f+v(m6G3J$8WOnJ2SvqDnH= z`}@mXA^W>>hV+L)Hx_x`MWM_{n)df5)J|@HFR@awzug?>RQ9(miG2IpU$Dadu7APw zeUk(o3G8n#>}k{Fy$S-FfICrx_V)p;8MeO<%FVF-oq8n)BDwvYhdO3|H-`61VSkU& zR`C8~H1Bmpx!9vT1JfAA&i=N-?^yfxM<{*+zfYdG{_cdk_xQY$@Y~K11D$&o#g5-< z{Ep$r=a<6ve1_vUkNo;Meshf9*Z{u?@%&nc`33Bot>^X~uVd%iHzXOk3$dP`JpWsT zE`;;HEvi~B{{uFv*730OcEDZVi4*gp0_J~heF?1(n4g zsQOkO@*>vsR6Xww)CSgM`Ja_){^zi!%>TkzM*@!Ke^@dR*Z#8{!V2VnFEn)dA4;eO zi%H7-PphQL|AZpEo-*?a71~q#zE!ecf5m4fzL^FaK;mE%_&*FH{3&ZNoS@p)KM)Er8;` zpP;`1I{?L)2U$;SkKZx=DdznI0ew2KAi`Yer!fkR`c#?sId=;sJjvydKqro2#->k< zb(9C5^l7o^$9vX^GIUsgiZy-kyQaub5Akf+GnyVwN3lE;Hk<1=6ovaxkm$6eo*moc zO_hZnO%L%Mt|t)RV9qY{y5PRdSs#g}Ca?3_XYt#tc?^IsQH@c~+1cNn_i8H@bUx7E zKSrZprCIgzE1UnypOt&(pW6Sek|SbX`_i=<1RweY#h-i4!N`9_gT*Hc!sq)iUo)_R zB>?B)B$qd)DW?a)(i~h7-&hp8`I?}MUBUA;-9aCi|LA-Lzvt9TV1Wy!%smjvQ?qmQ 
zp-6txX*AZB#!o?Da0_-i#4wigygMF+;pZ9M*-QJYH;716jp+zMNwuPi#Rh zaAva&`a!wq#>rE;=pu5_H#@)diycw9n2SZ6si-W;kGuq{=4`G%T?X-(i@DcV`NAl{ z?&J;yVPU0ENlI}ofe;0R6N4GQ1T|Mo&$r&iB%~G6C8!FRT~w<$PQmXxRlu{$0Kc#{ z=WjIo^~mqrkBNK;TdM1Oj};GX{JS%Y!Q)SSNC8 zQ42`qgWiDHgP?EFwY~;aL492z*7}}c&_l%0b+aj0<+?86x*6B|*4#qGcz-rmY5aA* z!xZj9D;2EG9afl}Fjl;Z+F|{5NeF8v!}^A<@h$#`U}av69CNQYtW!ZYAt6?-@m-7B zz*-?#{Z~<6%VoUE+zYH!u#O-M4#OZ82FEq9#&=E_>y3z$IIis!>uHLW`7ri#2Qxqm zjcanO)j+f7X2GiYaCKA+)?iT%rC(U7U|ssOVSSW^L9EsOxIRQ8Uk2Wsu2^}zGWrPX zDYywsBG1X35953TTCsc>LFJt&l^!es8m7&vj3*aFzRHh&Oc5_M`>HT98n5VTcp7W# z2hL^3$4n>yJT5UzDmZIf#D@%jkRM)!EbEoePZ-wZ?Q0PQek8fpQ-5| z`5H^21qiU%61s?Uxcrjh3n1rD{U($w@_EpAPRZ?rT)9dg$Q_8A#E*xEim&qeqn4cX zy_C1{kwo-ZzAuG0hCyihR`-v5kEcl2dC$S@GpeI;?EzYz{%uNx$VB%SCOBVNTYR)T zzi8`$`%01Jqs_f{7RwkEPlu7jAZ;1WMw2`0y-_>+@F{?A{<=F*B(m3~+a~FLz+J{s zjkOx#yEtrk$^knAG6t~MFyG@Lnz?^q3qu3hDe}VoCTJP& zB7arp*4bj0t10UUsIn`*Ra^Jt^;#-e{|i4S ztlv&y>z0wo$GQnlh*%#`Tc<|g9NY}ry4%nSq}kTW=(nVq7+Y5uk%#vanmkyq0)bjG z7cW%&=5YmD-Z|dZif-W9g#JOteUty&% z%&vGAHKPyDk3~en{Udo$(ySnD0^%x^L{?Avt+u48m#3|k{KGo~zYwRuUr8tv`*zj- z>#~=v?jPB4Ue61>Pk3_B?BVgC#+P8dU;m9>!yYyeHewH>@Aor%SWk+~9`5VC#2S>9 zQ2?bjal=KNisUx&Cse`r1T=pcob)`R3%ak)=`d}9t6#pfG8#;#u;>Hu~FB5xViHznWbV)%AHD*e*u|H#q# zL!GZ_aqFS$Q9<&TP4NBRW|Y&ze#Srm-Cc;%>}C2s%O~*L2dMnme!w3hAG*o$n?-(O zjNfg>Z)kwuB?*qMIL%oh^0q`aA?&c-9L3BX6nk zx}zoPG8J)-n)JTKC4rgdmodKTB9qrO#lm+EBJzLxXk~8r-W}3a&NjxFuPR7tu!b7LO`V=ie0h2mDaj4 zZ@iVtDo`(ncf6Q#9Q|}h0IG>ze2;fr80#>=Ay$2YZZaNTe3eGS;_Y(*zp27stOn{x z2Y()>i6M{1+uh-O3aZ>IbOQ1AdTvBU`p(Ao{NkE{-sKB)NXuVEb$O$&1ltHv)kWaF zG54|tsKszyW%OnDf*y})BDS6#@;-Qh4qVP2Zzu>Q%*IAp@AO~o(WHhbT~Ubs?U2%EqT;1u|8)B zL0F%^IFBtP*XO-Z$Mkuos8ApOeU>xWu$+ILNPR}R=<^(0QJ=@6*y-~G{EpGT!EyXf zbNmL9Uq8pMzwtXDz^{Egzt&-X0ev2-`T0-hYFDYx^0GO3-wcxPBIF*qH(=*X^%{$+ z{bwim{@q-3#}141LT(ASao;#{bZ?Q{UV^JM5^*N9BT!Ql@tKJZ>wPYsI`>piL67wC zMLyf7X1^q!IzAD1FY50~LUDw0Snm?RqGsdsM#Ox(h^id5z(U^1irHy477$V=w*ycc zh%{mJ7xMW29rUW3l}f@m`(K84KNbcP@xVe}If;C%Pa}XK*3UJnnDBQQU#5mw$g5&p 
z!~uvdkfvV9%SCP4#hB!_RHBO91FTfAZhOG6j*+P1q)=4h7DzuNk&m^JVeKYZ`O5ra zL=izN^vo3kpGFj`cyBPqAp8Yz^K8J6D)JMp;!R|2yjKoOc>GR4D}z|RIuupN5*}AX zkk9r(4VqoaXAg2+((N)V;f=3IwtiPT#~GpuS-krTpu!Lh^BzNiEYZCv0gF2!GN>BM za|kZJ_rAwN#|)w$MLA5FHvtuxhEA4>l7=4S&gV^)d*SmTBMLc0$<0qG>e&9vw7BP+ zn`tApIqlK9w*YaD*q=6nWw*EGewi~ku%WtXv{dmd!tHMmN{;bTnm&O%Lf_z6GhZqIif2`bDpqQ{$C zdR^H|HeT2BLp zd7>*x1(Vs|B!d%Ax=33dwIb)C1yQg^F5d)%Rky^l=3yy&yP~h zWY5zOCqyZE|3*y}Cy7$F2OlZG=V?}o*Uu{l6_g)Q3e3?%Y@ajF#f(Jj2^yvB@+wK_ zi_>z1IuxfdsD>z|=P1FMXclWX-+8;C+t3h*{nykC#hQo;Mf-G+C& zc>ch80-_YRp71$|e5}n3>q<5Ec`EuArEFo$!U2e44MbEir|hbMwf0g$6N*xR^%BCV z%=^+x1#3Ttl@ADn-se>9W9=5k`ZvJgxVBKNN1*SJ1q(?|by(9>(@i4;tLAmKr~#}` zOZbuZij@l1d+)Mwy^DoO-aR#)ME9o5%eSPV|AuLu|A`?to*&o7XF z<}O<7^i|jYpp$|3{{gwvR(l`HiI{QB5Tyx{|If=sn$lYR`vX#+Vzr=|O8@(+8@l@U z<1Cw{y3T8UNf`XHIjP(-0AyzxDcDQ1-rGb+j)-*NX%S!}nlJjb-tyHki{y!VrROi*oN#u{~ z(=esbfJ?ouxGx%zFUH%g=nY!u)$TOu?@qG|>CYl5oK;h;Lh0}1OMmeFXhIqDyxtc* zICUxHA6!-br=j7*@^AY+cNr_ne`VfPum&1TzQ=F|9w*K7W!+IR(SFZ!Vr4oUfMQuT zJqha|B4+bYkjT@l(}DGlAR)(C_j}F(9E|bR-s@N}kTGUFgkWHuxB7b<+m9G=U~C^I z3C8vbCFR(*_U*}HxvTaB`NCai_(Rzt{^%>5Hi)kFiHbiK{_g$M{m^(|e6e17j04yk zaJ+syEWe?@A-KjUM!zJ#X@%dYT!x`==M`FmKEN-K-0! z$X-?4jwUU-_)tB{bYGvK`&((f(ugkT`TC<~ieS~=Kh_CUunwB+^kMrnEp@$rtP2T! 
zy}AgnXanqhrH5S1p1WW*S23p@tbTftAc_rE?fqjfSgF{6J8v_*sop;}l|(+)Ct#o< zpEp1vak`@|t{|jxWCr;UNM&KLD{i~^m5cmGeP@gOr01gGH&v9}J>HM}t0~Z5oP0Qc zxI@a!A2#=%!X@X<-aoeHG!>6znUDu^U%raAF8DT?yjea}Vhkmo&D|F}l*#g;Bfy%o zZ29zGFO=z`$%cqcV+fO`i2XV=rHENR{Vu4YBKGABo3gs{=~qeUPup(TC5o86pJNJm z`XcrYjUjuKrkm-dh+;*|-mh?^m5PXUb$CUqIHEY;fj zZLYBj*emR@y7k-lu;&-di{c)(UVkg*E4+UD@nD&xnGh@-!0f^?P{B%hkc#WK529w> z>$f+83WBBUw-s!kGEmq0?KLE%VChNM2VejxShjxqrV5s2M%UB&l4*2JG>~n?oYUpu zq&rk}LYWZ`{Mh>K1S^#_(GMbqH`V&>DmYpV6zgTh$*8CHM&_L(Bz)^b<)N7?Ii$x;gkKh5>q zWvuP5-wv$M;Y_i9TZS6wu&&=;X!K=itlypiTHQ#}_1nLrBXA+Zyl-z21Vbt_3-V#*c`t2&I7+$}9QSOD;Zy)SVzRA^lHR{;SM|SZz6|M8K>jU$TY7@B()jHa>@s%!7~kKj#&+p)E(Jzk zcR8|D!?%3R5K3gCOJWA$dV391}WVOFX|g4&^Nnee43 zOr90)^7bL2KQku*7NzKBRD+?!(#wJ&zBd@*&Ao*ahCPO7aT^5K{o3U%Y0c-67U2#< z?(0Cp*@-@nG*jxh{IJdVuBcgBAI~E-yn;V&hngw!zv;UE(+N~jo$JH)X&bo8m4-^xc1`h#uh!OW?fM7bxYY(WJ@q29@WvXR(`3o)2OWm!>>dp=Jts zzDDJFEU2RLT+H?<&ll7o&qXBk<@wfuD$i{cb1TJc!aEv;!Zb~v_a62sfhacFd{M3I zvsNnB_2tQi_hA+$S=E1vL_XHf5RgDdOCXh4&o~cPPVa_#XMt42b_8y#*vc1yycL}( zwgYjOV%r8}w5&1j_di~0N9DEoG2lUYZRTyhNpKcFAd>t0U&!2iU;@cK419craO-j1 z0l}B&U_NU8aK5osf0%_m+$8?+e4ybZn?D?#PdLsWK1=(VhJ?`{{slGb_J<8Y74?Vv z=}2Uk)#VR=yD>CtodJs$)AxsWBlvMMH^Q3`+s6V_7bY6S>!*Nl6cyfTi`NIzWy>$HHt`4F3ggsh+1N#7bv4~+m{gauN^_3SU$|ZmLH{^GJAwG;j~9TVc#>j?iFyb z1HH~{GrYcn@)tA%$)!|LGzIsF$>p6net*=(Y?>nh3 z@>WhVseW6+0bLK{`s#`!?9e6YDcrY`66>pXN)0)58MnSVQ_57{YnpolaLKu|_0=(* zR6LTcuU3HEm#?C&-2uVBPGFC6jG@}2`>=7D#2&qLUC182a;D6d#pj*a^EB;IC2H1f zk4A$kYL9L~+ae5g*`vRc(3ddAOJRG|2zT7fjqtKSF81gE(HD)++aFGQ)Esv>W$#an zu}3>_BM|Sr-T^&Vd-N)QM(xoa(0Q})564CA(W3zZXOHgk@1=3n|6Kb_By zu&b$dBR=8!zC>HfW!BoaHCv+uhrkK#*G$gnaRM8223T?}TQa=-g zcX{`c(4SDoS)5RPG>!>ZF7CJqz1BMpK=KE$DhyPAeZ@^h}&*H|F5mh=jDu9FrVE>h55`A>X`ZLAvdG**`ZzP`TX+Q(0uMX zB1(?=JQ}=oK0gcG56~sEHeb(s%(sdG|Bf6QtAO(TQXXH|`PF^Yxc!LJ2+#H- zxc-hLwD;+R=zRSg+U2O?%DR5ScXd_6*YAy*DRPhXFbtsDlqVCd>sLQtV-x)(3KJ=A zwJGkGB=je`1z>TaC#kWy2RwaWzqOxWPHSzD5$%PFDAv~>s(4SeQpr8?9o|&mFU<{O 
z9SJzZx*oO(GB>+6=PlSS6Uyweh~A!y?=Z;ixbK%fB)8r7OYgs$1A)`dOK=SIlWk@oU(b1fp0e-ic9yj4!fMk>XpfG`t&Gm?VR~ibOuv`(dAnm5WgjJBVvj z+;OJ$I^05Hy}K80i3Ar<@V*~Uk;BGM6Htal)m!D9=#QwvO`qpU3^`pVmBi?t?q@sx zd6#M_H+enOn~F=yYxdR~@C3PctzH(Fs#%?Ws7h0^@b*!Vhu*Kz5wK>;f6TA)r*50c zsfqjy^quj4Tl{|@{%@IIwQBJmyi}ty=SVcszY=em8}u_?cQO|@^CL@V%b&Yu%AcwV zQbx+B$e*f7yjeL2tonC;z6VCrdukIliK>>8A^7gVAMeT?^>9ntP?}HoM?dk5zgA}Z zLN`(A7Rv3CmfkMTPSsMmRuXynl6Ma2Dpv`-%B2L9UwIb+0Y%$FAnD0nmMm`~p;Rta zIwi1{ngv^0%Br4Nt^|#JiH#mGkwLK9EwnZUp-hSud@Ho`6sW-F5d^Y@t&-yFV6&nqSfC2!``=m*Hu;dC%p|#p*7)A35XID zAqZ-*h;>j2%Eb$|OngKuSgfMhh@(}>m>G?xhK#u$E=2!Ct1_BV)Hs8ZQBot4dQ)o? z6g411kct6OcC|u4q~T@%zx7*dpMB2FO`C}0jNbGR0Mb6wolww^KUYYx>IT~w<+egf)SX3VWxEg)||v!DHE_L zj-Rc?;Ig)MRn#oB>vQ=dj$W02)6vzd@~=A@oK8Qw&VBmvJ$v%m>p<4Q3P*u#XZv*( zHC;=D7wF_Vi>R3`f-e4LAy;)I)wiubckTB>@9^<;F8+GaVNcj!N3c6o;;&n9ayjL% z&&9%P8Gqf5de^djk8iHcb>3BDj*J+8K!gd-SA@0p{$?2EpSl(O)>cjjy1T` zRM@!FjGz_Db(+JH_0;7*HA~fdcg=A5rzv)D`NI#~r_0}Z=-yra;q~hBr|bE;`TOrs z5#8Wa@vawVC(M!GxAciS@`s;@BggMQ!GDYB+Zu1{(XmQm?a)(`zRU21FS1C`$oFpG zrLZ0s#M*Vd6xIlk{cm->#?kB@GkkZ!?@N65y*2b*LylwZ{#q|~n~y(Q-fiyso%lkR zNP-cei6RVAyr`E?789?PdWB)7Hz+0UMYE8+j8zAFgqC4nr`S^fX+-_*Qy_JL5fwc{ z4B%GEH(C^w?^-1_rR%v*NX^b+u^(d1fdEA;&39b_(9U-P&^avC5tEL`gUm!(8)F6) zuxR@B3o;d>#F5~-^986HS4=4!t*%2vthX`dpkFR9G$4SrFwJ-6@D~%_pC6L$0X3|` z!j0R+IS}~&_;%?9$?#|W0pWZdLv0A>Iu_)c5jqi?^9ap(gl79ifzj+XkzAsBZOuo| zp!^s$A`Ac#27qWa0`N`*@a*KOOm^bT&f=5=_GD0B#i0JWqpKyJ*SZ(68z5D0pNix8 zwp2WK63@p#8vn8wYY@*-ecQTpIQRPOqd)W?{Dk|%pLWdX4-ec^(jVTkq-=k91odW# z<)`9wg!_finL|JLW}u_)m5SI8UJO!~sYu#Ykml4(OGlWQ=DZ_WPrKCJyUSO739F{M zREuJVF17xE8C~i(IDlTJOTA2L{uR2^LFfNPUFsvZ@1;w9^uT?0slKi9Y&;WAO4i-*s(-{<_m=bOkix74Gq zI~m_g(gah=XYi{|)7vM%(c*oGPa*-&$He(<%x}B6BNJNT_gV~~o4_KMmyEOl|LXA% zgJ^5*i4S3}(pKZ+1tJfQ`{c6&$aMP=8v76$Hz73k{H&YSchhK!RLo-j>tvy$qmIKZ08yV&*2q${r-ah@Wfg$|b*NiKHN&&t9U& zdw5Z%BFbKZA<|#B>&-Lr|2fRpE&|;rIA1&P))_H# z%UvZg^SRKaT>guCv&77&;q-+2iqM%u{(HwQ5TShj8#t87e>dYugF8)yjic#_`$uLVG-z-({y;I~)P5GXt*ddx8z8_;`k0usnr+l|A@r_=7%J=ZvCz$^x=5HTd 
z@Wj*Jk1;;|Rp)QgDcF7MQ(brXfP~xbtLs+7OqiPbu8RQBT<>1>AiFLe=GJwY=A_e` zTxpMe<-$adJscv+N}!*wOBB%_+k-oF*i+u5t=EA^jc5DFZ^rgdD9!Z~tgXy1-+%kh zX7tsAa4;N0_yGJ+=L8!w?5m~u{^6)MOJ98t4m7w&?hv2fcbKUCf`OVQn>S`h7 zrvk`tz95U3WqRbtlJzvdxoDQEZ(`L{ev?=1&?CFi1uU8IQ-&(}fx4MfhV!K6Um?GF z!8w0XeskZAd&zIUSe^E}a#M!l8~;z{H;ev<^P4($yg1)^;`zqw#XaJDBhE)=TW@>O z8gZL=y=_K5RG*e_XgvKK(~t9=TYi_~kMSND$QC}opZY*PiwR$QH`eevuqMDSv3K(4 z_8M#c?WL^R-tA8O7wNdDC7(TySj(*ySOH&A0^Ch~K_0R_NRT>mqg_I#-XlMVEhl6x z16xi8fFZ@9`)xLB6q?JB5ffohu0grKlkAn$3q)Ta}v4tEvW0ys;a9 zJyK_}5{m{>Z|5~aq<}39h@^~$gtrLZIlA6m=mmKb=xcfH|GXf=TA3a1`8R^Ud#U^| zQlY&^Gr8x>Q&d%5mF}5g1s5n75T5gT_R82l(cbm(`mM~_Wer!BR-^C0ELmcVv~r}CwbGtfi2YVw85EQS_wAnC~K(%~=6C)UYg8?;^( zRh?HYz-{guh692vT)y3{N;ZGe6GPI`nH!rR@dCOVmZp` zUsj*J(t8%%3igCvu`kJu&EMxw@8D0s<4Lcad>eG}8jHr#roJtCiE@*Yp!*Nqc3+(W8F{^=h5s9=w^F%o`ie610lf+JrLLtup?kc**>?D zJyB-RmuuAe_Vfbupv&{w8=~wDc?@7t)x~=-I3==j?PNXBU;_d}f0Kr=M(YoTfuf-s#qZATXj&P5S5MjkICnJNCYU>)eJsj8R@%Vv zdZD!M41TabM&21bm(PJGni?Y!n)+Qy-e_?4U(5gUQ)kL3Chs%07gt3~dmfWkS`4<+ z=w&@QSy9S2cjWT?IU=|5kyOK6K?BHp3LsGgiAkRn4r%8DSr{T}N2eLBhk=kd;*IZ! 
z>76^8JA$pneAh+N8&Qj+o!@bO;c7Hjp}kN0=b_ShAfrAl)~CrWGAy}A;q(!S)L=WEjXu8F9e!~QP6T@^A#%wveo{}5@?`$-OI(j`Yb zZnk~s=#{zrMMp1}EsTa-;X_BG4R84lWqRebc!iV+ScmcV8%(Pz?9B#Ssr88aLBgsQ zdg+aw?Bru4lq@R03z8qBlKXhP4hrA+h$^hf{5ZT9F3^nn_(c|3vg~XxF8AKMgN^rM z5Y&c4Rpcq|&k`84)yC0c@V@P6Ine0*T|FWdHs+DYoXOpq9Z`0N<|A)3zCPJY=OgW} zqKQ-M!{WgAxOMyM;=7xdry0o|Se8Z{OND{-U$18-5h~aS#EK9NEWU zHy$`+OO9?KUs-4QBJ_I>1@QWzd2DQxQ&-XkKm6E&iW%|mX>58~nrcSFU`#8#@C6rQ z1?2I2DxjARTap|8c)FZ+J~zDo%(QkPpM@7g6UnC%dwyO!h{d1=514RI>!GH9alR9k z#kcmCv44N4u@E8cRxR6gl`uG8L@G zEttw}PVPaT_qDhOA>P;F9w@x8NBqD5T`qn|-;_$oNAuYI#tyhxy8|-#{BvE1FF$-x zJix~n7)R!yn5b_RktG8Zu}1D(ky|W=8v+QAZYjW%03V~^0f6!74hPr)e3*j!0mh>{ z7T_s>mngUoc7aN*L+%f<(V+_l(7AQM?FaxU-rDbe6adg3q4`<@st%^uK(-|VKqaEC z!&vtZS$b{ke;Awls7%!ze^hVRA#W3O?MbvqeswqG?NPJ!`5y*UP-8We9{_9!l*BHm z#s&VOKjmPsI@FVs4-<)c^?*$R<_sqK_vi9|&W+r=B3E{|@`7GRq#S|?G{Aw`1DQ%iFrzgvODL;2CP zYYzr_zmT~5t~2uxvP%kb`CBbX%oR?;AhvKru5who00!${Rk=NP;56E>4=nq^^0&tF zw_q8Y8iNq#fTOvQ`&OJ;b7j#?-8v9s@72`6MlzJbhN)YNG4@_f4Q!klPhrEZEF zUQI2-X9+e;-8va#@5R)TTw{iTMZ20Sr(tUNSzdqc)0>xx?GraIj!qt&>ul~v^JS$~ z`ku76w>MV3ci)vm^GDeZr24tSA>zSJIk;`qOb?DbP2b);ICt$IICx=5s~k_-8sP#N zck__g9zh|C03gp}@?69-y0kn`$TK=FLJ*&wJi|Yvz3{=K`~&QXHCJx8zJYbf%KLK% z!Z&fZ*DxfRXbKyB1LKp&E&_WmVMwMMDQxr&jCvk>uYCiOBMham(Kj$6dF)B>E*pl* z$BEU_9-DL!7E|JugRq!U?rSNU-q(?l-q+`%s{9{xQ1K(tVhLs&xAozI!RYHRv%x5w zcZv5~{CZ^R`RR86M`s5*pV+u-QfTTDGNuHFlA{gTU0j~Z4=$H@8S&z<;E$ON*5WZG zc1Ysop$~5|xtH=@yie=rZ*?E9mSTXUKC1jHp9ObVVuahDD7}mqM&%7!1t?nO9?X14 zyG}rD%ye<6QSPd03eOUu3mSGncm%Z~*23;fo;N$kMQAd&YPY&aWC53p3`+dtWr*)T zgZSI#LL6VWBXQ+b7;Y5(o>PYYAu)ZaB34;AD}A%fpw_(A{R3E(8smVN9?b}P*T7P9 zFb4CZgfZHIR!aN@Gl;)oF2r}c9?7Jveasm0k%?0JZ;R=BW9&x5mKVO%7|#WZQhF<5 zdfpgcot2)rW$YP~cp;#a*nTyIdb*kV_u4X)ex^PKhen-fSh|br1u1mhpX@B#btgdu zUH5^1NY8$f=l_MLp8|)uMIOnooHv!ko9kBkB*~+0R08I*@#R$kH02dbVBko;n9m|^(!pIOQZG|jXEOPVXQNBxd#gK84X@J%fs=K?$ z*<>thogYfiNh0x^WY*u@4U!$0OW+^Zd5h&*5zS6IwMqwdnn8B2 zbjn&BZAm)SN(XhC(ZYMB(^t;sXe839P&%k1O}AG%bf#r}Cc-aAM%bRpk0Dhb 
zN2_tb_&3uzDGBsE>N1*>1$7iVLEyXFxoi3Go)xJAh3i z5?jrwfSK4&4cKxEJ9OkST{K-@FHu1oR#qXZaoR%ZaZN%V)yjOV#iJUFk4T&fsxu$! zmFNDA8Jb8%yir2H;~o z9?hZ~1SiDLCX&!VAD}sfvd0Bzqe9t119YWA+j!|+E*Hk93AFe)+Eh8{;e~w$R%P7F zw%ccAm8X|#@3XRs*-Mk{v$81@SC%f#_)hCZK}!oo#d49s$E{j#lD$?oWlA+KPuY|y z)x3OVQzot~aYaTB8|-J81Vv>=kM$+(yVs${lquD`JY`d+RP*wcO_{iIJT6(w#9Eqs zeNblRxMa%2?Yq~?>&k0hp0X)Zs(Ja!rc7Kp9`Q_>SWAgYCeM0^U&W#g&b+IUJM5?jsL#mh|W{Q|b!!p&qnrRnl|Ne-DZfGWLsJWY(4 zwJ23%#p9{!9FM0mZYK8x{C;FpE~>h7hfa2M@f%?m3;lBNOxNOz*t=~&3l!aPm;(~n zISK$O`2xN)s*ClYhvggDetgxmD8(axQd_owh0IW{b8CZpUMOrsA%NR)#Y%PO);4#b zFH{~NNvtL}&3DQ8I*jlbbDx%MK*bdLQWIC9pB6P;_!hwm2NFG2o6DsOL?R*A=6*#! zRYSL{q5Oi5Z7nomO8Bc2(o;gUub&7?qGG9uY=hwQue#7S#iW9u)2acR&|1hW6ow2# z#&bz(8tV*(G(%=Ggw-1ixrPjta!Nv4yc)Mc3b%9HG4khklJXA0De)io%Y-g8KsrPb zaVxnHMU#6Gb4;s1OkHCXxQI0bxa#IirbqCT-p6XLesB^}p;o80@b!X>-KIs5zt`muWo-iu=w z!a8EEuxt-bJGgTmG#HL_3+2wOO|n4tB|V_ihk0TWyaVT+xbR|2h1cCH&xvK7-EJ`n z>MD=Q<38nUnS3ge`_&(b3=vgq1CI$f4f>7Pqo5P`bjjz+)3}&mt_Yd|+<{(*g01cC zcISCy0~LlP!Ej%tl34wS!IQ9zo8)WeTIJ$}BG9nF^IdYt%Dqn^@_IN{gYefIzOZZ* zOSbNT2Si!E4`#~AZ_rg7-%IQ(6?-7yxf^9otI$OdR*+cIr9NP<$J1(Vu`K&V!%d1R z_2lasUE(G+n;VZDnO=0_R(HD4H_LU7wt3m&#%*w~fqW5FIe_AKIxOnGjYO;d`4;s< zA3^mj6$>|R6JJFrz5G}SrFNbNFHUm0CoVoJr>Vi52NSDPCw9&OjxLNR&j(?DcR(=W z_3_<57xsRAd_9^7-C&(u@W#G0*vY%Zw#cMpef&6m=o2q~I`840S`qt??w$8bUGZ4d zN0oEjPPO* zFV!0h;7J)b|8C!UKahw$t_ujIG_mfTr1{o;(%kJnuQX%5pI3%#Snl?ESC^q&M(@)= zyi`W4bth%qvUA^hKf(du2f<7C*J|cs=fifI0yo zrgAGJ#TvjP#sqm}dex6-pz>MBpf_iW_AI0};Ta@YF64UfJehkI=qN3@@5jB+k^4T} z3k|v7gnPl4`ySi_7dhAp?38kJuEygy9t9OcFw?RzJPW!!*P^_LXCWcab$A}avxp$i zslmN`dmVmby1kIbyEFciF=bqnWaRM69->#=JD&Cm%VedomOWD%%g+xQgFjqLoNyjU z{9*g;GB#w#J0~mE5qe7qryW7Qpeycndi?%rBSK& z*VVD2Z|OGQz;Q!0xZfi7DSvRr)}u(0@7jjHaH_>WNjteF8!X(oO&ky2ek?xlDIV$1 zzacEx`PJ}#49@al4){R{vzmJ}AQl41`K%N+x${C{lNI8MH!}qDG$|}O)9d@!?EDT0 zBxmxw*Turj2p)V{u z<5aZ$X@TwYe0r9iANyr{nPhTeTa5@hY4(Omo$1PGDnc>IqW<-AL$X$z&c1uak=G>8ZKQ(tGy!jN$g6@?VV#8^Y zHy@E6VtxCfg{mqIb6@K_XG#(jR&2uh&SWT%7Ka6G8Rs1m6a$7VvouJdJ3ADJJEYfl 
zPV|MP>pOLfv5EDaZ!?GCkRh7 zxMOO+&{RY%x>ZY3)V?Dl6uwoFXoX`^)QSmem!zm^Ns8KqGpU(>SexRPIb+#gCe)%~ zaYwfm@X@Ya@QXcY_0D0rtEjo;Y;l3}39s35an%%Z0;V^0C2)8u-<*KsR_r`X{@@={ z2_e&2L^dHeJHIUHjTFT6@kLbx<3aWa^Wip;a3;%3HKR%y>Txkzy6R`XgQl$5{cvtm zuBF(ATko`$TVZ8!a^KF47*Kb)j+ zVQy-lWN3!F^ffaX9$BItswS9SSd`S@+!rC(&QJ|s&92z_?h@6EQ{mM!ISoRiv}&lF zQcb!2;pB4Mjzy!ZUg}FQu-Mt%hdDv12V^vqB93(WD@{T;5$(?_F&E|#Gc<8)y(a)wamFmW_8*-HMSMTaR zq&mv(k~N_0uASGx2TMs^L@(6J9WSXy`c6NVFIwZQJO1KDzs`2IUw8KQn#=g9$?UF7 z*1eR1Tu}u0nqXtME zrslF^fQ0w^QVOMrY+qPiu`_R^MPs3Gyr*z~O@ZpjyrPs0>;gsd*}FHk->qiV4~)SD!RyHVCw7cmB6jZc$2$R9%h+OG$P+S za70}+neR!)gLKO^-%3KrrbZ=|V~*z<15mOx0XBL5Qd3f_B9bK(U~lc2Oh_zClZ>`9 zpr?{CH+^@CS)9VT%Sgf!F`u^ms{rH7j5(dp(3;#@7FL~%fkXB&d=i{kw^)nM`(lyF zPcUr#7)n-E8dB08<@Ptv4C1QBn#~s7WGE1$!IDUEFT6b#*K3WdCB|omLZANvlLl;a zCrV*~2BfL2pvjH>+DMjWv~LaqcwRyd21R>(Qm{f+>kHc4zoZMiG22`%$yM##Qr4KV zompF(PIu$?6G6S|zYT9+_I#KdN%0OaeNA+AJzh_BLfVt69Qup$Hg$i#EiRa8&pV~C zoITsT0HyY<2?ETt=gS4FoIP(#7bNUSK2@I?p4Fa3eDXKB0vgXZr?4oY`SyS$Z_(1% zk%*r{^b0UQ*vCunCRQ1gi+c$c(~*7|O zP2tyTt|}GWB&UNVw$Td1Lr4wW_wJx>Ym^#6C#0w$;)rdeg8QVF)N!40km~+a($~W9 zkhEc8P)gh`yvqZSwUp($Qo+3j_@iBi;8sSa&SAN$uPMAsMv$nBu5o@rfRaUwOjmSH z)^v|VL>QT@>?~H~HrW@hX4G9+TDMZ`DmJ&Cgz(OfVaVFv^=a4;nRZ%|Q)>#6uGMVm zm15+Flw=D&sa2^T*7A_ks|GrY)!n30vxS$D>4pB8*duo>OcFSb1WIM;f-Ec{^s zOZL9XvF_}BmK#+oO`Q&aU^@=t1T5{uPHW+SJouB~nCHgG1>Uw^(Rw+$LcR!^Ulk zHT^w3exEqxGvED$_$yZk8{9KAGRqfcn%sfmq0L<`)k>0^I6vCBOM*`KxX-ZA^fg=B z?{nY7L7)?_@9ugfeFMPFy>ffpjWt`E?>p+okvl7o8|&^m10a#W6;YjOVZN&7ZFw+u_IE#Ok&S~f|wVXea5h?>pg?q>m6m*;_8)hA}icTIp=jggSaoI6HIXvIMm zgG5gtAddKTT2kTlI5t|YV~ zzpo}FE)FE>V-gIX`ud3{u}DbV%F6mZw`i~d^@YS~fyDBd1S4pkFNb&%csZ@YU9Kdw z;^0an@#kJBQW(w=9UoPawDufbkRVKR;AwY83Wj8eb(`GtQgBI8ghrcty(Vsw_6r^J zOLv*D^Lr3TS$%)8Z^cAaXAf#O(-aGPj;(#8T?^50Pys`L++hf~01s&SKFsU|l8*lZ zNyq<^iaz+q731J95C(s_*;#<#+p}LlV3o%p03!dJ@q{k+1w?Hrt4W1@g;?0negVPy z3li|SH7y>ID!uUoIxwvRY7moNw=UUqqUtvPdsgV>H$j&c{PUG5k_Pe!Izq zmaV3E67Ap5Xy1yTVI7v9kSp{`nY%&q6nXo;zz*WxgtcfT*I8VUC*7Sr>K-1|RWlpW 
zpJp($LC$Q9vVODqtut%s{3liGB{OTa%%t^Hsg;m&Gpdxf%RFrH4%iI7>J{>~D3)+( z0+;f(sQU6Z%Z>*Xh<+FkuE&>sll#5+-kt{UeCvbV**`)Pta4-@#Cn$|OfNk7IUrZL z9Uk&W_YeOM!f_->#Q){edh!0uvE=TPWeKkck#ceWG$u{U1$Kg^L%)$Qzrb$bCiilJ zWj(&N4^AQR$=!R4pzAhlVNgJF!l&)W0<{xAwk`TL!Uh?~^SiOG^+z0baAzxYgCN=) zl(&zPWFtYozWe#l2_<@9^d7QTQO>_Zwlr_1ZdE7$ciP;#rOCpL+r&07`s?tmU^_qM z&p*M;5|IkSYec!Dq}A=d3e*va^f*X(-6o!&)FSTMet=|8(b8)ML;`PeCqV26u#6Y? zCC;rB1l3WZ`E+5pLenUM9_L)DcS;bo7CN3(aOeMwpuR-qaaJB?<#-k&72L_3;wqVV zZC@C8YRhI)`)G<9huy*;skwwu8~xgTlG^A^Gh{m{MU5k{QTuF)nwHQWUbcUkN$t0H zl7VSza~jIFDn(68Qq+#0No`Y#8pmam?awUncGHp+wO`&iL({WT)U?B@-3C(Bv?N9C zvYFH#n@nnI^BrpXR-uOVSn-_Am>P|CJ%*(CF~)ki!`lAQjNyayKJ&2;$pO9d3sNCh zsJi#x(Fd@SC@bQZu2Fv%;xIkk@5919u3fa+e&9&0z>70+nc5tIGHw~i!1w{78x#3p zf@R?iGg%%0maCWyR%}zZ-Y~PNb)cLmt(Tj_~ zQ#G5HRByar@&OzG+PxUlu-(X`g@1Nx=T8u41dOwvBd!`nf>g7)dPo++j~gTGp_%Rh?d*=&qDkC*Fxk9ca?k?*nc?&KPPZxP3L<72WS!(3>fW` zi)uRcB!AVnah8c|g?P~v-+ew1_wsF`D*4_8P6$ZCbfT=aPs*2|?_VtQ&HLpMA1~n% z{heQj9%gWyK~U(#&WiypVfZTHU8p*vgkk4hADK7L{{(eg<}YFLEMY=vPp`Tkrad6L z3;O~ofe{4uU}YQz#y$c@Pkdf2m0vbtVD_^oyyW3>_z;z=eGY)n-m~XO6v?p*ew%86 zJ5#_Tt|~5?u%aq=$8juL5EqSG(IR)SVl9e`U}_TF-%Tsl!nmktMN3^liz?%yAuFnJ zTeWDvxM;wN4svhMqD)-WZ$-=8OQgu_)MwyBU8`aQoiW){u z*pl`hK1&aG!-AL8b-DBzwbK#qJ}rek?S*`JUCq_m4m{Vh9-qy6ILT9UHC|OlpY?Az zYcaDcIp|Y!H9st>y&A7w$F*w{#Mbj(YZH?(Kx!wD@ve1@T%AB(mp~?=fE;KEa)MSd zL8~G`YfXX{=>}SXG-(CWLMuUfC_#Ffab7Lblpswufpp+R(t#662Tnpd!D%4D$rc)< zsmw{#n&cWd1;*qQ7?V?AOiqEZa7r-lPcW864@NmKZcM0{ll#Cp$U(+I4l)jMka3WM zjDsA)I3Y)0LJnK3p#fGVo0Km`d4@kdhJyDTC0I zG6+p6gV2;R2u&%2(3CO=O(}!Wlrjh{QYM7%P6$1wo;LxIMJk*nMC~9nr3^w-${;kQ z3_?@NAT*^6LQ~2hG^GqeQ_3K;NEr(quVadEUQkLF1f^s_P)ZgArDQ=+N)`mAWI<3$ z76heaK~PHO10IFBwG%l2Mc(8AS<_QIsGVMG2Bolpq;J36fD1FIisSfQquh z-it!sK@{>1qL6nGg}j3(R1( zp11uBc7*p90y9z%%t$>jBlW#t5n zSs~%c5kHV4ejrDDPcGlpt>Ord$0tI-CxXY%>knDUhdKiSYj^~Fp-D*!R7v71O#&L_ z3vHZd*oStoI9`iHXL=cOlXb+>OGZD!;uDqv%SP^8h8??l34ZdXi{(1{m5?N%CMCp6 z=_ghQR6?~eA>Kv$rg@T3qY~n!^xbj-l~6@Yh<8!fl;9-^EmuOkWVlBG@pV`(5U7NR 
zN7Z;2bxoEMs#ik1WVl}fLOOsI2vkDEBO%^JUE`&MYLyT#8SdSHkcR65fl7#YB*eQ& zH++(+RVX1|G8(OX&?*q9go;v)gm@QeFi8^9Gp}8|WVFBgZeJi!2@S-Ac!wQa0VD}c z;s_i3gqMu=Hs9$A1S%ooQ6k>i&$lNDjVmEuGTKjkcPS94gnD9$co%60NfIh5Azm{6 z)luyv1p<{&M@)!!`s!#(wE-o>OUD0n+j>EZ3Ir;l4KX3!*(Z*Y60ISQzRa4@9^^l_ zqrW<>gz93?=3S(pa!3-&DIs1m+Jo|Xi3$WNq3W0r?;`y|Lz2)+CB#ccdr)34QGq}u zG##tPyGVOcl2C&Z;w7U!D6f~OK%f$GF(KYX+KZBe>XZ;K8SO!Nz2HktQl^9^VnV!& zv==1_RVyK0GTMWD#!(03md>>n@~=? zXo1O=7P<}9OtzHJ1t~DuC@tBj++<_24JjL5T8Lz_QD`FZu7&<%vP~)yFIr%-O=tlx zT41uJgf4Uh*(fyG@J^9THcD%qyM;(58>Jt$u(ZIsZ@OP^LN)8YY$=tW;i&|i^rG@SbvZaJBNP)>lp~;4KO3h@Ww3Y$35Xoet&_v>03kPJA zjajOFms1N&wzSaFWM;RN&;=SVvvw^o+0sG}1=&(U z7o@;sqtIl-JEdl_QCjcTL^9bZG?94M!fe}QtCu;5tr}>7$(9zn7-UNcU62BkjY5+R z@06O!mNvE^nQRoANW2@RY)>wRz(8mfx>EFawirUkNk;57q6|LxEte;!)wIQg*dl48 zI<*lm{|nRVzps$+#8yV>pGtx*`4@O5L!gpq(uU(@KFg{MkxC+`BzT$6Y7t|ql4w*C zyv%1!c3SY1#7ZT>%Y2raIHV|vUE{s8tfYq!&-{4RcoGx;-&C{PE7#$Z*dd@aE!M{yRtZY@y)fM$!6!pK7R2|k8emN+)_w^r zfOWrQWO{+eg3&!}e-DHB9(E8YIi-|IPy$%<@sa5&j|HQ9*isLJ_`0zYoFFGVw38$` z9ecr315CVNLOCYA;84_1aPWe<0E@lgumBS;P$y1#0e&b`_h?3@V=s7GfQc7y>=;RT z!4asT;OW>4o*rPa7d#`t4)j(y66gQrSZMN-26p~mzF)zEyK*peN^cu0b0d@0xdY^# zxfr$pu-XI+zbhJECJ660{l)a;NOA@|>Sp7i-@Dqlt5|xzH7_3p!x`5Tq>uXZpc4Y( ziOsjO>n6^)j$2WcdxXs#j%&)t#)!4ZJzufnvzj~MFC z7R5(Ud7M=&afCZpi{y-J2kJZbh%DO~_2fa;1#Wo>gXBThDz{bel1D$+i%In8=OXu@ zmd2(_9%S9$-J+$j^^yl!m%8;@8XGWqkhR8rR!d_WCJ(Y6e+JSd}qbOP9KXv@}?iu~|!N-04~xtjc(+r3blN zFSTIT#?oC->uyz5sP)Zls`c3qlHphVO_ zshFnJrBDSo5^K|LxE z)T07Hy#diDsMnuXuUpkq*HgBQF`sd%8>ArHAO+b5DabZRLAF5(vJF!7rKRXlDb&qX zij`?8f>4wq2t_G^P?RDFMJa+%lp+X4DS}X&(n7VVP};jxsO4#)f{YX@$Vj1rj1(%! 
zNTGs^6e`F_p@NJQD#+NAmT`m1$n?efUPD^OATnhPB2&g7GGz=RQ^p`NWeg%y#vn3f z3?fs;AaZwF%Mj4RTY^AU6dKa#PSCHw6uHQ_vtc1@&_K zpDRG_6$GVZK~PE-1f^s_P)ZgArDQ=+N)`mAWI<3$76heaUQqn%nGKvx;qbNZ#iV`HFC_yrc;w8&>jBlWCM72%JUQYAa>Nhhi0{cEXHAk%j+rwv`OO4&H#^xoXhspiX@&bW+{*QQEO6HyIj3g!6q=b0MXteS{ zt3aRAJg3-&vuzV9Rj0+mo*Oo(@wCxq`?^@WI>;%j@MdC#Z>;%jjE#O59>{L^e7Vy%-GlzBpCMN}C!#hQ?6EKa6 z#ET-?37C~J67O25g~_&Fk$BMplPxWDHL96xDWSPdXtGgS%BFIYjmbu7&9_>JWU^6O zBB|VL0F!Nl%En6z3rx1O(6vFfl+fG-HQ6XM+3-%OnQWBS#+Vi&nQRm~Y3w?1GTGWx zHeOojUnW~xXu&YMrG)023?>_eCL7)O{DzJ*988>J7nx|_*Hp~;4KO3h@Wv_6Ou$z-F@ zMB-fw(lP;iZL0CL4t&67O1=ZJTUEio}Z+m}~=Dz>5}` zY$>7n!l%hbp~;4Kie$1;T6>eWg-9kFr6m&YMk(8qiy>-!x)Kf@ND$Txy`(AsYVR=q z6$fm3OmYmF(lLaW^qY$~ixN~L9zB6~PrjV3iQnE|&R!>uk~H>THemmKg#O z%Y0Tw>0L_VFZE1@KqcW+3SQ>^o|QSbl9*Hyyv*mD1B|IkVnRvqGM`ted>|#JG~+ zWj-G)U_??9j8ixaC6wm#9yY_Ak|?Sayv!#9ZVQSGDG6TY{t~x+FhQjlP!hb%=Tiym zgB1Nrf|vO$C9^}T6n#p9m$~QO|ChV6eU?Dgn^yIBd9wea%itKJnB>X+OI!S%_FpE_ z;Y_O>2R2ZAq*V?!8>|*bN_9M%aj@Bjsno${gT+T$g{GQA(u;n-!cs*laL4*xapHr3af8oIcoWu++h3gUP|>4y9Cjuvx+BgUtp@9c*qR zdVIQ_2b&ERdx0EmHkf$92IW|Kuvx+BgUtquy+95&8%(@ly;4dZZkyJZVDM3Pl`&-LOYJ}YVAz~w+;Z-<36aUJ@~9VJ}o~y z0NvaN6i@B}YLbhgu2odj=J)B{ONl+mD){k7LB^-?Axp2^&cKfkput`C3Qo#LMPDq0cLHe_@_zh2CaNAb}u~+WFK$RQbASK_^61hi-XCRjN zvX+d~5~BIABrNVy1Az>?5$3WE75OCz%VB%z!X3j75U+?imGcLrusuNPzf9px z2|CpRKStpj5^zfN6sw=0!{2Cm2uQFHo5AFHiqN@3;o}K9`wM)#!lx2+NdFrOuPv>Q zA72srJqbAF6x#v5ycDj#a8h+@lk`*kpZ@J@>CE_u`T74LA2BcYz5#x3BL3L5{MLm1 z7`nUfO4wU}WT@o#0_&wdew{+TjXj#Z8)8-9HzH5TK3G$Dn#ibBrrp28QNxc%-X?Bq zMY*Wixl5Hny@Z~Ng@}(oO=5Vy27%up+Jw)f`}5KADg`8zuSnl0;N*9=ofTEN3$!Rc z8I8}WD%K+RIW3BhfXi>DlOWE2Yf*gu8^`{%XsKJKMe*TpzT_Zc)VQ;>C_eekmmH+% zAonZzd5grcZ@%OpMa$eaDY7%)e8EA$hq}iVBR=rW7aXMMaQ8_q3Oe|d?&sr38>ae0?-2qw@j67px`B6hFBM zmi~1IDSfdkXlbmdf89Y!U*g`Qr5Wtx^SBw7@-6h1f*|VJD_$Fsl`~wlqOH2pj9BFty|ucnx^fudS{v`M{2pZJ1D!t55s=B? 
z#S)OoU}6;nWHNNC43LbtDuaAjT*P8U<4$?VT_vOW%mV$W-VX}7z}*WZAmeTV@o>3u z?@{hzDyeAPy{QDG+>@qi0@U~pfbRfs1!P)#Q3PasO_YLwjPEApE3GC_kwA^FH;jOcZ)_O(>Thz1vI2FGGV+pv@?wDatw_8Bi0X9aB5ns# zm^#+!8;BXWiMUGqKBHg`DCPie`%;*b93}Pb!i;sZs6Z$e)=`~25&zKJlG2VVCNCN5 zdhy$X!ZWsS1lLMp78R2htYwMT))#T+O3IXT+;x$~OPwT%0VTl;7NV2{FY&xlUYaCc z=*|%gnxrzVRCvk2M&jx5%cG*gkSdR6fI*X(PBD4Oz(x`oBLCW_V8-4M!Kg{h3B}|k zV?iu_eN!+K?$MpX98*kQG8R7K7cT`f?nszGl^SeFk$J)5pBju890ip!-9{lE4aPfs zS`js^XN*D1F?hFttY`SnA|UG-)>#SyvYrt;pW<52@Lg9bTF;1m4WCT})cB5r?>P7h z$oP6u1Y~?o6uyyTeB;(rT;uDzuHYKqxUpJjIO973z7yapmN33v6ag7u6NN8d8Q-{@ zDz5SMh7nxj8w``}Fs74WI+<(9b{Iz!BHLl?OoG)N#%oMH6z!7jFh*vH)g8tqm?7J3 zTpYMKpmZA-&$ZjQ7}wR^#${Z&pcS&+#>Mnr-ECZg(%By4G6gPEK_fz7i$#>K1KV_af&N3-&)<`wXI{JLY}d)LchU37e2GSE@<#21hRs!?T1nM?t5 zGKE>Mn7m}r@Y48EfscwgshE?%98Y1^Dkd)(bP?$$@!Ry$F=O-ny+wSN(z8MJJ*cg49p=NI{tEjSSW7#2)0XNRx2hi84Q3jILF@{5X{&k z{=R}cT>Nj`OLTLAJzeOo;_$MsOs?N>=97%{84fc%`>Mpg^1e-mGoiBaviH>GZ{yt( z`=9jcC%szcH0lTQ@oMp%#AkNsOYtTIwxS5A#qu7)VCg0ll8W@FbqI(z9l?WRAWg8e28jU!>3q1piP9s_ooSOeQk_!XcTAJ4%! 
zE5C0XHkb%V?k*94ttGr_JL7{n7+lj|%OLD(VAT8Dp`Xw7in2;q}tPYh05Vsh%iU^Pv zgEOVJ3b9E<2iwn^5ud&xKw4}QDwpgHVFs6IF>DMG5Vsh1g$PJ(3CR-d+V3~#^1qRV z*zMWf72C59RpMtZ>yhNb6+8J8mcX2jR^wML52*$$Kawj{o%!EbXU4By9{0nMN2>AT zl@A^DL-`3y{A}cE{J!Pt>8M$L6!Sn{7S^6P(z&y8 z0E_1BNbj;^@{^sPtZ#tLi1|*Hf|ym46t&;;b80xoFuJN#h`_m_uCs^#%MQFjn%r zyMD#>d_K5S4g#ia9@%OEe^1jU$h+NhUL`X6-K9n*0aedFE>n@;>MlKk$OPhtnD<=3 zs(iFxZ2r*M0@9re{K%_d7ltK9xvj5diGuWpmtiz_?wd$MlbWjG2H_(=^875alAnXN z-+jK-EoW8Wj6OYxfghL7cO8MRqqz4{8vb~+Fo}8zecIb8g3x#0s}(%%bZKd>@WNMt zqkHIWl#N1|)DU3<`RSJ7gX>s>t1q!$9irD~&n;6@FC{83e|;QWn*{XZGg(>gp_tS{ z$rPH$ed#p>$vw89oPefI6Cdr>*5l{9Yc}VOgrAEy4N4UtuaugfT8p2Yg(qK(c|?Vq z{Fpf8@Fo}qOjzMJ7qN1^f~3TKay>y-OG@0Stut!9Yi6y(w3g5XdD7Y}wNz@)am70Y zRJG*fv}Z3C&)&FA{1?u>9FiutjN;$B9@l>HlNTns;BZd7DX&h2clPj9v$;ZhTi1)E zY?O=TKt<;nUI|EjP3Id)iM79Ts@xyXwepWI!MMWe__0~^N3C$=EENWGg6z6Bp{~3CWq9^-H`ZgAxv+RJe&g6LY|W6q zs-|y%I&yjWqlzSjyH*eittj8^J_AefKB5KTQDQ9peos_& z;Ynp1NE!eQbXixrf&BNqYGwC2s(PLwGemMj80}LnUX5K#T-?`DPaLzW2J}X|jsQ&l z+sWU*^`VOx?xvTDn8~=~o`05rbZ7egaB&0{X?4_MPlawjUK1W|bN>-`F^xGdOCd=Z z5`(xm5Q&e4;SQ1W2ih;Sd+UjS!V3q!yr9Az@Qo0^&iW^bDPx5iwsW*07xmT((GO^l zCHG|Mpp@o1w>P+Et+7F@Wi=v}x>KYi1@csHe0^z&+rqOBYV9N=ijnKw+TeDr4oID& zP3{JXhH=%loN_f5#JCn1{FUkBscydOF$p9A#Vvlzo;~>y{Ejx@i^uZ%7I$3U;BGq) z!JY4r=C__$Tzunks3VRQ@h`sSPI^_JJTdLq;(_+@gpJ$8f8pK(A5FV=>G$^jCcnQe zDURd~l6PkR6n>2xq2y1CnZ_c_El&M7ci3cS{k z<#`OHK~nA;Git(9NaJ#5A?h1~)Cyqf=xTGe--SI(`6geSfq z$(-8V_7QRG@V&>*UG+TdkZ+w5XUlOF6O%oz6yu|epO!EwUww;O9kM)RM3Q~hYSfKeKYuK?T_~RuS z&5P{Q#vo9AH%kXuiqli5*1Gz&(5SV-PIzUKToDn zPr`Jy4i3u2gZte5$VcGQa5MyI9P|RR`z2w6av!vBlR@YEEa9*O=b&8*%Rc-CILFtt zWI&qGN)W=~o$I7DVktT3*te|ZN`NmSxR;D{s0p}j?)6HtUevvo`RVXF7$@I#NwB}G zTqp7m>tP?2NQREzy;;C#(I4|?x&)^O(ES!&l4eJd~Y9%ny+*3lN=(dx*g{WlV_|FQ^5r@*f;v~Og+AT z$~1K8r%A($un8jcfPasB&q=~kf+_%=+w0wV1kkw#q!})yZQ1s<@~r2BMo|C^hl!f}Z&L!!b#$WPQW~$5;ldp<6$uxup5JL=jZ~z~?~zqe{d|q${Fit&|L% zPR0rnGPk-a&DRuP=Y=4CS+b&vW0UEsX>NnD~{!FIoCW<3?YecYfwCA2qXF{HLRYTAq-D5K|YH^C6hd#HTY 
zVX)l~G%B0QtaT8vdW9)?Pq77WajTer^lC*t{0fTy<|!72i&C+$>}a9-9)i6MIzY)i z_EIVKnYc=-1H%`UveXiHWJ@tBYQ9qF)p(>lqhWC=_Y@Udc@FjnYc0X?tp+#6)f|t< zN^xnEyGculWCKxL+UBm2(x~bW$CT{}dR*Ea*fv|dsXg%_e|+)U2I;i67IoVPF!kq& z%9ZWEx<^!-tGqT`g}JFq4G!%ti0 zv2rY{$g*mt2Qv87SdB+oTpw%kxE>f%wC_9uW&ssw66l05mCpLy(o*I=)7 zpvp`yflHB2wbC(l_euwP&un`53h7iR9n=xq?3IpKV@7+zMx-+>BSO@SI@sFCkHJ3U zu+L7|XSLY}HYzMZuvn6V%hURr$|x#)sCZ$TYa z)?bz&wwlv^YGMZVQvXPVER(%lEgscae5}Kx z>deP_%o`<~%l3!nla1Ze-Pt|4nl1RFzb$}`LwBmbx2=-Id3ry-;83{xkxZ8Kba;c? z5+0h|e}{)QcakI`98|(uhUcHde6<_F6N3|GtXNbfJq$f6RI{c1K6e>B@5Jl7yWWEq zhPAPH+$*=o-B`1w`M#rW#OCC2W8GcvX8Xgm8@T8yjb`8WmMJ_>z}9RTmReiDrNX_K z^@knZdQejHn$6?x{Q=n;A47hz4FtqBJONaya9TdBmq8YTzzYHa^DP`hn?ik%5MTBr zki_Ht8!{i(ikvcw4EI4kc(N$KUL@H-)p`Nu8P7e5g+k(eN(Xhg74*AV3{W?4z3dpAkilz{EVm-NgFb_ z6TsfXxMJKo?dno6=^IkF$u)BI4ZTd$Ve>wBrjXsZODMl5KF47TRVv87=Elz|<#6Z_y)13eff zS}p3LW6Ris!w7m0)p4{y@1X{USG}*F@jjOJV0a?eFK)%;F05ZSuon;NrB^QQpI<0m zjs%;F87y*dC5&qq8_+@A1J9Q_I?=-v^4EOSlvY=_8l~0=#L_)Q+iCiaBGyW01LA+C zA3v9VyhO>ea=eHjb6R&3*H--GRHoSOGEPC8`_Mt)>l%&Kit)!rx6 zxphJ?T|Wv6b8Kiy(1m@^dLG+pKqXx+!tZ{j1h}pfao;&vneRGU%A%^`q2dD#ue{KDH4(}5VKNm|N*}HKfBA>l`=WXHw66+*l%KL+6~qvS#Bnm$**=MF&!eF0Xm;Z5Z5!S)n!WkmZ>wK1njL?^bKm+equCoS zdVBqaJH^sqG3j1`5sTAB{=CXUw}Q_%OBrV4)$Un*Mu+6{5_d44Z@{y2i+DdK_o%zj z?ZyC_&u+8lYIhHxi}t+4-O6XINwR*e8|CwmJs;w}&F5?Exz2r+&*&%lN6eIs>9$Pv z=6v?%sQt_IDHC_Oph&(N2UiTo-w^H zlf5CIy&-D9VnzE6%Jg@LLiy|!QTB>V_NsjLs;K?C7426k)1R>Hx+t5*Bhzn|$0O5E zDPAzIwkeoeo3)Vv8iGJR=zJTg5>@q&4^P2ot^rWLHvPe)QsVP4bp zZiEk{3N&v5k4(o{HUp1LB_9P|Ft4^L9Ld@=Vg+?1^@4@yPT= zif2sGWwcGl@W^zl;u+H&43Rr_o+7LL z2$50igd0Z?4{jhlFNs=j(EL^F*G8@Vt|ycq6186GI>K{Z)Y|7Z zgy$oo)+^jPJfB!ZJl;@}Pw>TVoEwCmGC@lwq!O?5?T_VKr=#p+7)X9L88tuloqA9@ z9uL3E-?(@zS>5NawG!WD{19;qtq)#{*b*#|UsbA;5&1V@i?Jyb(VsFJB3gU4_9T{uiJw2k`?aRgzm^^ zcVP6%Ac~Q04WZhrhcfMBO7~%KUy<#{G9^Z_4Ej3h%J)82*w{9aY42CMKPGfc`eXYw znf7f;_uGVCna^H{At{57yI5qvK>|?F;B<4Q{Ys@95xNcI3Wlf*LLKFm_`1RduD%Q+ zE9HGNp&Dt%Gwpp!_e?^I68+kX8N@BhyMj=S%>$YCqSAdjp?z4drG3Yh?jk}pRzo+l 
z@BIiu33(@!?(KxeGMKzSAT+k`4NCWFLSx;`zMmm9mUo-d?IctK8rs<8{U<_W`wl7H zHxL@@X7)Xe(AXaal_flWrYo*@ExMrho| z<`2V!(!SX%I^6;c%sU%6`Iiv8!~GcLaD5B0V@~SZGeYb&?(5+hA-2!`dw52O-R276 z86mdl+QTzK>~-#+!ZSkbkXwuA5Moca^{l^fe4T!ikFPyG9@o#vx5X&3V}&#w6NJgr zh9n%grCxqcIlcTFcX24PWdonGpN`%g=jxj@#BS3Nd$C>y7-_lZq%rpjiMgX)*yY|M zcR42g7oV+0oOIvA5)zVG?jBeezKS5Y^u=g4jl`cJXu$7~Sv>9}^ol%*qpoxBB?SHd za(QHMJdYrBrXB-TyH^o}esGC_mbjG!p%Zi)sMbB3psVs-@6subkUPZH5ZdoI=PX)3 z%-O}2`7Ruima$%A_OA)W*=t#i0(8pVNC^66hdffgEd*Vc@A{Bc0<_xo6FTH~2rZC* zlF)1NT^FG)zWs~8up^G_$=xa09W{lU5oWOJBEgg4tOFP;vf8;Blo=-9Nod?*$F#e> zj?n%Plo=*pMyLiZ22av$AXGaygEHwJL}=V$uTi>>VQ5SQxhpkH{+3X*GJ-OLC+Ut8 z%Ff-+piH`wO02M{0E4plth7}E!3%6loH8n~{AhqpFD zI=r1s=oK;z5c^vAttT`da3+-Q{)8q1IOTniLt!k#xZ2?kLSy@mX}gUP8tb-A>3);Y zSl*)ASLRcRHn#BlZ-mD7y+-M7AT-u(K2YwM$h2;9zeGj!9-l&IS}%7$ z3?Rv(GdRa1`GM7vL^{)YiNl#N4?>_H%d~d8PXrKR{kBZ&#qPragm7QXw05`)0|*iS zy3C+-Ro5J#^%w^-gKYvmH9(KXXp=(8 zk}pTfpBY>)(0c;(BqaWsL5Vc(#sFQ3u`I)NZ};5*eF-K3nL!D@?#lw*xJ|-(GJUsv zypiDf=HqdF*^k=yc*FIGf5)~K$IIk+GxRs%&k5Yq|3~=$jmP_HcpqkU*htXve#ZQ% z)SmVGXy1L*&lReQ?_0nb8~5vmI^a!-`5W4Qd1?Ev!nnRFFAL=9yaP+~6RWUr?!NUN zcx30ccf?~f~@bWaEBD{$YFJPv}*_cLe+}S zVY#cVDKz2%r@f>!cRq{p!}pn7<*3UcE6(Bcok8=L{6IDwBG!>*5#H?l7GR}ht7{4` zBw5|SJ-;Bla?z5p1g$B6N@;BYtz1;~o(Iq|@Kb&I)S^!-_33>`h`$f5MBB;a3R-)vKzf_E!uGa7!Inx!;^i zoLZ?Bv;D;Vb7U+2P$BfMjeq~O1^AZi$$KD zHCl)K@_I?LrOF=+Y`x0$s`9hbyem|EUZP>T%Zg!nQk7RI9m4UASgDL3u~ZyhqTzaZ zTERChROLN*c@M2YxSsfe%2(f)Qn^A&@RDXrm9JA0>!j8k*iz;0z}BnW0DSO1Db2fD zdGittOGkkC@}w#^DjmY@Tq1syP;q#Ph8q+|o>b)}ibJ>_;{)3*_oh^?Rua6V*;3`} z*U)O~rPdtSQsrL75w>1saWwFLMVfcIFOx^IFRw1gyHVxkCH63RQk4%=oU~*A_zx+S zXJ<>5Z+Sx*m1k#5mA|Ms!q%%?uPVPL%{$$x7$}l``5dK_jw3l$nU~nHaxLzm@`g0;Y7y8U zt@GhafLrP%@?WHM)=8~6j;vH1UhsmVJS8J`qv8-A4^od{zb(O`lVW_MVgS{e15!mk8nNogswp6}ier&1y z4X>raV&%cR4(g!2FHZALcQ5)>vR7ZMbO?`KoDE%esHDnGiX;32+o#NxEmdARKekl) zw%1T)Ve4JIURC~oE@kd?_o7cFd-W%jPTJQRMHzp#e&z_hR&nZ8Wx2r8PoFDWs(kqT z*iz-+zM3ivKd-X%aPa=(Jt>vb-D|ywnH-@%uXG5X!%)dqey8FHKeV!3!1jc>vZcz$ 
z&W|ls{$mqW7Jgo3X>suW@870WPIoG*mK>q~OX(0EtDJ6SI&d-%`+(x49s88IvZcx= z&5tcrt_WT{?W_r2X>t&VhGq0qSsdvEm3nw6d{1 zYp!gm@@ey9OO=-fwqE51ZRMZdol?0%#pk8W2>k=>K|s4TDn2izb1I>m8lgX-IE2TJ z{l>YnrOIc|k1bU`Jh1gDOZ0|g-+?{TISrL;<(pPh<#nnuFQt|^Kd(51$7AFA zxw56o=gyBURX#SbrOM$Ce9e^q^<63D)4jS$6$vBs&sUXk@#Ru$_Br%_C{9}Wf0`>> zDu3Sm*i!lD1h&4tC9t5qAGkB6a=KU3hm#}pU9T$R;wx2UUgD{SJZZ1~hT_yotvE6G z&|KM4mMXs}u=Og}tICyW-t5kC-c27)j?nj=RmQ~|Rb^h{sfIkM%HLI-dR192 zuyiz;vUG)#;HAtQx@~@Jsq*Q8tykFsS#6qkwesdAJ(*>%e(X%Dyk5oUWuJ5CF~w<6 z9JxT{&;BZV@s7^9oX(f1}y7g;%MM~WSV!nFH@)F2)(!*??#oEm)No8 zNn82nIhrZ$*k8LNrSj}-sq!b41jU&{VnCJ83v9j0^{Vpo(!A4s*}_*Cp`WUB2#+Ti zIaQgL*s5uTm0oWlNRY0$Z=L3>t9kRcYSUsxmKe z9FeCj?90oQ&N>yJmwk@Vk7cQ{@Po?6_LkdID$mZADt|{w%#|%w?h0(Fa`+RZcy(He zboZi9C42P>rAT<};>*=wyu=F#@}w@l=kzkl-~Nk~^0Tw0^4BSexw575-GQxdZwV}D z?{#V3>F!0JN{-NtN+<1WD^+D);y@-(s`3)Wsgqi9{QTW*DV1kuOO>yG1+BJ5N$^tY zW+ZP*HB`A*aRlyFuE#x8eovZrg;1S+gkG(5(!SQHIK0GxOrBKb!xX1pYQW`RrwgjAv_Lb)pKP_m4AI2RbDTo=fIXKU#&R8)~hTn4wVbHrp%r0ROFHD%kL=1 zyHO;GS4zyo1gI*nP#nTz$3AqfY^m~8V;PlaXG@i@Q=Br6eFYAs%xS1(?7m3pq~l0V zRpupjYRj1U|zkZ%lxk3bs`|=uj zN@m*|l@8(Y5}*WU;P4V}-^i1yyjpRDA5=EBFPtk|s=WXF*rIlYyX9pxlkoE@*Ww;3 zUyDO?iB_%_!De6OzEtU~Q}KBz4V^+aHHSV&aR`q`=+o!QmMR}QKekl)*Ds~Y!j>wB zKQO#0|Gl53lu!5SCRId$Tj=7SP>O`lb%b6UqzG)!oGV)@fB5{^Qu*KFwJ|hftUP#2 z4@Z0dIQaRnL>BGrh{a;Fl@OUGqQB~$84rKDAz4`-+Q!lmR5&9i-WlNP$njc%LToKrM zl`W#|!n@|P_UiQ_W^#lc`Ujeh@Hq^XT;;x4afBaKHn#sfSGH97wE3~6%1Z-Vud=i_ zTKUnRrc_S%Wm7ry4`BTu;a>};uW*ZkO0G zfh|=&FR=A0OJD)-bN)YdUjtu9QSN=xB#?e_f)Y~c@uMi!>Lr?T8pvTeQa%b*P@>|sD)(BI*0j=^U}=IPh7UDr)dFOMl0J^EvVl=3ugXv! 
zOy&=ee)D+6ipejYBqonnteE_WDnoh3WY#!j|FqXJ*=@@r!*0tf(`37Fiv!zp=vGxm z(nH6-e7s`C&FYmUv;0wm+r#xN*c|{Z*brzg3lS9ede$ z#fr%29S#aD~TlsAHqqP~(PSCJNzT`k;J-A0A&R{b@_QPRiTLw_{VA`*N1 zgO2f)#ftH#ADCD%J`;&GCbNa3z8hTGZmSkowp(@2@dBdj;w=QGau2;qm7zSC%pZvT z!gm~#D~lDA=N*_>F?o6<)|i}AvQKbjOLMN=LqFUoCc7@4S0>9(N)LUjDw9(t^9Lq> zW3yv&MzN5eux0U1eM{`Z0~0GIzakQAOlAv*>@!{2Zu447P3#`}iJ4-uq>s0U{+ud9 zd8lRnKy2?O$K*O@hQobfZ_lAGJutCi@(GbxW3u+Jf-Bo?R9ZF@x5_x|*RM)3Ij{7U zpK$2OJ2ejbnkwTu_K$iTlPilA$6k71V#VasBeBNhoPv9yE8A^UI(#uvnLK)&nCylx zPnj$~p=0w-ncSnwP#$Voi~SWoqG0usmBot5%MVPfm^?2MYfNrZCST#o&M5WeC#8pe z0d)-rT&&cWpZ)F>dsvmByvVVi-RPKHS*)0R{eg)UlP`?K8k6gl$sclM*C~_bCyXP! zQ?-1E; z>*6iSN8~3AWV}@BeBNhoRYof zTaL*Ya;@A$&s2O|U(2g9@)HI!-YJv+bhMb9BUKp4jvcR9G5LPQVu@lQKl|M&wjvU1 zOm0GXzWT%c-teCt>u^6vdaqP}WtT9=8 z*tfpnnC!M?ZF1B@pPMF|6$;OgpU|;+r)qh!DkGj34n5BuuUIj8`zr)B*W}7##pK&m z8HzO~=WvZ$erUa8vfGw*_=Yz9@j8=_`e*J>!?^b>|Me;yDeX-_(=MASGk*28OnpnTI_qrD^^T? z_GM!7c*Tmzn^YOf6O*Gqkru!GPsez-RktbQqgH*D;wb6k?V;yHT0~+$I9{=0{MeBx zF5XP#QWw|7_y<*)(0IsZ3rBtLy5BL`ZPnVvqgH*b;^Vq_i;^urVIbq3YSnpFrh!zU z*iVdCteE`b5h*TSS*)1+h$=&|#>I2EhRM~g> z$*rnPj#Oa}UH+Pbbf%(MF?q^?i4~K7GDBdZSYxsdWG``LyRCXDHL+XurxYJaA8!x+ zR#k@bU^0Jz^gUm7Os->Q7)*q{J%@hTfr%B9pLwa6OnJs+);MGz=gM~5vX&jS<+~Lh zNe@kCg+gEX2_2hvs+O-(Wn9Pp+E*NtD~pv{ZagrtV)EEa#N-smZgyq6ZCQt}s4cHk zeBAKmDU;G5NIzCRR+I5{WgA&FBr;7rL_Rs94yjmhjGQ z%MU>VWJ~&Zd+1N8GL(l})?$bL!!fzCSTXsH2PRfbepw_|Opg9UTD-;8!fjsS%Jv-k z;j9=h>ErF8Z&hVn)xd4@|6>e0n6- zn5-j8hb!A{UQ3nwOjIV1P7{+QJ?ySLrLX*ikL!4+YPm<1X(CnFLr)p6SgGaZ2PRfb zo)?KVCTkD7G-tD<&_E z#2S;?X~5(M?{!SBQzpw#7)N-gYB{I)EFsl+d+5{dNfaddsHSpgw)H2T>fL4@eOlDw#>_6V^nC!OdrBu@%cVD3RNcwnt=p$7b%0n&l2Vz%@ zSFD))WR0jlUa?~Gy{Zi58I$Q~kp1Fk9FyI)EHdo2d`X(@JdFu!Ssd6Nhw;22WJ`MJ z*f)+>te8Aponmrjv10Q5stm;%lUd_1d73NRZOalb?9uj>ijNydJY}-{q|Bk4RT;`d zEo-rNe%e90vREG3&t zJj`ZpK0RLNt@U(LtW0UffFUy9I{&l)RC_FEZ`J{w7{&7Rpq1PB`E?x5Iq$Dkne5c$ z_w4og0xYV}8|T;eJ5XAE?;iPb_8N|KYu)8Ni35HX?cS6}?yS~brANFcpY}?Rdpx); z<$NG~!#=nQq#jT6Eft1w7;u1Fe9pc-sLK{xBnxNm_=UiH-+dj^N)I6kM+&tD-W-4g 
zqlEJ*C?&alUn=w2MasOyoK%GCea)~ ze3t>E0RtnCRsy&VKEUQv5+ef9`jMmu+LWNe=K$Bywtj12$6*~4OQ=wvQFz|QkmdTc-<-hk1 z#YNuG+1vqJg6~QJN$~s)1O*7tqS8Z~gy3Q!*wBn59GSsxR~39MN_%DXu8$BN1K)@5 z{EZVB%Xh+#g7pH|X#hVq>AX4bq}e%t1))2Wz2R&|Mv2iKOQgdr4EJPsF{?0)7>(mA z5z^-thO#$orW!o-q}ad2t7fp5gW`sauif=WYSqdTyS?fz#ZCS22o%x$8IEN9+I#w_ zxt#d|cH5Wi9Sj-skZ7cME4Q_;@5=YZM&gax?f9+#|8bmE9 zl{uW-cWbG;4N4zCYZ_y6V`ufhobl ze_$My7CyhFiqT597*m3`C5q`bcO8pK5LIn%;q!Kz``hY7n|q@a*hiat`VZuoY;)g7 zg!b$<_m$g0=+|D)`Au4zV_l=o-3NZzsXX5Yu3?Tw#6ZjQFUWiTn>}+(qHa9HGBm3C zvsZi^j=Tsh4kINWzdW72bPwOxg~ zI@<37vU*5zR}?fKzlC z)r51P0}wykB2Bm(hIE@IjB*v4ro-^g-qZIsR%JK#VsI=P97_S^b4zt#Sw3U6{7ko z%q64EtVnN`!|jm~sr%pYeF>I|OqOv1r8ft=|3;5d!#JhUvbx}zD6w8A)(4M8i48ij zA=nfp=5%5%xGzd<(uqyMEm2~#PHYZ79woNu#FpS)k|+bgwHTP*h0(ai=MRCxXKNnN z2R}r^F>q|9{lRyT0-r!8Mk6>2a5ZZDM$W!0*Pkqzt1$*#2FL#zIJ)CUGao6gXv<{R z^h%Ap=Ygs52ZC-#&r(Q*FOnxT*&TcavwXB5x_Hu|O>Yg*@V?Zl*LG!KuRUs}+3 zPHDi$U$3;VG4GZB?D^O7cVVMDq6<)T zg;%<+ahX@TwsC3K!p5r>l;$@s?pn}zi8ptCpP|ck+ zjBFPNgNUg{7?W3IHv*KPgqY0uC$lcp(ai#%ch*3*6AQA$qcqC+hcnJp{PT(jT0ihx zwugGDrEco#m;UpiZm18PedDqgZwKL-gpV zyY08@23(=;f}N@ftMMXgv^>4~Hk2$)AH}N{_5{M}-&6}-%ofU(1NBdzzm5ioV`)pv z1KqXZtiQq_&ifnSvH@DtakK)L`Rt9@Pxd$1S3 z*-Bl=^^;&?`HVZ|XZn}$vzfz>VB{GC+RFBUeR9Yv4ht2BkCFpNHNlrd4m_6EUp|bk zc@W+jboKGHLR<}GH$F0-)%$idl~Q4kU)Y0~l?QHSzSU+t{9xkJHy&J$;?TK##wV%> z&-5~WHZw*9k3T(LTzh^RmXJe5T;>rXx^n^U{73=+7nEi+dI+<1 z!4o!CZ8F3$<7ahVq~kwy3;2z8mZ8X5hV!noOhR`8XBpaORsUB;oaH_8GkpbqHlIn8 z9{f!N%J4opyiajBL2tUGk z_1A-Fgq#dzSDzXQ7KU{Q`S@5$F*X+o4gG~ZxJ1`Y55)9j9PM#@xV~gOxIXiHah1dO z@@IMuKbtRvHo=v{*bnJDiCmm?u|Lm3tjFD(d1Z#H=fEWN zM{=5NxSp|cbQ!(Tul?iAV1a?DI*+uM!~D{Q9{E|book9m@zR?s)Y;-ZX8WC@nO|nsO*85YYhSUcmh`TLbr|of(y~@Amm^<8KWM4{A2h!| zE-s5%%3b6i@kZ&mO@ms3Yfy`8Tk_R*0NN!N_{2}3oGbsi*rf^ zvBoOY%r^z1Ere)m4lo2CFJb}}dYUg+rOtZ?AOJ8NK+(xIyK>E^O zsKHnV^3?hr)?1O~gp^Y2Fxm=23zmKJpbOrDJ{c)UUGBD^zyF*Qx*cRzY~PX3mR~lL zl!ak`Mg3N6703i#D5?(KHOcI?f@_%EryfDViwUY6UcSi$Z6NT=OrDKS3K@Cy z^oxf5H_zZGY0kwB#|`p+&Up=0%n$rUJBknCAEkVF>M 
z-q9uRXkn&)s=a9?Z<-{u;3E}~`g06lN}W56GA-)d_-#yuvP`G4L6pxiLvSDR{W4Rv zu7H55)=<@{7uP9O_5I9?>n0D1&bC4;rYX4oO%xb40+1|lou@Tm1~nM72n4Rjr`oG4d5A2mrK1&IUu`5_yFzV_ zR)F2kWB|qts=M1F5X3$N0>8{;qb}+N>b^uFR-+M<+nE>FM1X}Vt*BFgHHij$LIHIO zu*0;_A!Vpb=HL1G$_E{$z%Y=45};7pmjL`Y@VpgWTP%>hUx-0_}G!g z#pHJ8kr7~_N-Js@~MY{dB#?O z>m{NrlZ~>d5Go7Q*d&iOc**_Di;s=~3sq`Sc=aDgypA>!S`=L0U@G8ZIyF$smoTJ7 zx!0}8_sdMx3fLiGjtL2K&a1P)spWDy8l&*Z2rlMP&`4#FkDcB%=s`hMoc4G~E`h@%x? z9ZUvb%%Bd<7J>NJJ46R28y&2WFy~?&66Ty&V+A%<%r=6%FMER0twY*l+v_?V^; z8#$KRo*pVQO(Dkb6;NAdP}>%ZKoI)|2>ddWjkb2YooeE(j=wW6o)IAyDutLJh`md6 zI42a*pb)!I>%a`^khcf~u~S3`CL0~>m@r4jggKKezz(qh!!-eW*>Vf8FkY#!RqaLL zqfP^gy_kCfNmoE}cWhNU9?4VF#1XgFEg=?a<@k0?>`yYWV zrdpj>8fEkd>^!Y3)2ZyzC|}TemFUV;t*aew4{QYX)1#=s`-}iH0_$Zm;9>?fSY{Cj zT=#;&FEiP|Wk-YxBe2O9T(~A&H)1PaIA9IqRmBlllc;-bsErpX1P=FqR-NIx5#tdjs=ujQB9)4Xz@vR+3wjB#9 zjJ>8dlI^9&CMBOTC#=SDP#(W`e~cb+wUdH~I==tf>|-xF#^K|DXj-7?LWCv3`d) z|4W5Cloon`^#s!Z%NOGX&t#bsG z^F(O37HT){3S+5h7AUwTD5rkF0wo-_R2)nF2;T%K74B2E>w(mFm$TkbDR;fe5@ec0?s_BFYQz=BOw%o7a7|=pyx&45 z9G+AhGi`vurNRSB4LxA`Jkx;81Jtm`WC=1{SNB_cjD~hpJg{-nxgz&L#as`PPSkQA zq}*PUCCD5ma(j(jyZxq)jSjPb!8L*T$JG`v;V`7)*ytWS(kT_TDJ}HCsEcU;W*Ze8 zFj-J76TBZ;SQ0i0+A;9J#zT!F_ff@M4~FWs+(#*Q&}0cPf5o#azje^awOeq7F;Lcm z2G@jU_`MM{X2byt3Ru(#1CAL3t%AT(;VGqr9sn(48gO}v`VE;Z!DW%?H)Qm)L*RbK zJy?TkR?p5ywV|77kSvaiAC*2UrxxVNwi- zXW$i>1=`A3B!WXPNuUOaEF30PzyawY4lOah4h~C2^f?ts1cw4h{59l41xRY)FsULA zSfFxnfJJc}s$)3(0vl*C(X^GZNCbx-l0Xd-SvXWzzyawY4)bGt9UPX3Xs-f^;BYQT z{59l41xRY)P+buRtUEb4z@j(~H8C8X#L{`GbAhdlMItz?CkfObk%dD|1ssqb;?Nf3 z>)^0hL@%sBA~>7@5`PW3Pyv!!IMh_c0ZU2_4zMVWLv0L)A7I-N=BBnX7Kz}njwDcn zL>3OU6>va$h{I(uz77s;B6>*$62ajVkoarJg$j_=!lAYz4p=C1aDYW|91e-$@IA~G zFe$Z_u}B1mZjwL^5?MGLQUM2~hd3;a@pW+UMD+VAkO&UPgT!A$E>wV|77mA0!~yF+ z4i2y=j>Dlb9KM4`)R_9(%2*_V!&;I+4H8*699jVfq=z_M8{_NXkQdP_Dv$^c$AH9N zLoQT+q!tc`R>T2|It~u7D2_v242N&wp$w+HwlWrp;INt`P=iDk4s{i9KzfKnM~tt7 zLyL%BRe?lsI07X88giimB(-p;tB3=ZYaAS4Q5=WKF&w^z?*f!MyKH4F62YO1Bv6Ay z77mju;DGcHhnr)39UPiP^y&&Eg2Qx>_-n|83Xs&oVRA(rAm70O7R7Ox62sw(*kD@f 
zyu((;A`u)qNCGuTWZ^KS0uD$IaabGU>)_BNqVK6dA~@87#9u=$RDh%w4pS=PfJGJu z2UrxxVQLJAyYXUHN$+ydqf(1Ra9BYSsL>s%!6&y<0UqdcbfTR`fp;37Aine3x{bHalkT)g99vz<1jsjgAYk&ABFY@i$ri( zN)o8i6RAOem|g(~q=z^>Xd4N>4i5Dqy0-#};P7lMaUd5eKvD~b=@oInqKSh8B*$?$ zEQUi7l1iQ1Y-KDG!C?tWpazL7e>kiH4oDAi7>Mz8aHtc}k5(WN9DZ9v9LR+VkkrEA zu!=ZvMs3ftU{M^0Yz&8ghon;HpskEWA~-B23Dh8wg+sOi4oDAicq+!%!674}hboW= z4!^7>4&*`wNNVAbt%w5_L>wGok;Xyxq12ZOJ4SxJ16!=ZeKL6C_ykP!x#)pcjCc4l zL+9JZJAfzV>rxKy8}AtT5m(h$ZsQIe?0{Q@{d9ac0e4<$%qwqDNKT*sZf^Y1Zw_w} zcPQ!YXL1)@A=1nqD-vwZaiszK^|3F?+F$gFcVO3_FNL;pV_h&=5^A5uo;Ld>4U90j zb8h7}>Vj=WHQ$c*=a1&X%|!A7V6+3SR2VI`kG5os3s79Y>W$TlO)a>c)jG<(Zl~Z9 zNu@q4F}45Oq>OB3eyk#p7j>_$;gL{9%Jr+pwqqHBW zv@X1$v^sb@-hTD`+HY$%W*1uMDii}(`Pl<%2ik$!dz!Kr28Q+qmm&>s9|_*A!)m1~ zvUpQyvGx9WseKRL2FDg===UVn^!9AZfype~JY9O6pDBpH*W;f=m@dW_N!sVpC z;kv1XT~R_+-*CO1km(z4Q1|$FfV2NRe0#4j#_hp{-Cf?H_!+}C(K=6mVZme^QYoXh zQU(@F(^+A&X?n(_N*PrZGU~XSw*#Qr-+zFWM(Ju?!xms;B~D69tV&4)ck{Z3#Jiq6 z1X=rsdj{n`zTfxOMR49|zpn=(HW|F;mCvhd?EU4HX|(xo+IxRjk;d$Y%%LBEBmWUi zDBAy9Pwy^msFR;H8U7Tv@F(2=do*wGZ{#6qdYw|j&wG0R?!xWJifYN`RO5IP@vu7aNXLVO0t#k}C2xb+FK^ zvZ!F~D&7s1kVAgFgbZpI?kX00u~QFO5{k7@#im-)5tG|YX|=1E)OuHO2+Du5tLYaR zmbipg^OPzR7pr#_HwzG36hs^3r?|aM?JCCB@Jz4-`Y7qLt5{&E?i9J$e#g=pD_uE! z=UY|WN~!h^D4MwlTxWwXe=bBHjR3>nV-lT0y?z)+5P7|UUsu}yU%iw}wDvSg?yP9FE{ zWA_{0tHvk*Z)DB6xY<+t1-&`vHG9_HAnrHzs;3Rw8;;*_5r9IPC{yjVpbc_F0CxHp zyU>uBfYNwxCWdz~e+? 
zf!;9WVTYABc~BWM>^XX3Uz#Ah1pooF-Jv{W2(tG{29cFK(JF&MrN+Nut04OcQelfN zGmKh!m@UZWG_p-buQ}&6*~nt-9wCbxK5~5aO%AfnK(SdPtA6q55oz^W4Grz{O=3k| zq@Tw2SCT<1Bu}iUx2Xi&9#Uc7t78Q!lvc?0WDnbmj1f8oTf7}?Elsg0OR&9MY1I(P zt5?{1p;nS7TIFmi!S+>3E6E74h2erNFcz=P8MWq|m$R|uF6Pi{^)}{PFLbbN0*-Kg z@mlqZ&EH6aJ27oVvvS+J*&4$Kh$=(;_Ik-6woL*Y+g@{IMe|m{_A*lWWyuJwKvjwr zz!+9cJ<=HAzgeZsJkr6|(iB_51>0JsRZAqVPGLLxYN|`EBu}u-+f;(>c2fCe$q2QA z;i45V7Hk`gT6h7$#+DnNLu~bq=QCOzY;(Xdr?FMP)qvI@+oX`4C#uRrE)98;iU>`i z4PJ;$XNU~#srD=PEGkLiWEK#e}7ycIZgNXCIRc+8}vi!!nyne0CkFu+PsnBGz9_jrt4Ih0H{fjjW~X z3JvX>lwK<$i5lB8L%k$V^y;vw1l)S1mt=%B?!+nVlrj~Ledji~+(sROtM^fNT;PCf z1y&tks}X#|0F0hN`mvj43bAcJ z5ZfMVBYO2ldNrwG!X-*C$q4mwL)DxpRLwcB#YUH#vqN`SWHnfqV zW@LM=(rPG@ms7wthFVFUXf2 zrT~=M9m+E!+wV&TwURv1DkD=eaTh(Zy^mDt<*@@F*>br$L>4z3*}k;kAZrCyJ;F^L z+46k{SWy@0r?IUGt&lviqTZ$wul*I?3xE}pVZhBhJ;sDawl|V(^hy>QnI74GKr)DJ z19cIta*LY|C``phws>S2VT&7L`|Z~|*jj;Ak8o2*wtWb>u%ao_ zuUYx+I?13Fk|$O)+f;(>^`yecRtyC(y(jN>7!w)U&Qe;nMDp~=_9dZKk|$c_Z7PWf zqeqEWk`ZF-#wonk5e*WYkuA5GhkmQ~nO}OIgKcg+BipN}MD&7?bQcm4E|d%+>j`WF zplXY3&?8&E?*L>aBeVh0Okx5MmY6Vsk?kN}NPxh_p+G(Q>XCG6C0T;(5{pQHy`4nr zU70W-Ai;`Lc&Q=6)fw4xr+SF1-l|?b&%xCiE$(Y%J6)NuG}O+FYC)Fa7-H07 z76tY-cKr}F(63BLk034nGD#(d>y+N>wchm#!*i70>m$AEwLAV##;d=>CUfZ0qh;yA z>Y$`jmmZ}{ZBpY?Y_?H&Gm-sH_>M5S`1R-@AlJLMrLd?ETij-d*qc(;mlj&O>5Z zYUEV&I0P(-3E-oC(V~h1YLpgBD-qR^=+}750M9aoldK5|xmL#7X%A(dgEEIwnb>Z7 zuX+ZnBgBG_{msY71s}bcKEmg{6p9^`wq`bw;8PGH;v;kxK6=-E8a{gCy;m*xtcVG4 z@!5&Gf)*W>hyx@^sCBEjc) zAtF9PXW^rFuUm_-vz(W3`|Xk>GQ$G=w$6N$?SJ3m?5RzXCqoo$vUM z-k$H^qc`72_?)CP>$Wwsi3FcoAtF9PXW^rF>8IhNH|lGA*2M(4_}qsV5`fS8N<@Os zCrRY55l(`SkX!iZ9s3pV;jVoLAH8kg!AEb(kMPmwGxpe;*+hcRZ|kW(@ew);AH91& z4IjOMU*ppo6X4>*?~ntZfl5Sz&pSxuuMtk7jF4OS=$-r(@ZoNL2Oqti-@!+3){pS{ zJs#UZvq4)kn@I3c=Q9!?p|kMOyZY1c(Hr|UK0`49ESHOq6{2hGsHh%{ny@@}<=lfVK1U`YSnN1}4d`XCikI-58=-vKl_~;G)8lSP4 z02iP6N{iiD{q~W7qK+yPf#00qbtP%m8U6qIgpLdW5uPKH22)Tui zeiNYrK71R&!AHN2;NYWQHi+=~{UK6wt8L9}BEjcjA!7RyItw5DRzey+`o#o|&)S#( 
z7oRJY7TuMI1fSQD2(Rsg_z1a$kA6d;0zQ08!NEtrrr_YCUqXoR`99|O@a%QAW;T)F z^Ccl7K0;^Vqu*9Y!$-fcpz&EB6X4=AUun@(iAeA{mPB}sCB#R_EqwHw3l;F;+Y1gp z`t=0|AN_JdgwI!N1fO18Gn+{8=@cU3BXkx%`YnbueDsS98lQof02iM|rNv++BEe@b zcirK&ju0OqxA4(#G*rNcZ#6jh=+_#EPxM~HRYS4&8cxE}imXuKWd`dQwH?CB6qQ#S z9_5nn%5B^n=&Hs`s< zqu*~J8@0vp>{Gc_UkYvIM&jT^NvNG95|E`oor)2NdcT3XU}t0F4f}Y%A>84s-fviz zc)wv8xTU<`@Yz{u?>DqzT)mI?8#>Z?T{IqE*1HI8SOpw|xqQDtcB!^uD}VTYL)&Jm zY&KyY*GbUi{f0KY->?h|hkQjMua1S|GUL7&KmTLYv%Gg;zE_`fHiIVLZ>W~{8)k=m zu_DUN($V`3o`3p4fbV}pF7nI^4x8yvBFOn(wmMJD^J`x@hh7BF!mjYr7J0x}lJm0g zmP0+umeuJ$|M57k)k#`tKX%Rzl_Nzp@%iF%wGW-9cqja43|7E@cH+Ce=KKol{h0Lk zJCyyWEBjGNq5bl{Lv;Azu#Qy`1mCtGnD6Y*2bcYwE6h~9TN6i`t-UwAkKRZ!6DMzO$J=R)+|DQ}Zl^2%Af^U8i>Eip*?(6V;2t4NknL3Ye zY%5s=r_In!8lQL$maD?`Hn!ILNIQ8R4r2t`(9i!xx1m*yOC?HQ*Vx7rZajTJ&Vt4k zo^#X8gKcm;PbX$7C^nz@^FfJJ&q_s6W@%dV=b4~%JQTy3G~H70+Qt?Sr^hr0lc1uU z>4ceorhk6pWpMPpBZnXa-~hgdSfG~$N zg2(zegv|o)wJ^m=ez}ytwsA?AECNr{YO37?zkfgF|8o6)WoByqzUuP_ zRKIUIsZ#xZOmgh{eZ#9&{l+S?jfBZ+KTFCd>i5l>Z7+aS^*aVf-CzC2YSF6SSk0%_ z@28Z6c>P{-^8V`gqwQAx_T-?l{r=Pe-h1WRntQDC7jS-VE{rGP%N(^QwrQzjm!@ySkM(+-9#8FLeC~x*H2$+sQq@pe)wqr+IDxE%@wu7t8O?t|BQ(Djf6CW2 z=7Y%Jk9yD?BPDFR25K@^fCX4oFbSDfMB8? 
zRHpcYq7A+U(1gzELybNWe{|!)Yh?0|^FG<97yIjiSF40!kMFBy_wcdDOdi)r$j~0- zmEV)~C~$^1xzFp_ySEpVpgRteOp=I#R%5~1V zZc^8xPJM8fncWLxxQ3)R1fO)$(Ju?1yy3Gwzc?`h(c>97A8=xQFkY}+aYY_y+C7G{ z(0jKu8E@9SytoKYy_$=YWp3as7Sw^H+Oq^68>?S`=J>RASl&zoTaLc{^YdPH>oDGV z^H6v=oEC!w^4U0cqPhBH)p{`h;eZk4N9%LiHKWQvTRg}bZNK+1h@+6CrGG^-Bs|$m* zAf;y!8oX1V!qnuFsS)tMEFR9$TMMNJp!7hh(sUu_Vh<58QeQ;vO{MJBxoiQ0-X>IE zhM&k#Uf2T`Jt+1%uNdIf;zBp>y8)kJ&uhbg*FB(NnQpH*hNmS$A0WLOQ#Y0&%ES7e z-GHxA#*@Ns}7k1V(@tD6dZyE zU7Rj-oGGsSOnIU0TXZLh#lJb3f{pEc0Y0;eh2gH8WT1_V5G{y zQv^^(<@Ei&0ja~@WPTc=TV>LP!C}#m_x#i7hq}P9LyG!~dL(JhVlp*bj<>Y&&fg%3 z{vwigc~yV+4j)6|4ixSZ*8Za2i58|ZgR1ZfC0Taf(i)@XzEoPHsMW4ot(Zy+KlToj z$6?zz7*W*$rK)To4i5{Us@jHqptoj;V5vUZZ&a1E@I(GoRn^QsZQd^`l>zwn~f;3Y>Rz$XQgs>c;Cm1yj8NV4}090^TO zQk}J+yj%wr)qK{nTUxKw#jom?*2aRMYJqD(xek<227{ntFI(vtuF*#XybeY_DceU{ zQF&;(^1?adp}Ni?76EceZ^9ycFXDlTFGYL4gRZ)aL%?Jxrq6gj^n((QFw#}y+m)lu z$oBl&t;bBl52FagZ3%D!C%%_4%%7s%VN^4RL8rKdiMYf376m$m1m*l?b^1;Y%^RNL zRc*&n+!>lt-ZG5CZ3Bcte4$_mE^&d+5AX^VK@M~0HD-&K$5XRkpO@1F{`tA$NLAGU zJ{2IPY`OSE0Mq)EOfBqJAu}$=i>#3nJ-NJPj8>I5h)##nit-vs!j2&3_$@GmSD4V2 zwr^s;8ub1<$;6DUlJNB-fTLoi5e2^)pq25KRZe z_0neQPyWSr9HuQtZkrc zGPJba;+xHqxuaCrsbN8_&g2UTYCJrQpc3p=Uy_jLW4!=i1ZBukkRBSjQ(@95f~+WZpG*UR`*`EnbJ)J2EB< z0LUdRbBmfSo^ksDn> z4WS@3oV_FOu?6K^K~13`oWib36PGw)-t*!6!bUdrA8fSLWWq(kL8B%e&INP}(0z ze`Q>n7fRdB{Q0glFO-I8ws!;zpB65-0WJ$e24f_twirmRQTI>G{Vj@!F$&YRuq;lm z-8;fogmy9J=i5qs;w|#rPTTt&N$!hC$j3i_Ax=Hz$A>hL@cE>XqBL9(O4&sgmz!MN zz#SKaWpV24-jUavTvc{)Sav6Kk2krvfpv?+vd=J=>sQf<#%NO}elA1HeC|lAGi&cg zG+obsnE# z82aB+iK5U$0Lv_E2)=^*QXfX|t#`~V+>u>%RFuEgM|bc7+!@ERlnVEcc+4udqkref z2F2S>4!+O&WLmL5zm>HXcv*;7RNcWpIJ3abR+bG;K~{g^eq18P+%E^w)V^>;vY+t_ zs}YGY0SS(|N><#=5~;PmGgewY3S`DY^Y(~&CqW+-?WbT|Fo>`J0lEeVgu;Ue{Ij3% zo`23em;F=d`BLyX(0rGK!ISTX0o;JcXO9>maJ17B6Z1U~s)uIG##kbX{OT6@l(`(o z#Q$hB!e3_7B&^fU6R!}k*z{(C+c(K5H zvNspc-mQ@uzY=T@QnDitM-o! 
zg*L7s*pfo^6;jc9tV!0x2j40+$9sQSy(*b+(6|NH8Dc+wS#h{Z9!NGu1y)1_wg-b) z#Fp}U-E{eT-swIZ2*;u{Tq~8Xo5+{lST@w5{y-f}IleD+qeS0SKy!1objKn&7AQvQrKI0lQu6`4{3;dHP!rjJ?wI8K?94Rx~2F z$NS-YCO#kiDA?Qc89jf+_`TS^3-11JkcUvaTY3%Mo^Zzr=L&WwyRoWO+V1uJZc1>p zm_t&0r98U|uT!g%A>q+NSRMQYlRo9`@<=JxAFBD6nmn9gc(Bh}b%MF$43SDKs}E*E ze`#j)-4VRhVa9CjyMrI_>$_USe7(LG*(WoH^7360rp;!prD2h9Jd>+5ANv~oRsZ+1FjV6{e|AixB&nGrcLmmGd_T4-a#E~ z7bx}ca+o8mkFfXfu_#%MwUH9mMc%0xVp3!?lzD?@*aLJHUe>HJmGXZLIF%inhHWnJD=^kQpUcTS~5mlHGXH92}dbWLYs^ou*`0(81hv z{JKKvf}egI)n$}ipp;w?Q}UITl4NcwxdTe>#gpma<9NKD@Ww#mccCLi$qg&uK*10R zB0cdcr>SRI>UCB9jBt|-qwPYa?ZTM0I4P6JkNKVul)T0GrYM(?*?}mxR|}uzGyavz zZ@Dx@`B_SwtUTLJiWnXqPdtQ6SrLUW^$^c|M1?uckO(!<5cQGD?Ln{rqR^o>N!A8M zv8Dxg3vkL56H9#y%4OkMRx78annrBBsUzq?&+}^MP^XO)iCv_YF$Sr%T7wiuy_y|@ z0$5?ZFW>mE5Lry|8uxne7H2}E)%jd>1?ZBv&(1eI&P*u#u^l|eun zLSh$?cSya9Vg?ZOhMFBJ$j9TF zd$7DxDm+?je-sgi6JEdYXsNJgWTuqGGy1LK;AY(ksTnh?W=q0HwlNA|^tbwZS*=1x zfXUSm3(zUy@5B5BKO7TSY=5dbdoy;<@i z^BwTpji1cO=Xi5L>xR;_-l2OqK4Eh?wVfgF;sOi9^&V zO2(t@Xj032@T_Tlsr7!&&O6TmqIepcUB&eh@mHJ$Cu!&!A2(?xY<9C#g1~O*-jlsv zbx{Mq0sY92rT>Bee^Iwre5eWzzUU+HIK4i60=@z)1*^>?UU^i|qn`|qe8`A`nQLVg zgirbtQa%_-S*xY&SsqF0yI7>~DF!UXf9xB8)KH48aHC?iKA46@F+A}owyz8QaUFWO zk!lbq!^`c@syX9qmcd={M+#Dh?5<8~`3SqN^73cl?!*J@9Kb^Z@k*s4`xB$$YFkC% z5cX1Cssq4N!2}TRmcCQicWEGou8n}`StLMoSyH;M2}8iAl;@6sEPRBeQC9q0(R(yg#y8 zedwry*=+gRYzYDhiL0e66lp@B%s$jI``2#~vn4k&TW(`QVfLfS?1jO{$QSp)nbg@l zXjT1GLW!wDyc~r^ktMXxGW%^x14)iFklUCBI%-(mpZeNWLgJ^TzY}TF=P9#idi!TRdx(YuE*UJYp*)_dLxT)}?b(n#DT?}a-MyUFu zSG|02Gx>V6u&F@1)2G|0M`2h5q6;6~UQ>sJ+KaS)=6NXgP{LWZjqFA|5ZpZ} zdp(ykP!@lwC&o>Ana&p5??WGjPoN-GCaHsbkRD)rE>>psqPsk^!>5XnlHJ(4o9kDK z+x?(IVyV!Lhm7bM11NT#oQ z1iBu$h%Clozu17RO!WAzyXe$+^fp#yHx3A}_~<@Y$<&UwKs+B6Au@Ab_Uo|>`ym@y>kne`e)f~7eedal;t@F-yzl`#~2454b|RW=5xWVfB9#|kh^ z9_9l7^>Tqad_XQ97|VEV?=v&1RtyuI?2LBOXGnonO#1GSsn&yLrlk( z9f=pEK`am=?%1_s)|2t>1CM@GO~Y(4$FFW4r3)zy@2l$jukcEEqLq(!j} z-GF;|HZEyWaSN}o3*rr!%_-!W3`at8-*62*?ZJm-vyVwC_Dbplk{aAd_B>E7Dp0yP 
zUSL`Z8zjw9oVVs6G5A!BQL&e$lDz6tcqv+6tF)`tRjJs^GD%M9#*x-2)vzBKFP%r->paj&?lITb){;hcZ>y=cNgDER(E& zY$_+xn)(z+yDY(u1(WQirm_le2FHn!Z{(laXxYd&}MiU3>tOTct0wr#Bc7mM+ zlV0@O7AEW@vAz) z&I4@UH^FYAhz;iBicJ(TakR%1>@39Mj+Q>Yush1r6714PWa&fy z#E5K8f?fJRHhpSo5B)z4Ywg)?rKx3lN12#fekQ@r3j3C$?H{pBg40A16Gy8`u$w4i zSK)1fgM%1U$^=Uj>@39Mj+Q>YuqWk@oEz6JeSDEV^e3$o(Y~Kxmp+h9pJUoX|KEpImSqpign%Kl~3WN zC1{oIJlnxpmq~IGGhENqv_>(RE8LY}$AWQosalBytyLz*CWXg-ff9#2G{$J+K)wI0 zg9Vg00 zrE(&z(JsdUz*Px$ESO~1mdZ|qXvKbr&FgM`vP_awb1Em&8pRr@*kcKHESO|hpUO^z zXvI!XaAKJxr)8;}NNW@mM>`|Ijs=tK+EUqx5UtqVrynfD#L@0auxpMKi#yuksoKqE zs@CpByq4zHC(9)5ye5?sX$@@RX#Y&GW5Fc5i&EK%5Utpi2~I4N4>;nqYTLs$v(VDn?pUm8T^*u}sp$*{PgJYY-Dho0DM2 zf|{M{XsHY(aB9r%n(bm1F*3twbPU&|utAbhO8o0{F-EFX(#G^=DH60wH3?2EljM{h z|4C~U6aT{Is*Kx?toR=qnPQ0+{oJecxQylHb33e=)WS386CqlGha}%6cCdo;RNjMs5jbh?x z=Ooy%V3OVb5&OcdgN2wl+7}b-CW_eP1gD82CXO~G!OlV~?r4Xn)@P2Xx<0QyDGnRU zBr9N9DksvK`V>cdUxFPACfUtPWhX+kVo#hHSBzznoWz)fqmkAqCXV)Gf*lJc+3g>( zvl5&pikLXs*$H+NMXbB&U?C=s_W1-m3$c+5{>dLT+HhCbJRcj5)vn3lrW2r*zUN-I zuks#S9qYK&QSFc3_x`!_g+a7`?mfJB_s_i=N#kD8OJ9zaRPB0~P{P63FZcEsqls?= z+xBW#rAiMq(v3?z)Oa|-&RRJ&#%jdAU&QZY+^XhvES)Fj&eGr=zX5m5TG4Ev$kl?hJiEA#13 z{}azL-j!fyJ=6~UYdnbkVCKO>Orq_h33d}j?6d@@i6SPBHYdT(LM-lR>CYPNZ2)(? 
zDh^xvv&Qs?W~6l-ZB2q*`Xiw9Z6@|Zvws{HS1f(^MEW*>#5R*X33k>)ZOhU2kJvjC zoFuk6GuBc!EU06b-(;zAtsLY`2;%)vACn9k1yI9c5x_`Q8LOE9_g2wtvJ1UlvzvqKJv3J)U4UQN&)8 z;6xOXF=PLTiKDe7*jb3h9W8x)VNc3Gjn^C9`b-~Rq!0Z`>p0qH6713kvgvb7d+47@ za7v#srLQ|A=9pCpcGlF~aJ*({?(me$AU>?=cTd}A-X0HNpNDBB&Xw2Ig!@!uR{~; zSTM=1DwUlG(Tcqv{$T|eV8b#=PM4>0BCSzO9PI-Mb}X1=H!qc)2+@i?m5nRLGD%Ly zrE(&zQA`}|#|d^UsM*CGtv;1!GgGy8XC~OOOj5gLshmh_w3D&iYZB~OFv+ehm7NID zihcI5IBYDFVCXQB{V8?<< zc5SKbM2J@G>gfjyF>$o_CD=7bip3o*{aNE|>a4Z<;k3AR>CYOkNwt%-1~zfD#}e#V zFbUg5sq93ER_ycyCzeTanw`ptv_>&;v@;UySTM=%@KkmpL@Rdp)PseXINCi4cGsjT zc2TNgq%~DJDZzf*lKLcCMqPGL*onF}rGti&?}d9IT9QzuX66 zj3&Mf?5WAFO2u9_KQwZ`K)p7`CRL?tQrKAfh+7q6j3y4$ z*YF(O?ZGDsl(^OX33e7xBUpF{-=^1Nh2#*H0na%7^aQ)ARLqv7V5Y_*rWz%F9hP9n zf=RDnYe_05LbS_YduSXsmPv9FW86h77*1NFnE2Q71UnW?vZM7$#cafWaY$UTi6SPB zwj;rAqKKWB;51Rh#L-@#U}qr~ceMIc*k&_T*XMn;aoAWUSpk=)aw4s%PjR#_CD^fG zl3hNPoe0s2O-*oOnItDMCgEtLHHwL&O-rz2!6du=Ble-1gN2wl+J_VDCW@Gx_Z{uN zL35iZV&Z6W9*M)wLM-lRho{zOj;Xpn=T^sIW0_h>4?3Nhmf^#N>nQj$#u< zOdJhgXwA4iv4xm8n&oZo>YCXf$8W#A6@9F^=YF}5^WNPrhx0e`UHAlJ`|hs7^Pt_G z-MIXD=Y+-Q{((ZVv*zvdw(&8=eVotu<*>BcpQmYKXDLqnd`8W$hhG%LSN#HYx+v_- z-~*ex)ltXl4o3%OzaAYBT#XM;s*{bY@OjVhyfpBWvlw|`$S&_md@FO&?oD}cnAN(g z^oaK)PTPGPM;`M~km!4??Q&iizcJr(3%=F$H6&9O$ zg^!j&@603-saFd7JP*hyM-uU#}{wv`nz$1>wQ1UX?DP_KsGL%3Zl`WL!UJ?m(7e_=g;aNDD9d-;RUV4^DIY|a6= zhI~~Le{sCbQAG6tneTV6p5_EKykeW#zVRT)7|Al=BGQ z;uS|118T=@@a4|-T{ut{A3=rVz-bCYaGXKEeHVP(M zDn>q~qZ>UQ*Fn(>N|!a(FThcC_`K}=#vGEH;02lBJX8jJc=4sSMp3-5JeaENDJ|b! 
zT7(Y;M?UPg@4;6-eI(+WzfcA0v_c)KvWT9aTL3jE7+<-a-&jwD*wey&paw5ORld#h zhNU@){Qu{8poh%_rzs`=m+=r)zKmet;IVRa2r8Nn60@Zafjz2#LtDV z+(v&y;@pI^aw1}Bd+;VE6t8P+&S!7@1u5{KABo3VZ3`f({9t1*5^%O>a6}fB z0lpxv$4TD8?olWvB~({}BR*%CB1)O=!;^*Xatq<8P4o&2VT~3RT%A(roryw-zY8M` zgLkJCIwdUhN^>QqMOMj>GC2FNq?XU2R`+Sz!$3HW$AfO6q-m>D3h_JnwxkCdI|#W} za&sOxL03w-H<@x?Sr_rIYi!t@V_|*H!8iVdYdA>*zKsrlMs^4S|c z#R7%}kH=IZzm2l$^xN`6obC#BIzT9Zd8Nl%O9R38rX&&A3>xhWm6SR@P8QejF?w_ZFE(D+KRNek%KBmu;$Jp9EPq6`%=>F=>{G|u-Fm4Lx zb8?8feb3fx-Oujj%_(3m8OU!hL>j+yCy62!iTNH|G zg@XOJp*RH;I+)^Uup4J0%iIoRZ=8W!FGK{Vhhg?2jAkW7?IdZjeJGQ?9=rW`IZ&Ow z{xDvyY_BKET8FVeP)l2fuaF3{51cPEob43;KS&VKB`_<)7xlB9@5Z%ii#)@nA7$(D z(fuhnRwe472QiWbVugT{FT{-+1a}Uo&5$;DY?q#9OhdYX1KYrPjlY=@?DxP(JFbYD zJ+_7OdGu|d(|z0Y5uEtb>Q6?E?MAOUh?*QgP4=QDdr*_>QHz6{gzZ3{`D68*GN$Xzbg7+7A?0AZ z{A^gO!;e0Dhx9mf1p}21Tp{}W1a+OA8lKPE3}n$pa3&twh;dd-#>H`N1Fb=+yDNR6wh?ocp)_R0(VySgUzl=y((bO|IA9BrF0as*DLGZK?iy(Vr4oo9xZp0oO zG(#CmcHlC5yIuH_-=WeWC?n?} z$1gm{$t*jstlai88sK&nZxw&o?s zu~;TJQyojA>$8VxmOmu=FIK9Re-Qe=!CJfjqsbP3aSHpMp~1S3qP_0_;`)PKjYXg6 zUy+Z_KkDyO`p>+TKjHrGw~>Q~{U6fYwx8(#gu?dtiS~a9Q7QdjReJviO1IU01(>1T zIQ?IOXcdU0|HIi@2c`e})gPVsteSAL|BEZKpZ@Q*1ML5viK_8H`acviHLW_9l>RS* z-|7FP=sxofnK#Yg=x!X6XYQv+mU4W9#JS(%d#&1V9h1f2!6N0v%_ z;rv03Z_qDdtrznOzZFBeMfG5?pfsbAv%z2~&H&?zecUJZRp#`+t1>cRK72S9-ok)k z)|)tf!{``Kd$``D*T>NV?`FR*a~wU25AIfLsj=}?J`ITZrPfE~K&*i{5J&(JuO<-Z zRRAJ54RZlnO)6-TKq@#KnvkFt1i=i%Vr3P2vUi_qh6lzzjt|1|sc8?{|NtUeO7_?W)INvByKBJz+CR65R6^s2&>n`Zu{;O2;w zD{WXSKWkR=$L~~$on3Obg^9sXjb6D`gVOtv#(yI#s{u^sD%?Uo)_{4H%N9?^4H%KF zUuN1(Gq7SK9Myf-j5=S6$wPWljC>dTFrORLblHt{D}S>ae7a1@>_+d{;%`3LQ%Lj4K6IFvpRUTIG_6BXZ?(e(~@~MA+X9up} zys(54T8-Z*dqe{Y1BOM0!d7`fI33@EuW59SN^&&>3Hlj5X9e)M?9b$XPK7F2iiqE34y? 
z@eCPeuP4_+EV=Z2FbQn{lrFzz#3g`9UKuBcmp70;s8rD> zScgeDA7C5dYvo?+WGs`~_=HGua!Nl9)*d37DcPDcsb?2A zUsPp+YmXpLRh1}Jt5jmXu@b3&a!{hrh~ZsIOIxt^t`8tb`9ZN44@Wux&B#_hONg9C z*WOOF6{ISGGDp-IKTsLVG@K{jO()if077UrS;XVomlhlR+wCvn^XhDcbX#ApN}cax zwfMD1cew*InO3SyObrnQur_h_&tz;C#t+1YF>pfZUt#$>IuaPmK+AhE$j5J?r@wOs zRHVe>7G9yDpDKk4y?&Vq(#mnHtK3KvXR0)DUW`19KVa75v~Au-4O`#oR*hqo%Uc>y zD){fmrDdZ+*6*K2ZGn#20FjCL0f_2CFNjbu6xYJ6P<0e zlJ*b9>@SWs>yuZ@?ayt76J&w~putQZfANbY!eg_=Yk1jT7}iO{hh~c&lghQP`2=B; zvwMfK8)qEONjwgkt(7BkTai>#Hyh7zuu`~jLG5#sAMe6n-r_+(6|fy1`A-5Qe7O-q zr4Eg(qOGpLH!6YK+g*Xhr$2eXGQSG$ddE>r$V>_mZRgiMhoQ;T!e|NSh1v=~fx_4V z(-+hsqM=O|QPDiaodT4EVjq4$A4pv3gj+;STufuf* z(g$$ejUH_UeT_?{b5PW*fkEgTE{mxRJgonj*s`Oy?clmYGeNpy5b|M)3Tf&`%9?#Z zTv-!j=d^Y~CJ?O?aObpABist`(5$SrVp2Uwpj}zGDyjw~LBq1*Ck1mzTJI^oV~X!O z+>$SDg>U!&Veehwt17NN;2n}ff{KY2YqUOsMGHPEPsIWnkfTRU6os^ij}S-@l$V4k zc$*qLL~}SErBy1eXsNZ8ROfmSes zfB6<<(T!qkuX+)bI_lJDeqmXvm+{32$4xxzh}Fxl@Q%TUaXaze=|lWgq^QDl8VFkC zjwA@p!}?SlaX`eAAT$rFSRq;g(V7IId0?Ui;;Q(U8~53FwPV!sG&^7~)b=ijZQ6N=Ye3iXkQ?r35Js0bLMTucJGDz0t5Lj9*hU zoYY`-yzZ}0)5>TMz?If%k5r^YfgjI|un2%nX2r6S2}k8&TruK0OdPe6aja-mx?^7o z5Hw7PW)cM$5|w0DhKY`P!Z0XTdUgMwV_;-Tx@;h> zf?U~>;Zm&ob?^#pC8m)mK;Hz@aPO6vO1xKMD)wH%b7PUbYU!~d@lY&p8e*quth8P) zHl(q$`S4<82(EjAe6iydc7;6F35B0vFKR`X)`uxUhqz2==g1*i(Fs1fLOGv>4(b7L zCxB54jask{F!l+o^sE!M_X(_It`m{%6WB200%He{D2=H*k4wW0m7~LDCJtbzj>C)y z-dAL1V8cu?#kfyk!%QmYnK2t?T;PgU(AUhFl2)0^rC|n1Xx>iEGy}L5!1Gh;w@+Zh zOe*J@F&k!5InNAim~nwCHi5p49y2bNhCU>rn{}Ak2;fZ~GpU?s2A-Xn^UT18nN-d* z0~=;s;EMI2Z>`6S%cWrkNf;P7%&Z0QdXJe@&NBlWW>PuN3_LS4=NW-vdxxTxG!{r6 z&IM=zIzxr`_6!cSy)y=lya@z1UB2i^@*oBlfpv71y8R` z$?5<@z3&sx#!R{b41DXA)f@~Q*Pt~8O08O zeLf(B$`x_uKmtav`rzP!vNZcW=S3T_Uu*BYaCiQ^hy;PX0l7tpl3T^85$U-Hdbt^d z92!EojUu{;KB6(LSku|Z6rc2n1)*q6r7W*ScvWG^EBelu@+_|DkiIx58)FN7Xx2M#y)=3Uu7tjgr(|bxKW1SSbPvB`5^%DN+O{MWXew*HN#EVZF~zM8%pW4&cbpJ(=QMak%|LNLhF3=(Ari zwUa82jVLm`6EQ`)z^?T+MY^=~Wxdx4VaeiHi{jGEyklRBg`a=jz7{DSRNlX4Ukelu zYUN+Eua@G0sQ)$ls+OqV_Ep3w7E%SPqvmuAc@)Sq_LV_nUqy(LTg9n^?DX7U_Hr|b 
z`lWpp(K&`arsdN}(x56nF`0Z-Da&gSUR7A~O8e@wzO=8jqke}-()@}?OZ>LA8y+Mv z2JOKP`f@;94^6Ff&-JhqJ8?Me6B??V4*-XOI_wqq>I|g~X;pB{M$+K9P>9aHtE}+e z_Q9c?-q1lA@$yeHh4E4vd}22 zdn%8#{Kb9aOWV&?lnftgkG}_2z4g~vU}iUK!CRlC{89J)SP!4TH5G9XyZmAQRR8ne zpD}-tln;S!F9GfLS<|Hu2-WOIZ034rq~n~(XQQ4B#bgkuSjRW}PnUg;D6>8qlU&%ZcjHkYBPDu7 zM`(iJE4*QxrV4a4CIvH7d3v;$DSWb|ZWHKei{@Rol%yE%`Mk%yGIdo^_CazfPQ2{) zO$s1+Fyv&`@HReh){~1Op>~NZ(cnxZAVfA81n~qd44SZ9M?8Kb#E6c0rYi zrf>M@#A%8(w+EV)43$=lRALP(16{vjC#b8U+e?AP-fy0;yp;J3%Qv2lgtAy{t>Dxe z$Q8js8AJ}bEz=}T>*EAv%wuY@Y$3AGz;cbzKGoB^Ygkr`5~+C&zhlX7RF%<0RWVA# z+qr~d&z9VQ(-s;#m%6+~rH84!WE)Lo;n59X!r4EOxxIz!?f3L7lKo*7DU&4{bcpyE zjO;}B&RN<&f%(*S?9HAWd$4ME97TEUt$r;U+xM>WHMWWdY&2Lw(MQZET)aAHX&kDe zrOjkov_%H4Q5ksKSI3IMJmaA&!K49tI1+2>qRigpw_7(T!y_b#UI;(APJ90Tqbvs*on;hf!6byT&~o|kTCThi9~vp#Vd%ecGH_sD|5_*}2Cc%sBe~ob zAuFnrm=l!9Xv`tdG{Eg>$X@lA%#rE`Qf!5~Inp>)iVP&?C$}eEGMB_vJK_LFo66s+ zZOY%CdYtixv$2gk_Hk-O&kpElLx&b-KoZ_rgl1(!uew9wL=i@ra+=z11#HqF!u0KY znDIoZf4t)_55pIKc?&@0lk&~Jx#mIy%j9X!i>?A(c5 z4&H_QI4!WNs4yl?hohpo<-2y~6+mRWpd&bKpLhjwGY3xGEoDjL#qjQ>7>|AT3YDvD zHmZ+9TCEr*HiP<2I9IX}(rR|8-vyL=6L*k$*#(4&ShRejw%V7@Vl2nH^{s|{Jcaz` z5XiTiCYaW)l2BYQy<|8JW-$szu^9G3;v4Okp%d-F4a(?-5|`097~PpqQ(#!?E(51P?cC&cgl$|9&5lwu zq8~mijmVRy?q6J^_nSbnr<%N3w=qU!jE&I+2lp6V$e1lG(*PVmKTt%kL(_JIQE+Pm z#B~Nkr%4_&Dtc`uVsx;Gj&LI>*U>A3I((GxbtaEp$Dc~k%f>r}x$hF7afa{}MM&#G ze!CQKTRHa%oq)*j8-*qz6dU*3S=h5yf^=_0UkX6G<<}%ghSLy7Q)4qpipG>ORBQh6d_{UHVOW^B6~82VzCx^S0rLR>_L7w9WjS@ zmf8nr0e^WM3szoorE`UK{U9y(F z;Zo@CW}Nx#9w0lCbzbNqQJ_NsH{?y-R{n|9cWj618wH#oifaJ8}z(NpQ)7Hx127pjxb%2hs+l_ zUBDZs3yj3&9(Z^SS|Ck># zpdzJ6O0o-#Fm_?*3_!bf!RT~}sV{BwU&YYRv{a*2cF7pgux8*-}i z*oOGwS`;A~{gGL~|2m3jWnZ{M%fX7F2bi^eAM{P~E)5R+PGgC=TI-R<5Z|ILd`M;3 zI{++AwgfUTx%BYyQx!>y^bXUb+eCSjW#YDGnrYUBv6<%_Cv~t^id5w{=0;gxfaw7+ zG6zRmYC^`MXDZw*m6c*@lHneJiup|qQ|)iz9z3u093&LSN~L_YlQ1)G17bs26Ej-D zX4F)m?FRAKeasdVX&f(^19xANY?*W_C5fwcu+5`)f9wHTC;xn*l_v)L{!mine=kmX zFG+s)TB-W~M#bU2_2qBQNqHZT@}4X2GV7v!YUTLVm*4nH-E`g!1Bj#{GmZp>TI*_R 
zm)0y=Qi}l(_RK9>QoXop*^+9!VK-mZvYN#;SFBu!H|*T2ty#WuPIb+4yv0M&`fj@# zW#XloFR0}LdpaWEyOd2Mn1tnW9~R7U_>KI~h-cNqkLd_vvGN`X-Y&tm?(xXbiZC`k z0?>pBJ-?)7pQ?7jiZt3Hv2oRGsdQKpZ~a z&fE9w-zg*!a*wG&Uf-p2xBt{s77<5YLq>gEo6fD83JD;^D}|7TFhH^=v}-&_mg2BR zdK{dc+o?9T_?wkM;&kZgP1rP2qO|usSzy z71U{OH!$=RZY~`dYU>^u+8#%K-}<@xIDy_q{QVSt~e!b0OE>L;&KpIkQ`?@ zuX0??sh-x9x^}d}RMiEfU+X$URgKcrEV}n4P3=cjB`~lc^Ve zm+pPXLy&=?*_xV#p(&c$4~B|8$bB&ME=NUK8&jvIQukVYy4MjJx0gDm7AHdsTkm1b zz~yzCnuW_tHMJjHj`kq;!DWF*PljrHXMpnJ=wt?l+7%>=4g9&LW??9*sr_K6%7feo zL#KN5WMJqpP3=n$KW0CWwPAH>Y8Hlg(TZsyKl2lho9@5ww;Lmmr2Br~ zaqjzd&%3DqG28s|k$WIP@zkk5w9`;LRm*EFO?fZb$9oDq)~g@K98P2)^UGLZOPi62 z@qd;i4c7d=zzH;l#sz8Uf&Va>A9z0P=M^r=+vu!+eUbI-uN*4njr5H#ztDR4KX7Im z^!~T=U-IQl`IF_t!K_b4oZZ`JYPlu;$-KwE$_shyDBF_MwdcR<`};i7N}ru9c-ogX$a@dE06V$+fQZ^4d!ObbPv z&0Q~_NA8`?U6@_f#kZFroBjI9C?QtSiBd4+tBczfjBW_ab+{{CAs&aq;z}!-I0U{Y zc!0{cpNj*<@Q!Ja;^qxfkB;{v-U zUY2T+#VJ_ULUYm%vgq!zXqqx)1~$y3a-JD@W@gSa0>kUf6Ab29-M@aG#*(smRm;D2 zo~BnuDjXgEYv*aW#D)RlM&Q9*;NRyJrF!I$MFEzGRm$>OgjW@oycXkChNR{c23Pbk zgv&HupRMC{RMp_)Oa0?@2%Lhi+bH9I`_%*30o65QOG5Lpy+GE4?+IjyF)Y`~WG*=ygV#6La`0;ALu)x&js|Wq z8FZsgGxZ{(8#*GonAzUgXs#UxR-4S@)rtNaeALt$Ge4ih!rPx{cmqqM&y9F? 
zPzOGjM5HQk<>$#$F)?n#hmnv#0E#^-zt|xpz=5AlN>&V=+(;tMgh5Wwj~%zT`T>7X z5PIX^o|nnR(@f;zn)6&dX1-sF%so(b8-9jv_!;ERdF-s~DlNQ$mkPrAcVW4!6c;`} zZoamGDXHOcK@qvugKJZLhCQ(0bJN zI+MB-(S03}bs>3^Z35VbGaheKBeAj6g%Hp8r+$3I}_WZ)(G<&t0W&;;zcnKaC7>?}E=ChV~8jAXZoqmwIKYm@&OW z%<)HMZr|$OLTg2<EAzcoa4G{Mr4(_ifkgj*dqT}nZsgG>Ya{E16Wk+e@>WP#~ zxZZ2FZfkSfIjjc|nF4ZAfb+G5pO~1dpJR+cZh5d1Ms}*y%Z#1ywQlkdRRUpR^OHhek{byOv;P>DtHV1F~ zB*9xHcuO&uNk$AWubgj?0{XRH;lo`G-2C889E4k03n=gMko){y`;bS0(*1MUtW=6I zSu)*@K>C!&RD5e8RUXYJiaahq{nPK zyg&*_#%kj;JFbJ?tnX#0KZjn>fM$YSLrzBV^>lkqxe9W$O!r-8dCc|FPD0$TM(P>A} zf;>bbpdG~V(Do#=ghh-r{=YDNLsIFRjy0Tg`kq9qH1-}e8v5poyWF8~4U~Ex^qKq4 zqJh)Ls0q{c0Sx>gExEx9mx5!54jLrZ7wS9RGkv=uQY(RhC*=VGup6NbpSF^3eDY0i zM8ke{%Cf4+fZ-@hv}HM?&Sw-45Sb5(9>w`W482UWuEU9wgM_R$J#K|{1O52u4Umut zYeoDRDrzp=x7or5{>y@L8SvKn=I(C*sXZ~k5h1o;9>-Wz3Te3=_iN62|bU*^# zt_1do1h8G15?CFrd%?aA1t|SkSSgo;iq(jq-Zj}bYpN}MDG#bK@)2RXzDfCb>G-4y zTeYJBNK*O_Z7;GPAlTb()vQ=r1$p=E_{gFVVlOdda{_BS%LXxtBCK@$$XnP4ji9@~D9IPe|g@;8;kk;V)-pxp^5 zoPY}3*9&+zt3d=Zmc-Z#e1wcQkXjEpoM;iimruOCFp_=sFv ztOzEWn+U;PX<>LF*n`2s;o_~O2->q6pw^9fz=JnzY-R|%V%HXi(QYY&jnNq6a9B$d zd3shuJT55kddYDNTWcf1jj(rE0@wRfa7#v11_g6UpFoGt>er^0`7Ix!Fc!W9o0#Exo&;$_{?ouv{t2 zP`HhJjoHTd?Q9eFc7YadGos``+5SO)QDSeeWnbk(*_10cZc)_R=%5_~6UGK+pCv$y z()FBN{qC&zhp~5GsGRRoSuD_ujRrHJ6e|AY z99c5#$dY;dvTP7&t?^7Kh006%ixO~_eUc9)l!*02`(#>=(aQn>nkiIj8)w*Q;wO7L>hGG4pTy9b)!qu?cq^{&r zi8(@YF|5noxpIw}V+kqp+@#KBn#2@IE{5eMh8>(zVo#PG!_}XkdXu=2t4Kj_=*QuY(HQC8A(8b=uU%3qxb;1GcDYHgdH}3j3glI zF)DKgXG@U)me{ilAh!|`kkzXiA8E9`qKDa|3?L&3=+$(j5u5y^0#NjDoBQF(H6sZ~ zyQuak(%9nxo@oCWQyc;^l7Q5asOj)O^Z-ZNn++f%3FxVdbH}GJGuDn7z+MBW7J*t6 zY24#s9B)?}Kt>WHLG&h*h~6}+a-3*SFn|^S2q-!y!yp;8QGLVP_=^#*PQ8j5#St9? 
z(8~4Ex|{8(K*KOq5VxAyW4-r+Xx(ainD>4_w64MC{Mf)3MO(fNOm>bJHAL@!vEP+p zxnM0eQ_eqmQAe1GX@?h85^Z7q+AUtxFx*eYsC&Gq;n5b>nSHAlb)w#_V}IL=8X0Y2 zJ=hDpsIk!&O5DalQ-}KT(H6?qKGTbuh;xmM8p$XbOyBfvbRHTEUxZ-VI2sq~!?_e5 zMKCPPn~}6!&{P0a`i`^1spIKwNK7D;6^*fukatrgG&1VkScsRgi4lr)t*rOx*A7rk zak!AN10WhibCU4JMDB;g5cu?%3^QkZhFg#(}LAIv(x zJRdSjo?mjpIG(X)PAyYNv7^WMVx4JZfqj54)=VRvt*-8RQ<{I8oul7Bo#M~teH^L& zLDa*AXPJ6P_A@%}*M9n=qk%z!C*6b{OYu56D^4z-LFNt3R3*$ej)K zKk8~xf7b(Bf%CAaKllfo%k)U!ku5;t-lWG}CmwyE7x(*=Zv;u8wPK_F^8-J}by6vG z@iZcuu!g?p8IXW4!O=VqiAEnIeEFJ79F-jRLPZ?eA8FKizIwFgG5 zQh2UW*Z-fU?;vj<`mX*j*Pr||nts=xoOb^r&4sW1q@IUy)^{OgcpjB{J(UhemBH&j zPwlid-nR;~1*Pf6vj@+vdF{a$>AmYjMu@m`LU4Cs>@;HG{SI@>JJ;kD6mEt(VX1Cz z1>TBqGW zroB?RB+>7*CqH~C;g53R9iv42W-9?VnnwoTiH-^c`0?2|asvhf4M^Hs10#uVT7e&-*lYVfO)o-vD7>Nw>$315kb=*F zc<(fl;HQuEi?vdlLp%Vz#L~knijj?qhM0E7L`#cN_j|G1UcYo1uJ0Y-Uj79*D=`v3 zsPAwOhcBa^`=qWnQt_dEF0kU17ZhMZ@Wa9zcHGu)`V z2?D1nH6@NJioq5PQ%!%FYVTV~UMc>3Ygu8e7dRMnW#EtjLm=(}q)CoR1}~F=!Qd^m_U!>!#n2kKH?r zw+4h@XRvs@`e+ssoAgQ(ALHgm`51g99$SBl6nn>25I+JgNA|-aHum9;rs~tQhf=4F z!m4At2pqi5m9(v>AO5=@vg$4jtIT;w4=`=7=WvbgCVFP=Dez>OGSOHk5ZG*Sf%9HJ zIu`CO#C>gO%x+9uZyp-1T;=wyX!)*iV!yToZn1qSrvuF5P&Sq)!)@QfCsM-i?AP{A zLHLcAa-PL)x7hX*d?6Zqq3H=A>m_>9!tq-01$+Z6W8#d08~}Hjn_#*U6=2Grn^&#L zyKHMoX%Ewo;?;?f5qcKv^Fc@A+!t-G#%d2fM%nKSZpGijL>gCloXP>Javz6bYYIM! z9dD0@dqy?|HzTw;Z0`r^n}Ux>boZab_Nh($23@cfDTSNMTX9R@s15?l+ZyD4f zy9t7B7x*kJ=YpNg(PElwIjU%M4+qD)B8yL+-mXcsCD4 z4V0sliq@eRGd2ZVKsYXt#oDiY_#Qk`E9`j-pvl%TK$p1KUoNLEK-JRLo!1oHnjipP zw*qGiusPL+j{0frlmQ8K9jHB_x@T`w@L`deL))X!T}dM4dVvuniZ2|Ja{JBsq=1{E z3{Bzs#{kDMpj$EFdKx|YjiD>}@=r!08iUoq_&WmJI$Zs{T-Bc+K9thPe*@1(;V*pG~A1CBuj(8 zkV`Q-EpUft&inx5M~n;DUz)Zu75fuV9n#O^8K;VVmbVP#p zg3L~oRR99;tnVCHc*8_8CDHakMn~%fD$@4+d*QdwuwQ|RaGM9l*iQ3^qj@BFS$$#! 
zMI_32qja$K_qM_fh&$VYoj~$7k=)T2lK&T(d8RKq`pe9kzL0=%N|W}rqcB@9IS)+} zz2w#M7+j0T*0+$)zPs8~t5dg%CVYe9mJ%>i>v}uY6bv@7+A?a zsVeuPiOf#64^VDxP4?li<;U&?I!w~TPfQ-#$>%$Vqo-T-2BTMfAc=K*!tai4d$(ZK zTYMe*K@<@?&mQ{0AcY^B3I6m{_&oxToFAVad1}N5DS5(goTS)3IAg@K=z~|iF6F<^ z-|*{|^cuMHkNRYoVC5?b_Af7p{L8R*Gh+aC`lI_L-Q1!H~&XyhO~QmovC&RsAN)ys&XR zeoc?=cH-OX?z7j-VSL6;yq?O5K_Kkl18v;{bK#r{J^(6c1Ziw&Dmw}aZ}=8bOSDbX z)PXR~n(5l@?^QW{dV8=LDEB6?1CzN+L1%Fl;K3dVU$CX+u+&;Fz zJ`$o3VXun=Xie~En{v!XgtQTq`^{W64_|1=Q8Jqn*D;Jr9K?{|PN)p}BJho!MC>&< zSHS8(D!L|-aUKh`-3$9$kcr7fV1Y?Bl&23PKrD`;{BAo;nzi?{~U@tj8#8@uvd;|3J#-F0z zp_^!v7L16k^8>pa?Mx>InCOZTLBC5lojOJb6>iTfz-$ngdrvBCG_8q)C64Ml6O#z6 z4VKML$U2ky>_F8uoXM~Gbk#6QxCSNAi)KKq#1Xq&V(I*ko{|9 z6sxMXNU_Bb84Yxg%F(3bSSAnNqyb_i!3jMrTVf`mUO>>k9-o=9e}S2ZUH6aA_uA9y z@pC5@(Cp1WM8bs_OV|tF0bPOXPy+MX|B0V8lff*$kiVB5m#pL4@7?Wbr9JC)b0iIN$8yULy`RxRG9eJ1gFmW0+$03)B|IxdUk-E zAndK^mHM_ngSB(~XL4UokNs!#KWWlDk%Wl*F;EC}_BT*a8c-&YyXy_~_ZX!HN6%60 zoO6FQxCcxjCeD~M-I(^k{fOvGew-`8xM4RUGFjqr5tp2?@7n%)WpfyGdT>Vp-EYo~ z7@fxZV6Q?EXohT4tbMJ9SET8MffgS4?(L+)o{3+S*V$gmNwNtN+yvpsae|yQnb=}4 z$N{p6E%AcGydVcG27uy5(|CmsOwS_09ieDopl|?)q`NqFpkG6?w2!|SnDYMFNq+jw++VBIkt=G~ zk+xIO0zkQdH^(a{?a9oYp#yGvaOczN`W%W(qn^`wKArIRV8Te)9noW%oIeZ$mO43i z=0Jx!fl1{F2Lr$9UAdWD6$95t{Wq-9{WmY6kq|w znw)5mI5o+_Ly_I6#cGT)&G}28FTig*eDtt?@do1b>G>83b_SmTp>jG7qq7QI{n?H) zfukb12>KKjE&Wh=CqJ8oFu@VdD;m|dhQ~|gBU(6IDzkEqQ9I>3ADZT0GEhx)?mO|( z_TQy{XYgsbAALN~zNr}1H2`zAP#JJLL64cd0xWdUaSDdWoV-6(_((bag1bhv;nQDh zf=`3Lp?*G{^igg4bua@0XWr9^2#Qy>*OWsrPv&&2DF*^PAqIzg$Nm`pcLv|)h}s63 z6tpc<2Ip&Kuwdo3ey!|nBj~pUNGAh!;wdSSX|AB5_I`bLaAQ&EEvlwd!dY6LDFF*Y zk92_Tw{p1p^A^^7-f|}+Hl2t_!qN}ZlFEMo<(uxoI`m-4}7_#C(c$6dhmL{L4}VzIRZi{e``d#P`W&!U_H#p#q)!&7m`xP6OSK> z0_{^~pj5ZNy+vmtY@C6GUqcOrCspH7RCt4~`%U^L-tbKbF+gK_q2IAdyNBmkx-WC| zTiEatPqj(T_`z-eW;F~fSC{XFL3jy<#|x|t3B65xlhu=(IT3NixK zl1_3k-`az{cvuM@hvNsM-Ivlx*3U4%$JOFQwBoiNtrRaeY6@dciA--C% z10xi?w1%RA*N;Vcxq+AEftcLDGYrZNvGsBLDTVCL!0?xKAkd95#Rg{KdX9Xnu@hx< 
zhjJ&~$k^z(>+!Ik!)Oc9xNa1my@`&yigDYWI9>obinD#0qtags8cU>Px~-eBkszlt zbOH{82>vd_Q3J^j#9fcG9^;+>%d(2>oFzKwLMgBt$0l3l=5Z&XcbZ9vj{EB{P$BoX z@;Jjod$h9)nx)*t!sY82ff$bY>0yT83laYpbsROZrb|KBWEe z8KOF(A>E<&IGPmBv%_`x2|eI+=67?UOm$!k3QN%eT_J>q7Cr6K92(to@8O>ra9lDT`wsSexzwR^alp9X!b@*KlB7kj^X$B>gQI0kdzPp%Q0lhzYw|Qznu2MLnof- z{PhRLGvf_s{{l4GvOhmx=);u6Zx0`*-LnT?fS+{$@MCn>M^f?GkGrU{!?WzzxRt~% zs}pJFPRF~@H?428|G;iXOG~QP*M?FEzIuhQr~{_zC@39zIA=Z6*84G-Op%EFKIV_9 z7C3o4s`PV?XLC5O{~T~@%l6Ie@|g|ldouVZx~#&R zXl)%I#5rp5V0%QbV8~=hh4(}b^-4Mg`Vd6f#7F^fC|hu=ESl&q>W@AScn;elE`A4J-#XUAb>i!ka0pqoP(D&6%XNd zjiDGvRy?@sjRxc!)G4Ev4p?X|G+~wY!8Jhu$$0r8DZ}yUMp4m8j+EK{Hs*|HlInuv z_npDt`)Q_lx(aWQ0fl&Zl4|KCWz4jA)Qstr%z7c{p}QJ9ul0Cl@6sxZ>KMl2LK!=U zYo6tCQ?p>{=~eHOSJZUmN!B%drTAC@%}X;=nTreEm>!6)6%y}(O2d^c)gKQHf(#*A z^k&$R1nA!r`u9SN3a2$Ohpj-&6o_Wc$U-iJ!KLlrlP38#FC>l+Xv60w@H(QOSCNV%vRd3Edz@9ktGnZ*FNc7C)v<)HBznJ?&asMR-M(z|*KWrJ1-M#n@4mVfhIN3}-@No`2;B=t2w#&A3 zmXSCDool=7h30EmjuYYAUSG!x?oDIXXpELa&ag3nGi=!T&JaYZyQt+1TPhNajSLdm zi{5h5467I%8+ zmoI7Al-{JG%G{y~H=``)SP~tRx>39dJJ*d2KtAK4q0R>JWRd8Gu0{$S8dDn5&GQiH zQi+K(hQS06Zn(9x+BFdl?Yw_BULxYnLusQV~W@h9mfmY3WLu! 
z&LQ9UVb$(VB%%pDXb^Dl6v?iyxG#p{#dn)RDjzCQy7oRM+Dj1vJ9LI2PkWg)8e^Pj zF9WK*42eV-FxtzYhs4ufZ30a7BSH0NL!v6hRA}!R!nN=c(p5RA5Ys>i6jKvu<{T|b z#`-WI9qpzBpxwn#-X!g&hOD=#Af`)wCK3rnMTw z6l}lKvJ$xVhwX^)=#%Tgd76}#z3rEhmW_7AENn;r6Vf6BT48SMdgX8a*7Y>r(p%R= zGK}v(evqkkNg_Um1eVac9y*h)%W+2HY5Id^*0nQ*o*y(n(l3qoHFe2Iod}@RjdTg`L$E^PQ%;=UH#O8WVmE z>2{-m$pQ#ivxqSe6^9X3DVovzQZ^&Ez8v~7pV?jlDEl~~!w^reusXUabTkn6+It_C zN@~t&3Vu?!TIU%~mRc+na(2Rv2%oG8e!`6i_fx@iQj|2FbjtJde;Y0#ML6#7;f9-v#Hb=wMNE&G0SMue4KSEJS= z3U=p7IyBY4uv3GWG6jC4!EOZO8pLp4uPw%^Z6qcmnP`Z?6=X9SD#Fr41Gk+6SrZzK zK)BZ>{2Cmg2Al2UaW^}vsyP&uA=_38-0>W3$L10gc+=ncz<=NJlY6FP%BtPkKY03577ZK&WaZ9B82tKzxdVqisw2yRG(}8RLX4*h znEQ$za8@Qt5I0KD0fiwZF7tmOg`rB=AG}S$xO!rbrk+FtSN$!c47B61Z=IMd1IOOX z_T7If2ps#d2~LTPA>Myl%0jP4e$mp^cRIC^a?$Bm48(SU|L0%GUf{)dl8G8fTJ?a` zKPL!9DeZrnXU~2pc^!H>Ku;WP7_x-R_CV>oe4kJ_iTxASS2LFfcc>l9SYC-tMQLxi z-toj!y$?0ggB1z*;Gex*EJa4wwfLT2q zIvI|%wQ{@~dLEP*^MowX@=fAD)EPQ1SOB616xFbO7=Z2e@lr7cwdwjLTZ3lfn8Ue@ zslmQkz;*s_rZ2h18vtpA<0q&l8XKw)eo*r!aWk84c7o(63@7n11GJa%qbjk$P@Fyk zm1`kX{6SdepeJ}~DARc#J{&7E*t;mEsnNSArpOV)rbaFuR!|n1F{~)mK6O}e2{_!_ z>2EeFGn-XL6K$W zmkb_uTEj3J&@$>!!KOHz3B~wl#Lg`w(3WP;A)r#Dp6%ZK6o?QkYJ>rDrqukKe9^8RnJmP#v>%jg2Z%;dH8m8>!aW<Y9G)u0fi>UG?YJQshr+sZ(Gmi3 znne0cIZe`v%yDtXt8Hnxg*P3c%<`?(ENi>Y<|Mk6SpUVrD*wX ztlqVSFnF=xNJ#?5t>Y9v1n($<(a!P?QBAV%i#Zjr$uYxl$OjOyz+^1Q!&9UHPneSl z9ZiUkZy*&(0;^aQ8|&CxqzihHY-Onz?d~#X->up2=p*}Fre!Q0eOfncMVGUliK#|? 
z2U?L#G|3_9%q=hSHz8FO`u)g>wjYYE7q9H@Qq;k;2ebAQsdy8L;O}>E25~lVh3oAc z*U)Jh(-0pGz8UU511F{^+P5xpL<{|MMB78rLPK?1s*Mb7HoBcRz5eI+6rD|;u#3J< z{w8~%1`-ktmf){g4-C%&DuUAh@}X#+%P$k+vKOAD{TkWIYG$__q9+ z+tY=$_R=EK&JnL;&qyD196kV^VRyF4WJ*5)>EIGe)ZX}a)$VCDyws{_;1{nkNELxp zwiR$d9}x!==3Tuqf{FFu0UtQry7Yl}GqSZCc*!4*`C=PN03mzn3pDU8f+D(3T{(KG z`uG31hbe+CD);?|%mwLXFCnG9u?slKBl=nfVTc1_V8gG`6vno_z(|uuU-?cummI|O z8ueJqa8R?glSGuC%=i|`pviJ@(f&&p3W~LiB!3!jTPz#b5emzVMSS4WLVc4v9pE3< zw-QYY>DzEV+8aAS6Nn(q42C9%fC_J7N17&vnaE?``eG^#;%%_cPN%IJQSFU8fEU=X zmcUR+AtzJ#;zZWn19 zOi8LHY?s=b^am0W`0^_R$Nyx93$+&GoNoY`f4?u=0?*C{KWyT{dv8-H7h$^f<+>Xu zazLg?9*0eZ#El59N1N?4^}Y*F`E`Hbs5qPEYDmPiXWh`_w%67YB>3m6^OE0Tq+c%c zweMD|AsHUEBe$y?d(n#QxTM1Kr!c~zjZk{$zk$tYWPPOufYI!?KP7Izk$Hr*_^f63jot@eFZ^SMKYCm z5Mi2a>>vN$?GFcq}7lmL7xW+v?X3PO>o!;0)Vy}XCQ zTsSdXmgN2R+wo8qPs0T)nKOU90e4qBDn~-F-&m|~O9a@@g;#21cf0KT zry3)|s{^#PG5vD-#QqManpg|Nfp&nOIwNkBAick*VE94fUX5p_I4uX=RL#uBHnb~4Qr-?qNhZN_(>??F9Bo3S<@DfMEdYReZ;=`hqpKjw_6z zSgaI}vY6a?Xg>#o=c)1Kf>qU6v0{n`i(22gy#xV!Ec$g^Is~e z#1gkqO!lidp`;~!)rayXptNuP8s+5Y2>t+Yq@`8^q&;{uVlYs=*||zNa1v%3enZcZ zyj~oPQ6wuB=Kee&PW7)hBMJS)yMNH^GIn8Ax5%Vjt6DcHfOe_o?Lz-xsk+`8XQZs$ zZzeVj3IJ-~iQTilVVwf31Ar`StQBps-#w0A54t(DK;wRZqr;#v;(GA!j<||_Ma)DI zkOe7ANb5C4mvZB7tR7hFR4eTRpV1iI*`h2Q{xnyr>28wiB_T`kIbmP#3z=joiOxc( zz4zE;7lF1PjwNZo%wByt=!)I?-p?4MwNw%1%v2HlQwY0t4iGyv;UQm^A! 
zTA1}JZPrQyUu3{TFdSLLTE{5XroScI#L(736OK%Z^|#Fyi|e0u@h#x=S(A2xDH3 zp2gYxxIa0;L>+fIN!CWms%Ah7n8U2%0@b6*;ra&0Qq4>Q9_LW58zxkt#^IKh&R@3g4vg^MyZ$z1NSS5Y+xDjXdZk0&DBeSE1?Fy(Y{=BmxBl zTLGbdAiYs!lX!iU?OW1NUge?0%rp*-H*;_tvqyLnkP$(%kz08hg5wndI_A51v3iGkaUftuUwx7*E0>sP#IEyVU03jPe$VPa#;s~!tc)fVK z#E>TGvG(D(E@S3 zJUW=QTuFe<|2Aq!CMygie#!cB%mOaq126_>gJIw>Zvxezwe) zQ^#oBuPe(hvZ)tiF-&_f-qP)(Akd8-=tak2B{!iI3(2TMj^L1sBRE|U z-5v~emWy~g0lFzN1e_yLZR94op9SbR{w6wZQnd@icgP#}UsK~u0dy5APRI|)FivfI zBj5hL=m_^A;Da8~ia6ozViXGDDFT+25eeZHNs}T$gkV-A&#U%fTQIJ^f$k?RVU?s2 z`#U9;1oYSsNx|^V7y}_1FT|0S=5#u6^N^uohtizP$ORr@J@!Qzk(20sBELeBCo}kg zBXozsuPK9>D_8AIQssgrkXybfI=0EKlM;MYOScT%F0=j0=&SDxH zmA4y;hD9On2Jg;OrQ2!mU{yGGylKM#oF1bH-y>v*wIBle(cO&ZK49BEMk@id3P7%6 zra7?fd0NpnUOWUm?jtI{)Be6mw_nwOhlGr`y`IM=>oyPDHV2iujPiCR&e7@^nOq%H zef~FbON#W^qQO7X$ti+7e_1c{IKnJyf;u5cd!tiec zLUl8CbilymhDKug$`E}=70tO*Mz-094dQ&Uc}%ECeZI}y6hh#Fw$}#SR{m{NJ7gaH#Ed08WDsZ;-h*ju9H@1dE|zMxl|*9(8GLG z=S4z!3N4S^&{&shL=bwcM|EgOLgP~7s!4NY|EMg*ZJ`sh9)62cdnSUd8_4ISXpjR-;?;G_G*Na#d`mPc;r z2$ya|5c&uo-4i29u%p>Pout{o>}qyuO?sNG73tUPG+9IcjAk2IL;qaOPLqcxYasoz~htOEO+5@mmt3*&GIQWVtCZl1H(9b{!`bOz+Vw z<0~08_cE9}q$za_{X2FFGSKqo*SxR7@o7jO%yAq18_^1T3kDE4n~5J}DlzB#vDnIS zh3`mUr2G{$RXV;qIg+-2`em~nx|BH?^Hpabw5|=(d$DaT7kSQoXOGOy8o3teM2gaFof7%FiNDh2Imj1g<(jXV4t1tCTFz$n~%8|hPmb8$nE+>5M!%D@~hi3 zOgU5h9jkUwj{Y%`Gyf{#@l)evN~sZhCD_o)@W!V_F0|PjD6V2#HfqRXX3~0#InU!Q z>zlgkYL`#RsC37I%OvSCLZ#akjqydK%YaCi0VCZvJ?VDYPl=e}5%NnHKBaBrk-^@x zzbjUGU-*#`>u*@aj*He4>#oQ8BUbf{KZ-9Ti;227g=NG zkm-BL&KHuCuPG3)+}05E0d?z@RI7q zRm+xCOQUYBT2`~T=8BaI@zxP)t*u$Ua!z&4a=gV+IoP@0txtNy0oP@$};n;@xalF@FtwFjVp5C9K-E8T+opCPK04) zB%c0=E`+(5wZ3q*PQuYBgG$auByIGe!hRTlOBO5~42I$|iSAE~J< zf}SixMtxkb+}OLHFbxSH#Uq%IPAT#2G;8)BJV=(}uts_uoSoYbBINxFNqUIWp{GOX ziD_y_0-!KnNcM^{3|;9V$iPsUrezdjRhVnefeK7Pg?5wG=3=BP{sjOR8lvkhO zcmZuRH8l%EztYrxFx2Eh?t`Jr zJ$f=QRH~`eJyw*XdmW*1?^4Iq;$&!H{2pF*F5kgm38`7Q+@-1g;PM_1avxl-P{=GM zW0t1&Rc)tw2w0S3bBqlUe0YWb(7p?XO1Y!?1?p1AVtnLi0?T3mii%t2qretj90 
zeXzctM^6UUcXKWQ%aH(2%3fLr{iK3qVd#EM&B74Bm)F16HrIpP2ScZM^kiV@E1KFD zL%lK?%x3jp*3>KvJ*KJssG;w9ko#b0zDG|6hR)U0z8JzaYm{=98XBajSs418Dsw*= zYV#oX!H}$QDLok&TB&%lw>AvNrNuP>5PKC??O*`&g<05Rau)tZc=t@gPkECO=(PKW+@xUgT zjB^s{?JopAv)L{@=^d;g0~2=H0@G`FvA4m1$jnxDAOv<20RP$pafJBKw?ZpI@TRAp z{OodI;eyy6`!r;U1j5K1?%k`mr`v~sEC>breC%NaEboOEJq4LZuQ^{D3!D!m*cmVz z*Gx3Fy9NEi83^|T$+m_X!-QZ!;vS#PvvaKtA*&>8LgS{Wv9!CLMV_m#6&UP?a3^6 ze4e>E{!m02^Q)WIWGZ3yubG(fpyqH;U=Kvrc6Q{>_DH=NpcxUazJ(`^z_u&oAefDF z-m)~f8ZYkpODWFExVup}mE{0X4gubgFa@B?Y`l>F0$tLZ|ryCv$Yxc3inxaJhVxZKp;%tW~)?uIzQGHxu7abhiS-xczEo&0`Ve!nBX4f1<~{H~VY z8|Al2e%IjFvgY$Av6!L57^-IIWQN!V=by*WG=`XvIE$fG3{7NcJwx1vG5-;UhBNdm zLp;X!s&^SMbx`YY8HJ|s_h%S}Ij{NR_;xer1dyoAwo@?iyz!Y3Tx_5plL$5G2 zjiKiln$1uLLv;*oXDG_h!whX?=obvp-JICSP&-54XXr(SxchJZn+)B+&<6~yU}!Hx ziy1m(07BIa4P$5)Lt_}4#?Tao&SGdfLlYUQVrVo&JXn_)&d~J?9mUYC3=LuEZie<_ zh&C!gb3K12Lm$IAG5;?N?PlnGhTdT4bB4MY8Vtjkc$%Rj7~+Sn=by;XR)!`r)QnJN z?UL#Rb&IUZx%1{NsGL(Z_o|x3)mG(##S4~e)Tqjll{G7t*DPCHwXky0lIpsJHP!{z zw2RN5d}di?X!7*RGcUS$+KdY-&%EHGU}bRX=yBslot7ba+P453s+=y+(tTHPFZ04lvb8JnPrv@IJIu)EOFHb!OE(6v>uBbjbhZCnt4m$ z*UuQ2q$UEqWW{wg%a)|YLkLT&7A*7QUA$!RT-9NJ?2@H5i&Nh-Q3ZWoxBvz)xzUh> z+M23`ehhrCOhwDcG$J9JWi`t)R$^R#jI!)Fs6y zp-Dlc;2RaPy$qOxk)6-yS6UNCA*GNeDPI%8sW&Ah6* zg|4A+P%660MKy~SE?9&iER0*tvZ}eu7hL0-glwR`K~Gi?Ph+!!EU89;o&m}RS6j2V zc0nIxEm>OIUk+T*>>PL?Svl}P`sToe+ZPA5%a<)(Rx@vbI~GV5MfDe`Bp+;69{n)c znDoQ+$)yi)A8b}*7|`FE^1=4OhKp}eb~ZeieX!vI?vssd)g|rUZ_`aRn)oyt`_c4V zkaQ_{N!bx&5}K)rxmmNZ>dGR!QE)-hDf5!jDD#psQ|6{;qiktaZSA$tId9r!gzO}w z6`9tRnGChk^&qtt^WO2=9ch1(0efG-=_O{uywDwE^i{rFGuZ{7krX#yx?uJJ-#_YI z{MqUyV<~QJenD>BeEsCcgYO?VF8mkb#?i1e)yv4c1KmH0zL1i_{-lfnGExArN{l}i z%v;${=%n#WUs{^Y0J203RZ@#X>|)H+YAcu3WEk6ICv$o7L`vr*nA5mJ87~IzxLSAE z;*a)wv3&7nI_OZEo{|lrGmm6>m5Z0mU0AhfX|s(o zS%K%n+gX#MS0bORIb`~`w9d2N`zXP{d()kLz-JbFR&oH2nO1V$W(CgX(Aoz{Ux3@C zb^T^^Sq*6KC*kQ6$bFE!yy}X|q?vp_;Eig zfE5{aF1itrj0t0)Hy1sc_eN{%zAjuc_bT_LZn6J$X;rn8Qn#2dro#PhA;-HX@^%3B z(NL3NldHv~xk|RejOLwHEXx$=JdK4gu_s~5{q?!l^pj49EEm=+E}1x~WJ=lO^Ma$N 
zR8A?o;LP);Ts$K<-61mrA6AAqAY4+n++eFX=lmJtE6XR(v{27!aeW9#bQMz)dc9Ui z`*AoKX52v4mJ_muT{FoVwrG-dA}$g?aVf%XQne(R9R6^%2Q$nv>6WjtrU}v+FZpx~ zoKgo3Thq^(6%pMFIY%)mC&^*rsll_(4xMxEd1dA2Uoh>$i>A-0_~y4} zUOcO6&RjUpu9&~z%BvPGTD)ZG)yry^*Ije%ij~({!%iJ$)vl~nc~~b`*IaXQo&Uvz zYpdqfpc_DQ@JX)H%?K`P7hG3Ua(c;dJ}Z}xC>dFyQ4rE}SEyLm|L_0%$%>1=^i+AS zwQcbs6NYcfu|~vmX5aI6zyDB(CAjIo|}IczyGV_ZohM&b=|ASUo&l5u60pE zZhY!9xz@ESOWSX*9cb-c^xL}@m3r`v6K?)!=WTh`C&&I~)zyEttVhaD-+%v)ENgVx zy^Xhx4p`?+`pNU3e%rE6|8D6Y1K2rrPwk6`4&Q27-+t@EuMIzafHn5;C%T^-lVh#A zb;PI7U65zZ|HXAT1rE-!R{r6(nd?^OSsSmobL70i0qfnrZXbNo3j?g3J(10azCX|! zdB?kDo09UHJq_JW*EvDw$ZUJ@?v2=UBUb zalxsrkL6nzl)v}ktdj#)QS8(8rK1AYwu-u$-^DJ2NLT0JGj1Dbt(#Kz=#6LQSvOCc zGiuTAa;=^BENh*Qy`6WS@cxxI+!L_IJo?jusrZo6lo=ae{o4wBBz^L@gMW!@7EikI zqu9H3xmL;IAm2^@ZKqZtsXJJnr_WF`UY&Ny!4h8cfbGX0ITJf zr@wt4E~jq1;rLbW|0&nH~ETCv7o51xg3e)QMhf9w8lL`u@~jZuTA*q{GMEE)L)Tq(5m@$SJSEQ1gw=)e(>Up*rWQF zHxIk`?FaI$KmO@)Vn$=dm^~qm%1kUQtvHo$z9bf5;2dwD$DL-oZW4_gO*0B%H`dXe9I`hVJ23(SF zop9XJqSwD3uzo-5A2S|274rSXH#(nvJlDGPt^w;?*5p`;$n1aq7W+T~ciw*KwBz!u z=dL@Tsrbq~>*ZZd#mC&6XZ?Bl2^ZD8l4~ux-n!?QCoJpTYyWiTBiMxZ^ueDWWgnGi z)ep+w_Vbb)YsuK%-}&d*0oHAgUq5<6PQEqx%D?}8G4%JL|9bSEnw9}p#bFmmp2m)s z0oOcx!wPJ@TJe*w*B(4IT5eRfoyb;rh+mu$TQ zdj8HcU&_T{g&*7-Tm7Y<5X~T;{DD)@2n>WTDf_}tG8C>TY=cn7cQNUXMMS)^Ujm68DRbTcd!2A z?VUN+wl^O5&7g}R=hIKRBWGxyRsLYn!7m=<*$Jopr#`XUr_X=K+3Rk7;#Xg54*urD z$OrGO@A<=@9|%0ZbIfVK96z9G(eyom_Cv2+I;p(tf~^zxzh-dJ6@|x_O#ke}({}AV zzx|)re(!vm&rPaprT#|345{ktCbz4i6)`oBN_(WQsaJ$UAe8$P+`wNGE;_A39s zqIXI!Yxu=KE$g)BzxDOd-v(GSA6q`43g=Dsc1)_;|3cWi&n{itaU&wkzyB+H_s6Rz z-nQrMJZtK&Hh(_(nE}={hu80S#LRrFXLR}WlMc=N#B*)5oaN;jsYYkXkEeBoD^NRu2_cqL%b@8JEt;*}}{OPYZ4X~~+c<%hQ$_db5>K+$0Gf&Bt_j;=U**YN|bV?TRx*W^_Ltngb`e^^yM zz?wC+{lhUU2UvslUb7?dk6dfYd0$_D6?XFdb;Z^T55`8irfu<+##xOQgI*guVqU!G_E=EN%>{nk+d>)xB| z|6}t&%NjG{?Wvz(OYE8@m;LqpLvpOOwO?O-*_1r%&^Hg?Q60^-u0HRb0auL9vsy;Z zK6NZMEjIk-(}M%=S=Q=fFZ|%=?_1W3JwN~ME&J#EpYz37B@}+YJ{5;n>{JZx&`O-%@R_~tAZeMv&zV+MK8&7TO8erXYz@T6K 
z^4xrD$L=S7o4;Uyb>xPV=N|ZKp7qjCk2&tyrw3RsPdU3P5zDb&X!+BUpKr*w-nD-C z!l|zgv@XB!7w62Fn`_;F+1Z^dUeC1-zT?LSpS^#;`p*vwZ+zmF0oJHVJuhA}IM-@_ zbKuZJr{-BrC;k1Q3lFxeSr`2M&OKuT*4T@lIc-)~z^b+nzIwp{IaZ)!W_Tm?^Y+(6 z1yh&gStoq7>FP($&a;kue!=Li*XCLc(>~hz=>*Gqt#W()*LLJu-z$6Jp>aP6Sf|(h zp!Caa7-zh>#JVXj-&*qG_NS^YA7JIpo4IQ^=z635)~`puAF!UO+V+Q^oMTyUeSFj< z)4rTzEuJ~{#y{d%R_DpHb~a84SS8DQcg?vxU`<{7%c?C`Py*ZNrc5&9`xEAm7kv4w zeCvmA?`NI%1IxPZ)9RIFs{_`gOFn)5&{J}(-=4AQ=@FqEt9AF)=byZPzBRFW&n-iS z=UdlY@`K#d>#YBWqU#RGvF+mL*-a8nN-4>RCQ>LZ(IQ1JMUp5~(vm1?(NJk|1zn&=jz5H++3afPHJ~v27_X3o^9fe zbE#F|;hEHN9&M=iKIG%YqSMZbGae=)Ia`ps0LxDs)>ej$;9MAT1!h?qbBV|A#j(4~ zrS`%Wy}xIfz!&V)o>30j&1$~=rJ6?q^M4gzXl7Gv@jctY2?m|qs^2%_$D?mG!-ux~ znRNE9b(dIWO(Ts@1u)@y*4B{3T2ZQkr&T z?oKYfJaTU3|D(8PHf9J2Vr+9>li8<0RKty(er1dp~G-cud);nMNxAM$9NMVg%} zlT`2r!=rhEAL4;SW*_#Q(PYA;r++@W*>rL#hCTjfJ2JrWvhIaS7|MDdFvpW0c*ZPE zMR>_UHm&HjkTV9J+2=2(JAmglfBPrt{seG%QJ`CuH;4H9ABT!vWYa%YZRWr-BGHK? z_nW4`4@J2}>>o_}q>zx%g*io&Xm(oRUl#rRkKKCh67FS|{VFo_hsel6@qWfLCRLf# z%nXRrm}e60a8#D*$U z<^=!7`shmLNuOrZ*I&|6n{F~lJb0;f6yC?c%H$dC{X0Kj3Sjk(bxF>P)iV zGfG}xSS0B!d%^^LfIdr2e^y8B*qQSw|+mn7w zn)g-H|3^HVT9s{rpZ4%bbhq>dN1RJTu^y|%!0nwcK3!Pyk43w+6QXACVUm`3f;{I0 z_+6Olx{*U%I=wbOA{~=5^#ec{ZZc8q-e4Z7ysG{~Uu;{w;58i7|_Pn