From 0e2aab818ebcb12248d4b825795e44caa81e6aac Mon Sep 17 00:00:00 2001
From: Naxin
Date: Thu, 6 Nov 2025 16:28:05 -0500
Subject: [PATCH 01/25] ci

---
 MANIFEST.in                          | 4 +++-
 Makefile                             | 6 ++++++
 tools/source-package-verification.sh | 4 ++++
 tox.ini                              | 8 +++++++-
 4 files changed, 20 insertions(+), 2 deletions(-)

diff --git a/MANIFEST.in b/MANIFEST.in
index 7e9bbf313..cd0c97ef2 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,5 +1,7 @@
 include README.md
 include LICENSE
 include src/confluent_kafka/src/*.[ch]
+include src/confluent_kafka/py.typed
+include src/confluent_kafka/cimpl.pyi
 prune tests
-prune docs
\ No newline at end of file
+prune docs
diff --git a/Makefile b/Makefile
index 3615e2b93..100c0dd14 100644
--- a/Makefile
+++ b/Makefile
@@ -2,6 +2,9 @@ all:
 	@echo "Targets:"
 	@echo " clean"
 	@echo " docs"
+	@echo " mypy"
+	@echo " style-check"
+	@echo " style-fix"
 
 
 clean:
@@ -14,6 +17,9 @@ clean:
 docs:
 	$(MAKE) -C docs html
 
+mypy:
+	python3 -m mypy src/confluent_kafka
+
 style-check:
 	@(tools/style-format.sh \
 	    $$(git ls-tree -r --name-only HEAD | egrep '\.(c|h|py)$$') )
diff --git a/tools/source-package-verification.sh b/tools/source-package-verification.sh
index 0081d584e..bed6f0d44 100755
--- a/tools/source-package-verification.sh
+++ b/tools/source-package-verification.sh
@@ -51,6 +51,10 @@ if [[ $OS_NAME == linux && $ARCH == x64 ]]; then
     # Run these actions and tests only in this case
     echo "Building documentation ..."
     flake8 --exclude ./_venv,*_pb2.py,./build
+
+    echo "Running mypy type checking ..."
+    mypy src/confluent_kafka
+
     pip install -r requirements/requirements-docs.txt
     make docs
diff --git a/tox.ini b/tox.ini
index 2f7b43348..3b5a96b50 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,5 +1,5 @@
 [tox]
-envlist = flake8,py37,py38,py39,py310,py311,py312,py313
+envlist = flake8,mypy,py37,py38,py39,py310,py311,py312,py313
 
 [testenv]
 passenv =
@@ -20,6 +20,12 @@ commands =
 deps = flake8
 commands = flake8
 
+[testenv:mypy]
+deps =
+    mypy
+    types-cachetools
+commands = mypy src/confluent_kafka
+
 [pytest]
 python_files = test_*
 testpaths = tests

From d0de972adf5e4d690d57c6f27141b43c1528128e Mon Sep 17 00:00:00 2001
From: Naxin
Date: Thu, 6 Nov 2025 16:56:45 -0500
Subject: [PATCH 02/25] add types-requests dep

---
 requirements/requirements-tests.txt | 1 +
 tox.ini                             | 1 +
 2 files changed, 2 insertions(+)

diff --git a/requirements/requirements-tests.txt b/requirements/requirements-tests.txt
index 730bd2be5..022c7a8c2 100644
--- a/requirements/requirements-tests.txt
+++ b/requirements/requirements-tests.txt
@@ -3,6 +3,7 @@ urllib3<3
 flake8
 mypy
 types-cachetools
+types-requests
 orjson
 pytest
 pytest-timeout
diff --git a/tox.ini b/tox.ini
index 3b5a96b50..a840be054 100644
--- a/tox.ini
+++ b/tox.ini
@@ -24,6 +24,7 @@ commands = flake8
 deps =
     mypy
     types-cachetools
+    types-requests
 commands = mypy src/confluent_kafka
 
 [pytest]

From 221266f9a9234b60f5a86578527fcafb4c6d8a55 Mon Sep 17 00:00:00 2001
From: Naxin
Date: Mon, 10 Nov 2025 15:09:59 -0500
Subject: [PATCH 03/25] pin boto3

---
 requirements/requirements-rules.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements/requirements-rules.txt b/requirements/requirements-rules.txt
index 51ba6ac1a..d28976c95 100644
--- a/requirements/requirements-rules.txt
+++ b/requirements/requirements-rules.txt
@@ -1,6 +1,6 @@
 azure-identity
 azure-keyvault-keys
-boto3>=1.35
+boto3>=1.40,<1.41
 cel-python>=0.4.0
 # Pin google-re2 to last version with Python 3.9 wheels (see https://pypi.org/project/google-re2/1.1.20251105/#files)
 google-re2<1.1.20251105

From 446fe834cf4d9fe546cf644e039b6db29900e90d Mon Sep 17 00:00:00 2001
From: Naxin
Date: Mon, 10 Nov 2025 18:18:47 -0500
Subject: [PATCH 04/25] pin requests

---
 requirements/requirements-avro.txt     | 4 ++--
 requirements/requirements-examples.txt | 2 +-
 requirements/requirements-rules.txt    | 2 +-
 requirements/requirements-tests.txt    | 2 +-
 tox.ini                                | 2 +-
 5 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/requirements/requirements-avro.txt b/requirements/requirements-avro.txt
index ccb70d0c4..c8a42740c 100644
--- a/requirements/requirements-avro.txt
+++ b/requirements/requirements-avro.txt
@@ -1,4 +1,4 @@
 fastavro < 1.8.0; python_version == "3.7"
 fastavro < 2; python_version > "3.7"
-requests
-avro>=1.11.1,<2
\ No newline at end of file
+requests~=2.32.0
+avro>=1.11.1,<2
diff --git a/requirements/requirements-examples.txt b/requirements/requirements-examples.txt
index e4d6dd295..ef7cc88fb 100644
--- a/requirements/requirements-examples.txt
+++ b/requirements/requirements-examples.txt
@@ -11,7 +11,7 @@ authlib>=1.0.0
 
 fastavro < 1.8.0; python_version == "3.7"
 fastavro < 2; python_version > "3.7"
-requests
+requests~=2.32.0
 avro>=1.11.1,<2
 
 pyrsistent
diff --git a/requirements/requirements-rules.txt b/requirements/requirements-rules.txt
index d28976c95..42f12c6e8 100644
--- a/requirements/requirements-rules.txt
+++ b/requirements/requirements-rules.txt
@@ -1,6 +1,6 @@
 azure-identity
 azure-keyvault-keys
-boto3>=1.40,<1.41
+boto3
 cel-python>=0.4.0
 # Pin google-re2 to last version with Python 3.9 wheels (see https://pypi.org/project/google-re2/1.1.20251105/#files)
 google-re2<1.1.20251105
diff --git a/requirements/requirements-tests.txt b/requirements/requirements-tests.txt
index 022c7a8c2..dd2b0d8a8 100644
--- a/requirements/requirements-tests.txt
+++ b/requirements/requirements-tests.txt
@@ -3,7 +3,7 @@ urllib3<3
 flake8
 mypy
 types-cachetools
-types-requests
+types-requests~=2.32.0
 orjson
 pytest
 pytest-timeout
diff --git a/tox.ini b/tox.ini
index a840be054..b5c68cbc9 100644
--- a/tox.ini
+++ b/tox.ini
@@ -24,7 +24,7 @@ commands = flake8
 deps =
     mypy
     types-cachetools
-    types-requests
+    types-requests~=2.32.0
 commands = mypy src/confluent_kafka
 
 [pytest]

From 76bdee399383fe9a828db829fa752b01720025de Mon Sep 17 00:00:00 2001
From: Naxin
Date: Mon, 10 Nov 2025 23:17:43 -0500
Subject: [PATCH 05/25] pin boto3 for now

---
 requirements/requirements-rules.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements/requirements-rules.txt b/requirements/requirements-rules.txt
index 42f12c6e8..c73a122db 100644
--- a/requirements/requirements-rules.txt
+++ b/requirements/requirements-rules.txt
@@ -1,6 +1,6 @@
 azure-identity
 azure-keyvault-keys
-boto3
+boto3>=1.40.69
 cel-python>=0.4.0
 # Pin google-re2 to last version with Python 3.9 wheels (see https://pypi.org/project/google-re2/1.1.20251105/#files)
 google-re2<1.1.20251105

From 45983de47079d8eb0ce880b39f6570164d83bbe1 Mon Sep 17 00:00:00 2001
From: Naxin
Date: Tue, 11 Nov 2025 13:32:31 -0500
Subject: [PATCH 06/25] pin httpcore

---
 requirements/requirements-examples.txt       | 1 +
 requirements/requirements-schemaregistry.txt | 1 +
 2 files changed, 2 insertions(+)

diff --git a/requirements/requirements-examples.txt b/requirements/requirements-examples.txt
index ef7cc88fb..2e3c61dff 100644
--- a/requirements/requirements-examples.txt
+++ b/requirements/requirements-examples.txt
@@ -7,6 +7,7 @@ six
 attrs
 cachetools
 httpx>=0.26
+httpcore>=1.0.9
 authlib>=1.0.0
 
 fastavro < 1.8.0; python_version == "3.7"
diff --git a/requirements/requirements-schemaregistry.txt b/requirements/requirements-schemaregistry.txt
index 2e947f466..850372fef 100644
--- a/requirements/requirements-schemaregistry.txt
+++ b/requirements/requirements-schemaregistry.txt
@@ -2,4 +2,5 @@ attrs>=21.2.0
 cachetools>=5.5.0
 certifi
 httpx>=0.26
+httpcore>=1.0.9
 authlib>=1.0.0

From b4c6830ffd6b8e8c27075c5fe876b5f082f6ddf5 Mon Sep 17 00:00:00 2001
From: Naxin
Date: Tue, 11 Nov 2025 16:03:24 -0500
Subject: [PATCH 07/25] pin more to try

---
 requirements/requirements-examples.txt       | 4 ++--
 requirements/requirements-rules.txt          | 2 +-
 requirements/requirements-schemaregistry.txt | 4 ++--
 3 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/requirements/requirements-examples.txt b/requirements/requirements-examples.txt
index 2e3c61dff..d47fe038b 100644
--- a/requirements/requirements-examples.txt
+++ b/requirements/requirements-examples.txt
@@ -8,7 +8,7 @@ attrs
 cachetools
 httpx>=0.26
 httpcore>=1.0.9
-authlib>=1.0.0
+authlib>=1.6.5
 
 fastavro < 1.8.0; python_version == "3.7"
 fastavro < 2; python_version > "3.7"
@@ -22,7 +22,7 @@ orjson >= 3.10
 googleapis-common-protos
 protobuf
 
-azure-identity
+azure-identity>=1.25.1
 azure-keyvault-keys
 boto3
 cel-python>=0.4.0
diff --git a/requirements/requirements-rules.txt b/requirements/requirements-rules.txt
index c73a122db..0ce39678d 100644
--- a/requirements/requirements-rules.txt
+++ b/requirements/requirements-rules.txt
@@ -1,4 +1,4 @@
-azure-identity
+azure-identity>=1.25.1
 azure-keyvault-keys
 boto3>=1.40.69
 cel-python>=0.4.0
diff --git a/requirements/requirements-schemaregistry.txt b/requirements/requirements-schemaregistry.txt
index 850372fef..2959f8276 100644
--- a/requirements/requirements-schemaregistry.txt
+++ b/requirements/requirements-schemaregistry.txt
@@ -1,6 +1,6 @@
 attrs>=21.2.0
 cachetools>=5.5.0
-certifi
+certifi>=2025.10.5
 httpx>=0.26
 httpcore>=1.0.9
-authlib>=1.0.0
+authlib>=1.6.5

From 6147bec266c4a1f541ac11d2d1d323fd3890ad9d Mon Sep 17 00:00:00 2001
From: Naxin
Date: Tue, 11 Nov 2025 23:46:13 -0500
Subject: [PATCH 08/25] try pip upgrade

---
 requirements/requirements-examples.txt       | 5 ++---
 requirements/requirements-rules.txt          | 4 ++--
 requirements/requirements-schemaregistry.txt | 5 ++---
 requirements/requirements-tests.txt          | 2 +-
 tools/source-package-verification.sh         | 1 +
 5 files changed, 8 insertions(+), 9 deletions(-)

diff --git a/requirements/requirements-examples.txt b/requirements/requirements-examples.txt
index d47fe038b..ef7cc88fb 100644
--- a/requirements/requirements-examples.txt
+++ b/requirements/requirements-examples.txt
@@ -7,8 +7,7 @@ six
 attrs
 cachetools
 httpx>=0.26
-httpcore>=1.0.9
-authlib>=1.6.5
+authlib>=1.0.0
 
 fastavro < 1.8.0; python_version == "3.7"
 fastavro < 2; python_version > "3.7"
@@ -22,7 +21,7 @@ orjson >= 3.10
 googleapis-common-protos
 protobuf
 
-azure-identity>=1.25.1
+azure-identity
 azure-keyvault-keys
 boto3
 cel-python>=0.4.0
diff --git a/requirements/requirements-rules.txt b/requirements/requirements-rules.txt
index 0ce39678d..51ba6ac1a 100644
--- a/requirements/requirements-rules.txt
+++ b/requirements/requirements-rules.txt
@@ -1,6 +1,6 @@
-azure-identity>=1.25.1
+azure-identity
 azure-keyvault-keys
-boto3>=1.40.69
+boto3>=1.35
 cel-python>=0.4.0
 # Pin google-re2 to last version with Python 3.9 wheels (see https://pypi.org/project/google-re2/1.1.20251105/#files)
 google-re2<1.1.20251105
diff --git a/requirements/requirements-schemaregistry.txt b/requirements/requirements-schemaregistry.txt
index 2959f8276..2e947f466 100644
--- a/requirements/requirements-schemaregistry.txt
+++ b/requirements/requirements-schemaregistry.txt
@@ -1,6 +1,5 @@
 attrs>=21.2.0
 cachetools>=5.5.0
-certifi>=2025.10.5
+certifi
 httpx>=0.26
-httpcore>=1.0.9
-authlib>=1.6.5
+authlib>=1.0.0
diff --git a/requirements/requirements-tests.txt b/requirements/requirements-tests.txt
index dd2b0d8a8..c113d3ac9 100644
--- a/requirements/requirements-tests.txt
+++ b/requirements/requirements-tests.txt
@@ -1,5 +1,5 @@
 # core test requirements
-urllib3<3
+urllib3>=2,<3,!=2.2.0
 flake8
 mypy
 types-cachetools
diff --git a/tools/source-package-verification.sh b/tools/source-package-verification.sh
index bed6f0d44..dfb3dca50 100755
--- a/tools/source-package-verification.sh
+++ b/tools/source-package-verification.sh
@@ -5,6 +5,7 @@
 #
 
 set -e
+pip install --upgrade pip
 pip install -r requirements/requirements-tests-install.txt
 pip install -U build

From 363f3e5d667e36ef316f2c4aaa43cc013c62d33c Mon Sep 17 00:00:00 2001
From: Naxin
Date: Tue, 11 Nov 2025 23:59:45 -0500
Subject: [PATCH 09/25] add constraints back

---
 requirements/requirements-examples.txt       | 5 +++--
 requirements/requirements-rules.txt          | 4 ++--
 requirements/requirements-schemaregistry.txt | 5 +++--
 requirements/requirements-tests.txt          | 2 +-
 4 files changed, 9 insertions(+), 7 deletions(-)

diff --git a/requirements/requirements-examples.txt b/requirements/requirements-examples.txt
index ef7cc88fb..d47fe038b 100644
--- a/requirements/requirements-examples.txt
+++ b/requirements/requirements-examples.txt
@@ -7,7 +7,8 @@ six
 attrs
 cachetools
 httpx>=0.26
-authlib>=1.0.0
+httpcore>=1.0.9
+authlib>=1.6.5
 
 fastavro < 1.8.0; python_version == "3.7"
 fastavro < 2; python_version > "3.7"
@@ -21,7 +22,7 @@ orjson >= 3.10
 googleapis-common-protos
 protobuf
 
-azure-identity
+azure-identity>=1.25.1
 azure-keyvault-keys
 boto3
 cel-python>=0.4.0
diff --git a/requirements/requirements-rules.txt b/requirements/requirements-rules.txt
index 51ba6ac1a..0ce39678d 100644
--- a/requirements/requirements-rules.txt
+++ b/requirements/requirements-rules.txt
@@ -1,6 +1,6 @@
-azure-identity
+azure-identity>=1.25.1
 azure-keyvault-keys
-boto3>=1.35
+boto3>=1.40.69
 cel-python>=0.4.0
 # Pin google-re2 to last version with Python 3.9 wheels (see https://pypi.org/project/google-re2/1.1.20251105/#files)
 google-re2<1.1.20251105
diff --git a/requirements/requirements-schemaregistry.txt b/requirements/requirements-schemaregistry.txt
index 2e947f466..2959f8276 100644
--- a/requirements/requirements-schemaregistry.txt
+++ b/requirements/requirements-schemaregistry.txt
@@ -1,5 +1,6 @@
 attrs>=21.2.0
 cachetools>=5.5.0
-certifi
+certifi>=2025.10.5
 httpx>=0.26
-authlib>=1.0.0
+httpcore>=1.0.9
+authlib>=1.6.5
diff --git a/requirements/requirements-tests.txt b/requirements/requirements-tests.txt
index c113d3ac9..22fd912c8 100644
--- a/requirements/requirements-tests.txt
+++ b/requirements/requirements-tests.txt
@@ -12,4 +12,4 @@ respx
 pytest_cov
 pluggy<1.6.0
 pytest-asyncio
-async-timeout
+async-timeout>=5.0.1

From 733ca545fe03ee8161db8c9e21652bb08bb98918 Mon Sep 17 00:00:00 2001
From: Naxin
Date: Wed, 12 Nov 2025 13:04:17 -0500
Subject: [PATCH 10/25] upgrade to python 3.11

---
 requirements/requirements-avro.txt           | 2 +-
 requirements/requirements-examples.txt       | 2 +-
 requirements/requirements-rules.txt          | 4 ++--
 requirements/requirements-schemaregistry.txt | 5 ++---
 requirements/requirements-tests.txt          | 4 ++--
 tools/source-package-verification.sh         | 2 +-
 6 files changed, 9 insertions(+), 10 deletions(-)

diff --git a/requirements/requirements-avro.txt b/requirements/requirements-avro.txt
index c8a42740c..8eefd943d 100644
--- a/requirements/requirements-avro.txt
+++ b/requirements/requirements-avro.txt
@@ -1,4 +1,4 @@
 fastavro < 1.8.0; python_version == "3.7"
 fastavro < 2; python_version > "3.7"
-requests~=2.32.0
+requests
 avro>=1.11.1,<2
diff --git a/requirements/requirements-examples.txt b/requirements/requirements-examples.txt
index d47fe038b..5d203c70c 100644
--- a/requirements/requirements-examples.txt
+++ b/requirements/requirements-examples.txt
@@ -22,8 +22,8 @@ orjson >= 3.10
 googleapis-common-protos
 protobuf
 
-azure-identity>=1.25.1
 azure-keyvault-keys
+azure-identity
 boto3
 cel-python>=0.4.0
 google-auth
diff --git a/requirements/requirements-rules.txt b/requirements/requirements-rules.txt
index 0ce39678d..3d7be2bae 100644
--- a/requirements/requirements-rules.txt
+++ b/requirements/requirements-rules.txt
@@ -1,6 +1,6 @@
-azure-identity>=1.25.1
 azure-keyvault-keys
+azure-identity
-boto3>=1.40.69
+boto3>=1.35
 cel-python>=0.4.0
 # Pin google-re2 to last version with Python 3.9 wheels (see https://pypi.org/project/google-re2/1.1.20251105/#files)
 google-re2<1.1.20251105
diff --git a/requirements/requirements-schemaregistry.txt b/requirements/requirements-schemaregistry.txt
index 2959f8276..66dc1dced 100644
--- a/requirements/requirements-schemaregistry.txt
+++ b/requirements/requirements-schemaregistry.txt
@@ -1,6 +1,5 @@
 attrs>=21.2.0
 cachetools>=5.5.0
-certifi>=2025.10.5
 httpx>=0.26
-httpcore>=1.0.9
-authlib>=1.6.5
+certifi
+authlib>=1.0.0
diff --git a/requirements/requirements-tests.txt b/requirements/requirements-tests.txt
index 22fd912c8..dd2b0d8a8 100644
--- a/requirements/requirements-tests.txt
+++ b/requirements/requirements-tests.txt
@@ -1,5 +1,5 @@
 # core test requirements
-urllib3>=2,<3,!=2.2.0
+urllib3<3
 flake8
 mypy
 types-cachetools
@@ -12,4 +12,4 @@ respx
 pytest_cov
 pluggy<1.6.0
 pytest-asyncio
-async-timeout>=5.0.1
+async-timeout
diff --git a/tools/source-package-verification.sh b/tools/source-package-verification.sh
index dfb3dca50..ff91d94ac 100755
--- a/tools/source-package-verification.sh
+++ b/tools/source-package-verification.sh
@@ -54,7 +54,7 @@ if [[ $OS_NAME == linux && $ARCH == x64 ]]; then
     flake8 --exclude ./_venv,*_pb2.py,./build
 
     echo "Running mypy type checking ..."
-    mypy src/confluent_kafka
+    python3.11 -m mypy src/confluent_kafka
 
     pip install -r requirements/requirements-docs.txt
     make docs

From eda5eee25fa1b5bdda3aa2017c072aca87432968 Mon Sep 17 00:00:00 2001
From: Naxin
Date: Wed, 12 Nov 2025 13:08:29 -0500
Subject: [PATCH 11/25] cleanup

---
 requirements/requirements-examples.txt       | 7 +++----
 requirements/requirements-rules.txt          | 2 +-
 requirements/requirements-schemaregistry.txt | 2 +-
 requirements/requirements-tests.txt          | 2 +-
 4 files changed, 6 insertions(+), 7 deletions(-)

diff --git a/requirements/requirements-examples.txt b/requirements/requirements-examples.txt
index 5d203c70c..e4d6dd295 100644
--- a/requirements/requirements-examples.txt
+++ b/requirements/requirements-examples.txt
@@ -7,12 +7,11 @@ six
 attrs
 cachetools
 httpx>=0.26
-httpcore>=1.0.9
-authlib>=1.6.5
+authlib>=1.0.0
 
 fastavro < 1.8.0; python_version == "3.7"
 fastavro < 2; python_version > "3.7"
-requests~=2.32.0
+requests
 avro>=1.11.1,<2
 
 pyrsistent
@@ -21,8 +21,8 @@ orjson >= 3.10
 googleapis-common-protos
 protobuf
 
-azure-keyvault-keys
 azure-identity
+azure-keyvault-keys
 boto3
 cel-python>=0.4.0
 google-auth
diff --git a/requirements/requirements-rules.txt b/requirements/requirements-rules.txt
index 3d7be2bae..51ba6ac1a 100644
--- a/requirements/requirements-rules.txt
+++ b/requirements/requirements-rules.txt
@@ -1,5 +1,5 @@
-azure-keyvault-keys
 azure-identity
+azure-keyvault-keys
 boto3>=1.35
 cel-python>=0.4.0
 # Pin google-re2 to last version with Python 3.9 wheels (see https://pypi.org/project/google-re2/1.1.20251105/#files)
diff --git a/requirements/requirements-schemaregistry.txt b/requirements/requirements-schemaregistry.txt
index 66dc1dced..2e947f466 100644
--- a/requirements/requirements-schemaregistry.txt
+++ b/requirements/requirements-schemaregistry.txt
@@ -1,5 +1,5 @@
 attrs>=21.2.0
 cachetools>=5.5.0
-httpx>=0.26
 certifi
+httpx>=0.26
 authlib>=1.0.0
diff --git a/requirements/requirements-tests.txt b/requirements/requirements-tests.txt
index dd2b0d8a8..022c7a8c2 100644
--- a/requirements/requirements-tests.txt
+++ b/requirements/requirements-tests.txt
@@ -3,7 +3,7 @@ urllib3<3
 flake8
 mypy
 types-cachetools
-types-requests~=2.32.0
+types-requests
 orjson
 pytest
 pytest-timeout

From 9744e2eccd169464601c0224dfda324675711bfc Mon Sep 17 00:00:00 2001
From: Naxin
Date: Wed, 12 Nov 2025 13:23:51 -0500
Subject: [PATCH 12/25] upgrade semaphore builds to use 3.11

---
 .semaphore/semaphore.yml | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)

diff --git a/.semaphore/semaphore.yml b/.semaphore/semaphore.yml
index 41077abf9..9819e1ee9 100644
--- a/.semaphore/semaphore.yml
+++ b/.semaphore/semaphore.yml
@@ -196,14 +196,14 @@ blocks:
       jobs:
         - name: Build and Tests with 'classic' group protocol
           commands:
-            - sem-version python 3.9
+            - sem-version python 3.11
            # use a virtualenv
            - python3 -m venv _venv && source _venv/bin/activate
            - chmod u+r+x tools/source-package-verification.sh
            - tools/source-package-verification.sh
        - name: Build and Tests with 'consumer' group protocol
          commands:
-            - sem-version python 3.9
+            - sem-version python 3.11
            - sem-version java 17
            # use a virtualenv
            - python3 -m venv _venv && source _venv/bin/activate
            - chmod u+r+x tools/source-package-verification.sh
            - export TEST_CONSUMER_GROUP_PROTOCOL=consumer
            - tools/source-package-verification.sh
        - name: Build, Test, and Report coverage
          commands:
-            - sem-version python 3.9
+            - sem-version python 3.11
            # use a virtualenv
            - python3 -m venv _venv && source _venv/bin/activate
            - chmod u+r+x tools/source-package-verification.sh
            - tools/source-package-verification.sh "coverage" "html"
@@ -236,7 +236,7 @@ blocks:
       jobs:
         - name: Build
           commands:
-            - sem-version python 3.9
+            - sem-version python 3.11
            # use a virtualenv
            - python3 -m venv _venv && source _venv/bin/activate
            - chmod u+r+x tools/source-package-verification.sh
@@ -255,7 +255,7 @@ blocks:
       jobs:
         - name: Build
           commands:
-            - sem-version python 3.9
+            - sem-version python 3.11
            # use a virtualenv
            - python3 -m venv _venv && source _venv/bin/activate
            - chmod u+r+x tools/source-package-verification.sh
@@ -274,7 +274,7 @@ blocks:
       jobs:
         - name: Build
           commands:
-            - sem-version python 3.9
+            - sem-version python 3.11
            # use a virtualenv
            - python3 -m venv _venv && source _venv/bin/activate
            - chmod u+r+x tools/source-package-verification.sh
@@ -301,7 +301,7 @@ blocks:
         - name: Build and Tests
           commands:
            # Setup Python environment
-            - sem-version python 3.9
+            - sem-version python 3.11
            - python3 -m venv _venv && source _venv/bin/activate
            # Install ducktape framework and additional dependencies

From 35850b2ea6bff36138d7ac2352330d640d405c0d Mon Sep 17 00:00:00 2001
From: Naxin
Date: Wed, 12 Nov 2025 13:40:12 -0500
Subject: [PATCH 13/25] fix one new type error

---
 src/confluent_kafka/deserializing_consumer.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/src/confluent_kafka/deserializing_consumer.py b/src/confluent_kafka/deserializing_consumer.py
index 324239f25..07c99bcdc 100644
--- a/src/confluent_kafka/deserializing_consumer.py
+++ b/src/confluent_kafka/deserializing_consumer.py
@@ -106,7 +106,11 @@ def poll(self, timeout: float = -1) -> Optional[Message]:
         if error is not None:
             raise ConsumeError(error, kafka_message=msg)
 
-        ctx = SerializationContext(msg.topic(), MessageField.VALUE, msg.headers())
+        topic = msg.topic()
+        if topic is None:
+            raise TypeError("Message topic is None")
+        ctx = SerializationContext(topic, MessageField.VALUE, msg.headers())
+
         value = msg.value()
         if self._value_deserializer is not None:
             try:

From 214a75cbdb30a4123c6a83b841a039175b17c62a Mon Sep 17 00:00:00 2001
From: Naxin
Date: Wed, 12 Nov 2025 13:58:33 -0500
Subject: [PATCH 14/25] whitespace

---
 src/confluent_kafka/deserializing_consumer.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/confluent_kafka/deserializing_consumer.py b/src/confluent_kafka/deserializing_consumer.py
index 07c99bcdc..12b030b7f 100644
--- a/src/confluent_kafka/deserializing_consumer.py
+++ b/src/confluent_kafka/deserializing_consumer.py
@@ -110,7 +110,7 @@ def poll(self, timeout: float = -1) -> Optional[Message]:
         if topic is None:
             raise TypeError("Message topic is None")
         ctx = SerializationContext(topic, MessageField.VALUE, msg.headers())
-        
+
         value = msg.value()
         if self._value_deserializer is not None:
             try:

From 9fc0e054aa889aa409c36a80a3ab43ed70622f05 Mon Sep 17 00:00:00 2001
From: Matthew Seal
Date: Wed, 19 Nov 2025 14:47:11 -0800
Subject: [PATCH 15/25] Resolving setup issues

---
 DEVELOPER.md                                  |   5 +-
 docs/conf.py                                  |  31 +-
 examples/protobuf/user_pb2.py                 |  15 +-
 pyproject.toml                                |   1 +
 src/confluent_kafka/cimpl.pyi                 | 164 ++++------
 .../_sync/schema_registry_client.py           |   2 +-
 src/confluent_kafka/src/Admin.c               |  12 +-
 .../data/proto/DependencyTestProto_pb2.py     |  34 ++-
 .../data/proto/NestedTestProto_pb2.py         |  57 ++--
 .../data/proto/PublicTestProto_pb2.py         |  15 +-
 .../data/proto/SInt32Value_pb2.py             |  19 +-
 .../data/proto/SInt64Value_pb2.py             |  19 +-
 .../data/proto/TestProto_pb2.py               |  17 +-
 .../data/proto/common_proto_pb2.py            |  41 +--
 .../data/proto/exampleProtoCriteo_pb2.py      |  41 ++-
 .../data/proto/metadata_proto_pb2.py          | 279 +++++++++---------
 tox.ini                                       |   8 +
 17 files changed, 385 insertions(+), 375 deletions(-)

diff --git a/DEVELOPER.md b/DEVELOPER.md
index 9bc1d8c6b..88fef7810 100644
--- a/DEVELOPER.md
+++ b/DEVELOPER.md
@@ -212,8 +212,11 @@ tox -e black,isort
 # Check linting
 tox -e flake8
 
+# Check typing
+tox -e mypy
+
 # Run all formatting and linting checks
-tox -e black,isort,flake8
+tox -e black,isort,flake8,mypy
 ```
 
 ## Documentation build
diff --git a/docs/conf.py b/docs/conf.py
index 8b2701fd1..5010711af 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -13,6 +13,7 @@
 # serve to show the default.
 
 import os
+
 try:
     import tomllib
 except ImportError:
@@ -39,6 +40,7 @@ def _read_version_from_pyproject(pyproject_path=None):
     elif "tool" in data and "poetry" in data["tool"] and "version" in data["tool"]["poetry"]:
         return data["tool"]["poetry"]["version"]
 
+
 # -- General configuration ------------------------------------------------
@@ -64,12 +66,7 @@
 # Add any Sphinx extension module names here, as strings. They can be
 # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
 # ones.
-extensions = [
-    'sphinx.ext.autodoc',
-    'sphinx.ext.coverage',
-    'sphinx.ext.napoleon',
-    'sphinx.ext.viewcode'
-]
+extensions = ['sphinx.ext.autodoc', 'sphinx.ext.coverage', 'sphinx.ext.napoleon', 'sphinx.ext.viewcode']
 
 # Add any paths that contain templates here, relative to this directory.
 templates_path = ['_templates']
@@ -212,10 +209,8 @@
 latex_elements = {
     # The paper size ('letterpaper' or 'a4paper').
     # 'papersize': 'letterpaper',
-
     # The font size ('10pt', '11pt' or '12pt').
     # 'pointsize': '10pt',
-
     # Additional stuff for the LaTeX preamble.
     # 'preamble': '',
 }
@@ -224,8 +219,7 @@
 # (source start file, target name, title,
 #  author, documentclass [howto, manual, or own class]).
 latex_documents = [
-    ('index', 'confluent-kafka.tex', u'confluent-kafka Documentation',
-     u'Magnus Edenhill', 'manual'),
+    ('index', 'confluent-kafka.tex', u'confluent-kafka Documentation', u'Magnus Edenhill', 'manual'),
 ]
 
 # The name of an image file (relative to this directory) to place at the top of
@@ -253,10 +247,7 @@
 
 # One entry per manual page. List of tuples
 # (source start file, name, description, authors, manual section).
-man_pages = [
-    ('index', 'confluent-kafka', u'confluent-kafka Documentation',
-     [u'Magnus Edenhill'], 1)
-]
+man_pages = [('index', 'confluent-kafka', u'confluent-kafka Documentation', [u'Magnus Edenhill'], 1)]
 
 # If true, show URL addresses after external links.
 # man_show_urls = False
@@ -268,9 +259,15 @@
 # (source start file, target name, title, author,
 #  dir menu entry, description, category)
 texinfo_documents = [
-    ('index', 'confluent-kafka', u'confluent-kafka Documentation',
-     u'Magnus Edenhill', 'confluent-kafka', 'One line description of project.',
-     'Miscellaneous'),
+    (
+        'index',
+        'confluent-kafka',
+        u'confluent-kafka Documentation',
+        u'Magnus Edenhill',
+        'confluent-kafka',
+        'One line description of project.',
+        'Miscellaneous',
+    ),
 ]
 
 # Documents to append as an appendix to all manuals.
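The _read_version_from_pyproject helper that the docs/conf.py hunk above reformats reads the package version out of pyproject.toml with tomllib, preferring the PEP 621 [project] table over the Poetry layout. A minimal standalone sketch of the same logic, assuming the usual tomli fallback on interpreters older than 3.11 (the name read_version and the default path are illustrative, not part of the patch):

    import os

    try:
        import tomllib  # Python 3.11+
    except ImportError:
        import tomli as tomllib  # assumed fallback dependency

    def read_version(pyproject_path=None):
        # Default to the pyproject.toml one directory above this file.
        if pyproject_path is None:
            pyproject_path = os.path.join(os.path.dirname(__file__), "..", "pyproject.toml")
        with open(pyproject_path, "rb") as f:
            data = tomllib.load(f)
        # Prefer the PEP 621 [project] table, then fall back to [tool.poetry].
        if "project" in data and "version" in data["project"]:
            return data["project"]["version"]
        if "tool" in data and "poetry" in data["tool"] and "version" in data["tool"]["poetry"]:
            return data["tool"]["poetry"]["version"]
        return None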
diff --git a/examples/protobuf/user_pb2.py b/examples/protobuf/user_pb2.py
index e113ac76c..68872e00e 100644
--- a/examples/protobuf/user_pb2.py
+++ b/examples/protobuf/user_pb2.py
@@ -6,6 +6,7 @@
 from google.protobuf import descriptor as _descriptor
 from google.protobuf import descriptor_pool as _descriptor_pool
 from google.protobuf import symbol_database as _symbol_database
+
 # @@protoc_insertion_point(imports)
 
 _sym_db = _symbol_database.Default()
@@ -14,15 +15,17 @@
 from confluent_kafka.schema_registry.confluent import meta_pb2 as confluent_dot_meta__pb2
 
 
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\nuser.proto\x1a\x14\x63onfluent/meta.proto\"O\n\x04User\x12\x16\n\x04name\x18\x01 \x01(\tB\x08\x82\x44\x05\x1a\x03PII\x12\x17\n\x0f\x66\x61vorite_number\x18\x02 \x01(\x03\x12\x16\n\x0e\x66\x61vorite_color\x18\x03 \x01(\tb\x06proto3')
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+    b'\n\nuser.proto\x1a\x14\x63onfluent/meta.proto\"O\n\x04User\x12\x16\n\x04name\x18\x01 \x01(\tB\x08\x82\x44\x05\x1a\x03PII\x12\x17\n\x0f\x66\x61vorite_number\x18\x02 \x01(\x03\x12\x16\n\x0e\x66\x61vorite_color\x18\x03 \x01(\tb\x06proto3'
+)
 
 _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
 _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'user_pb2', globals())
 if _descriptor._USE_C_DESCRIPTORS == False:
-  DESCRIPTOR._options = None
-  _USER.fields_by_name['name']._options = None
-  _USER.fields_by_name['name']._serialized_options = b'\202D\005\032\003PII'
-  _USER._serialized_start=36
-  _USER._serialized_end=115
+    DESCRIPTOR._options = None
+    _USER.fields_by_name['name']._options = None
+    _USER.fields_by_name['name']._serialized_options = b'\202D\005\032\003PII'
+    _USER._serialized_start = 36
+    _USER._serialized_end = 115
 # @@protoc_insertion_point(module_scope)
diff --git a/pyproject.toml b/pyproject.toml
index 18e29cd2d..3cf400bf2 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -87,6 +87,7 @@ skip = [
     "dist",
     "tmp-build",
     "tmp-KafkaCluster",
+    ".tox",
 ]
 skip_glob = [
     "*_pb2.py",
diff --git a/src/confluent_kafka/cimpl.pyi b/src/confluent_kafka/cimpl.pyi
index a5ee21b80..adc8e3f41 100644
--- a/src/confluent_kafka/cimpl.pyi
+++ b/src/confluent_kafka/cimpl.pyi
@@ -34,9 +34,10 @@ TODO: Consider migrating to Cython in the future to eliminate this dual
 maintenance burden and get type hints directly from the implementation.
 """
 
-from typing import Any, Optional, Callable, List, Tuple, Dict, Union, overload
-from typing_extensions import Self, Literal
 import builtins
+from typing import Any, Callable, Dict, List, Optional, Tuple, Union, overload
+
+from typing_extensions import Literal, Self
 
 from confluent_kafka.admin._metadata import ClusterMetadata, GroupMetadata
 
@@ -76,11 +77,19 @@ class KafkaException(Exception):
     args: Tuple[Any, ...]
 
 class Message:
-    def __init__(self, topic: Optional[str] = ..., partition: Optional[int] = ..., offset: Optional[int] = ...,
-                 key: Optional[bytes] = ..., value: Optional[bytes] = ...,
-                 headers: Optional[HeadersType] = ..., error: Optional[KafkaError] = ...,
-                 timestamp: Optional[Tuple[int, int]] = ..., latency: Optional[float] = ...,
-                 leader_epoch: Optional[int] = ...) -> None: ...
+    def __init__(
+        self,
+        topic: Optional[str] = ...,
+        partition: Optional[int] = ...,
+        offset: Optional[int] = ...,
+        key: Optional[bytes] = ...,
+        value: Optional[bytes] = ...,
+        headers: Optional[HeadersType] = ...,
+        error: Optional[KafkaError] = ...,
+        timestamp: Optional[Tuple[int, int]] = ...,
+        latency: Optional[float] = ...,
+        leader_epoch: Optional[int] = ...,
+    ) -> None: ...
     def topic(self) -> Optional[str]: ...
     def partition(self) -> Optional[int]: ...
     def offset(self) -> Optional[int]: ...
@@ -129,7 +138,7 @@ class Producer:
         callback: Optional[DeliveryCallback] = None,
         on_delivery: Optional[DeliveryCallback] = None,
         timestamp: int = 0,
-        headers: Optional[HeadersType] = None
+        headers: Optional[HeadersType] = None,
     ) -> None: ...
     def produce_batch(
         self,
@@ -137,25 +146,17 @@
         topic: str,
         messages: List[Dict[str, Any]],
         partition: int = -1,
         callback: Optional[DeliveryCallback] = None,
-        on_delivery: Optional[DeliveryCallback] = None
+        on_delivery: Optional[DeliveryCallback] = None,
     ) -> int: ...
     def poll(self, timeout: float = -1) -> int: ...
     def flush(self, timeout: float = -1) -> int: ...
-    def purge(
-        self,
-        in_queue: bool = True,
-        in_flight: bool = True,
-        blocking: bool = True
-    ) -> None: ...
+    def purge(self, in_queue: bool = True, in_flight: bool = True, blocking: bool = True) -> None: ...
     def abort_transaction(self, timeout: float = -1) -> None: ...
     def begin_transaction(self) -> None: ...
     def commit_transaction(self, timeout: float = -1) -> None: ...
     def init_transactions(self, timeout: float = -1) -> None: ...
     def send_offsets_to_transaction(
-        self,
-        positions: List[TopicPartition],
-        group_metadata: Any,  # ConsumerGroupMetadata
-        timeout: float = -1
+        self, positions: List[TopicPartition], group_metadata: Any, timeout: float = -1  # ConsumerGroupMetadata
     ) -> None: ...
     def list_topics(self, topic: Optional[str] = None, timeout: float = -1) -> Any: ...
     def set_sasl_credentials(self, username: str, password: str) -> None: ...
@@ -171,7 +172,7 @@ class Consumer:
         topics: List[str],
         on_assign: Optional[RebalanceCallback] = None,
         on_revoke: Optional[RebalanceCallback] = None,
-        on_lost: Optional[RebalanceCallback] = None
+        on_lost: Optional[RebalanceCallback] = None,
     ) -> None: ...
     def unsubscribe(self) -> None: ...
     def poll(self, timeout: float = -1) -> Optional[Message]: ...
@@ -184,44 +185,31 @@ class Consumer:
         self,
         message: Optional['Message'] = None,
         offsets: Optional[List[TopicPartition]] = None,
-        asynchronous: Literal[True] = True
+        asynchronous: Literal[True] = True,
     ) -> None: ...
     @overload
     def commit(
         self,
         message: Optional['Message'] = None,
         offsets: Optional[List[TopicPartition]] = None,
-        asynchronous: Literal[False] = False
+        asynchronous: Literal[False] = False,
     ) -> List[TopicPartition]: ...
     def get_watermark_offsets(
-        self,
-        partition: TopicPartition,
-        timeout: float = -1,
-        cached: bool = False
+        self, partition: TopicPartition, timeout: float = -1, cached: bool = False
     ) -> Tuple[int, int]: ...
     def pause(self, partitions: List[TopicPartition]) -> None: ...
     def resume(self, partitions: List[TopicPartition]) -> None: ...
     def seek(self, partition: TopicPartition) -> None: ...
     def position(self, partitions: List[TopicPartition]) -> List[TopicPartition]: ...
     def store_offsets(
-        self,
-        message: Optional['Message'] = None,
-        offsets: Optional[List[TopicPartition]] = None
+        self, message: Optional['Message'] = None, offsets: Optional[List[TopicPartition]] = None
     ) -> None: ...
-    def committed(
-        self,
-        partitions: List[TopicPartition],
-        timeout: float = -1
-    ) -> List[TopicPartition]: ...
+    def committed(self, partitions: List[TopicPartition], timeout: float = -1) -> List[TopicPartition]: ...
     def close(self) -> None: ...
     def __enter__(self) -> "Consumer": ...
     def __exit__(self, exc_type: Any, exc_value: Any, exc_traceback: Any) -> Optional[bool]: ...
     def list_topics(self, topic: Optional[str] = None, timeout: float = -1) -> Any: ...
-    def offsets_for_times(
-        self,
-        partitions: List[TopicPartition],
-        timeout: float = -1
-    ) -> List[TopicPartition]: ...
+    def offsets_for_times(self, partitions: List[TopicPartition], timeout: float = -1) -> List[TopicPartition]: ...
     def incremental_assign(self, partitions: List[TopicPartition]) -> None: ...
     def incremental_unassign(self, partitions: List[TopicPartition]) -> None: ...
     def consumer_group_metadata(self) -> Any: ...  # ConsumerGroupMetadata
@@ -239,14 +227,10 @@ class _AdminClientImpl:
         future: Any,
         validate_only: bool = False,
         request_timeout: float = -1,
-        operation_timeout: float = -1
+        operation_timeout: float = -1,
     ) -> None: ...
     def delete_topics(
-        self,
-        topics: List[str],
-        future: Any,
-        request_timeout: float = -1,
-        operation_timeout: float = -1
+        self, topics: List[str], future: Any, request_timeout: float = -1, operation_timeout: float = -1
     ) -> None: ...
     def create_partitions(
         self,
@@ -254,88 +238,54 @@ class _AdminClientImpl:
         future: Any,
         validate_only: bool = False,
         request_timeout: float = -1,
-        operation_timeout: float = -1
+        operation_timeout: float = -1,
     ) -> None: ...
     def describe_topics(
         self,
         future: Any,
         topic_names: List[str],
         request_timeout: float = -1,
-        include_authorized_operations: bool = False
+        include_authorized_operations: bool = False,
     ) -> None: ...
     def describe_cluster(
-        self,
-        future: Any,
-        request_timeout: float = -1,
-        include_authorized_operations: bool = False
+        self, future: Any, request_timeout: float = -1, include_authorized_operations: bool = False
     ) -> None: ...
-    def list_topics(
-        self,
-        topic: Optional[str] = None,
-        timeout: float = -1
-    ) -> ClusterMetadata: ...
-    def list_groups(
-        self,
-        group: Optional[str] = None,
-        timeout: float = -1
-    ) -> List[GroupMetadata]: ...
+    def list_topics(self, topic: Optional[str] = None, timeout: float = -1) -> ClusterMetadata: ...
+    def list_groups(self, group: Optional[str] = None, timeout: float = -1) -> List[GroupMetadata]: ...
     def describe_consumer_groups(
         self,
         group_ids: List[str],
         future: Any,
         request_timeout: float = -1,
-        include_authorized_operations: bool = False
+        include_authorized_operations: bool = False,
     ) -> None: ...
     def list_consumer_groups(
         self,
         future: Any,
         states_int: Optional[List[int]] = None,
         types_int: Optional[List[int]] = None,
-        request_timeout: float = -1
+        request_timeout: float = -1,
     ) -> None: ...
     def list_consumer_group_offsets(
         self,
         request: Any,  # ConsumerGroupTopicPartitions
         future: Any,
         require_stable: bool = False,
-        request_timeout: float = -1
+        request_timeout: float = -1,
     ) -> None: ...
     def alter_consumer_group_offsets(
-        self,
-        requests: Any,  # List[ConsumerGroupTopicPartitions]
-        future: Any,
-        request_timeout: float = -1
-    def create_acls(
-        self,
-        acls: List[Any],  # List[AclBinding]
-        future: Any,
-        request_timeout: float = -1
+        self, requests: Any, future: Any, request_timeout: float = -1  # List[ConsumerGroupTopicPartitions]
     ) -> None: ...
+    def delete_consumer_groups(self, group_ids: List[str], future: Any, request_timeout: float = -1) -> None: ...
+    def create_acls(self, acls: List[Any], future: Any, request_timeout: float = -1) -> None: ...  # List[AclBinding]
     def describe_acls(
-        self,
-        acl_binding_filter: Any,  # AclBindingFilter
-        future: Any,
-        request_timeout: float = -1
+        self, acl_binding_filter: Any, future: Any, request_timeout: float = -1  # AclBindingFilter
     ) -> None: ...
     def delete_acls(
-        self,
-        acls: List[Any],  # List[AclBindingFilter]
-        future: Any,
-        request_timeout: float = -1
+        self, acls: List[Any], future: Any, request_timeout: float = -1  # List[AclBindingFilter]
     ) -> None: ...
     def describe_configs(
-        self,
-        resources: List[Any],  # List[ConfigResource]
-        future: Any,
-        request_timeout: float = -1,
-        broker: int = -1
+        self, resources: List[Any], future: Any, request_timeout: float = -1, broker: int = -1  # List[ConfigResource]
     ) -> None: ...
     def alter_configs(
         self,
@@ -343,7 +293,7 @@
         future: Any,
         validate_only: bool = False,
         request_timeout: float = -1,
-        broker: int = -1
+        broker: int = -1,
     ) -> None: ...
     def incremental_alter_configs(
         self,
@@ -351,33 +301,27 @@
         future: Any,
         validate_only: bool = False,
         request_timeout: float = -1,
-        broker: int = -1
+        broker: int = -1,
     ) -> None: ...
     def describe_user_scram_credentials(
-        self,
-        users: Optional[List[str]],
-        future: Any,
-        request_timeout: float = -1
+        self, users: Optional[List[str]], future: Any, request_timeout: float = -1
     ) -> None: ...
     def alter_user_scram_credentials(
-        self,
-        alterations: List[Any],  # List[UserScramCredentialAlteration]
-        future: Any,
-        request_timeout: float = -1
+        self, alterations: List[Any], future: Any, request_timeout: float = -1  # List[UserScramCredentialAlteration]
     ) -> None: ...
     def list_offsets(
         self,
         topic_partitions: List[TopicPartition],
         future: Any,
         isolation_level_value: Optional[int] = None,
-        request_timeout: float = -1
+        request_timeout: float = -1,
     ) -> None: ...
     def delete_records(
         self,
         topic_partition_offsets: List[TopicPartition],
         future: Any,
         request_timeout: float = -1,
-        operation_timeout: float = -1
+        operation_timeout: float = -1,
     ) -> None: ...
     def elect_leaders(
         self,
@@ -385,7 +329,7 @@
         partitions: Optional[List[TopicPartition]],
         future: Any,
         request_timeout: float = -1,
-        operation_timeout: float = -1
+        operation_timeout: float = -1,
     ) -> None: ...
     def poll(self, timeout: float = -1) -> int: ...
     def set_sasl_credentials(self, username: str, password: str) -> None: ...
@@ -397,7 +341,7 @@ class NewTopic:
         num_partitions: int = -1,
         replication_factor: int = -1,
         replica_assignment: Optional[List[List[int]]] = None,
-        config: Optional[Dict[str, str]] = None
+        config: Optional[Dict[str, str]] = None,
     ) -> None: ...
     topic: str
     num_partitions: int
@@ -415,10 +359,7 @@ class NewPartitions:
     def __init__(
-        self,
-        topic: str,
-        new_total_count: int,
-        replica_assignment: Optional[List[List[int]]] = None
+        self, topic: str, new_total_count: int, replica_assignment: Optional[List[List[int]]] = None
     ) -> None: ...
     topic: str
     new_total_count: int
@@ -436,7 +377,6 @@
 
 def libversion() -> Tuple[str, int]: ...
 def version() -> Tuple[str, int]: ...
-
 def murmur2(key: bytes, partition_count: int) -> int: ...
 def consistent(key: bytes, partition_count: int) -> int: ...
 def fnv1a(key: bytes, partition_count: int) -> int: ...
diff --git a/src/confluent_kafka/schema_registry/_sync/schema_registry_client.py b/src/confluent_kafka/schema_registry/_sync/schema_registry_client.py
index 5c357060b..7f1f3cb45 100644
--- a/src/confluent_kafka/schema_registry/_sync/schema_registry_client.py
+++ b/src/confluent_kafka/schema_registry/_sync/schema_registry_client.py
@@ -705,7 +705,7 @@ def register_schema_full_response(
         `POST Subject Version API Reference `_
         """  # noqa: E501
 
-        schema_id = self._cache.get_id_by_schema(subject_name, schema)
+        schema_id: Optional[int] = self._cache.get_id_by_schema(subject_name, schema)
         if schema_id is not None:
             result = self._cache.get_schema_by_id(subject_name, schema_id)
             if result is not None:
diff --git a/src/confluent_kafka/src/Admin.c b/src/confluent_kafka/src/Admin.c
index 06953e867..e56a3bbf5 100644
--- a/src/confluent_kafka/src/Admin.c
+++ b/src/confluent_kafka/src/Admin.c
@@ -99,12 +99,12 @@ struct Admin_options {
  * Make sure this is kept up to date with Admin_options above. */
 #define Admin_options_INITIALIZER                                              \
         {                                                                      \
-        Admin_options_def_int, Admin_options_def_float,                        \
-        Admin_options_def_float, Admin_options_def_int,                        \
-        Admin_options_def_int, Admin_options_def_int,                          \
-        Admin_options_def_int, Admin_options_def_ptr,                          \
-        Admin_options_def_cnt, Admin_options_def_ptr,                          \
-        Admin_options_def_cnt,                                                 \
+                Admin_options_def_int, Admin_options_def_float,                \
+                Admin_options_def_float, Admin_options_def_int,                \
+                Admin_options_def_int, Admin_options_def_int,                  \
+                Admin_options_def_int, Admin_options_def_ptr,                  \
+                Admin_options_def_cnt, Admin_options_def_ptr,                  \
+                Admin_options_def_cnt,                                         \
 }
 
 #define Admin_options_is_set_int(v) ((v) != Admin_options_def_int)
diff --git a/tests/integration/schema_registry/data/proto/DependencyTestProto_pb2.py b/tests/integration/schema_registry/data/proto/DependencyTestProto_pb2.py
index 646cabb52..60a4689cf 100644
--- a/tests/integration/schema_registry/data/proto/DependencyTestProto_pb2.py
+++ b/tests/integration/schema_registry/data/proto/DependencyTestProto_pb2.py
@@ -6,27 +6,41 @@
 from google.protobuf import descriptor as _descriptor
 from google.protobuf import descriptor_pool as _descriptor_pool
 from google.protobuf import symbol_database as _symbol_database
+
 # @@protoc_insertion_point(imports)
 
 _sym_db = _symbol_database.Default()
 
 
-from tests.integration.schema_registry.data.proto import NestedTestProto_pb2 as tests_dot_integration_dot_schema__registry_dot_data_dot_proto_dot_NestedTestProto__pb2
-from tests.integration.schema_registry.data.proto import PublicTestProto_pb2 as tests_dot_integration_dot_schema__registry_dot_data_dot_proto_dot_PublicTestProto__pb2
+from tests.integration.schema_registry.data.proto import (
+    NestedTestProto_pb2 as tests_dot_integration_dot_schema__registry_dot_data_dot_proto_dot_NestedTestProto__pb2,
+)
+from tests.integration.schema_registry.data.proto import (
+    PublicTestProto_pb2 as tests_dot_integration_dot_schema__registry_dot_data_dot_proto_dot_PublicTestProto__pb2,
+)
+
 try:
-    tests_dot_integration_dot_schema__registry_dot_data_dot_proto_dot_TestProto__pb2 = tests_dot_integration_dot_schema__registry_dot_data_dot_proto_dot_PublicTestProto__pb2.tests_dot_integration_dot_schema__registry_dot_data_dot_proto_dot_TestProto__pb2
+    tests_dot_integration_dot_schema__registry_dot_data_dot_proto_dot_TestProto__pb2 = (
tests_dot_integration_dot_schema__registry_dot_data_dot_proto_dot_PublicTestProto__pb2.tests_dot_integration_dot_schema__registry_dot_data_dot_proto_dot_TestProto__pb2 + ) except AttributeError: - tests_dot_integration_dot_schema__registry_dot_data_dot_proto_dot_TestProto__pb2 = tests_dot_integration_dot_schema__registry_dot_data_dot_proto_dot_PublicTestProto__pb2.tests.integration.schema_registry.data.proto.TestProto_pb2 + tests_dot_integration_dot_schema__registry_dot_data_dot_proto_dot_TestProto__pb2 = ( + tests_dot_integration_dot_schema__registry_dot_data_dot_proto_dot_PublicTestProto__pb2.tests.integration.schema_registry.data.proto.TestProto_pb2 + ) -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\nFtests/integration/schema_registry/data/proto/DependencyTestProto.proto\x12$tests.integration.serialization.data\x1a\x42tests/integration/schema_registry/data/proto/NestedTestProto.proto\x1a\x42tests/integration/schema_registry/data/proto/PublicTestProto.proto\"\x98\x01\n\x11\x44\x65pendencyMessage\x12K\n\x0enested_message\x18\x01 \x01(\x0b\x32\x33.tests.integration.serialization.data.NestedMessage\x12\x11\n\tis_active\x18\x02 \x01(\x08\x12#\n\rtest_messsage\x18\x03 \x01(\x0b\x32\x0c.TestMessageB.\n,io.confluent.kafka.serializers.protobuf.testb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\nFtests/integration/schema_registry/data/proto/DependencyTestProto.proto\x12$tests.integration.serialization.data\x1a\x42tests/integration/schema_registry/data/proto/NestedTestProto.proto\x1a\x42tests/integration/schema_registry/data/proto/PublicTestProto.proto\"\x98\x01\n\x11\x44\x65pendencyMessage\x12K\n\x0enested_message\x18\x01 \x01(\x0b\x32\x33.tests.integration.serialization.data.NestedMessage\x12\x11\n\tis_active\x18\x02 \x01(\x08\x12#\n\rtest_messsage\x18\x03 \x01(\x0b\x32\x0c.TestMessageB.\n,io.confluent.kafka.serializers.protobuf.testb\x06proto3' +) _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'tests.integration.schema_registry.data.proto.DependencyTestProto_pb2', globals()) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, 'tests.integration.schema_registry.data.proto.DependencyTestProto_pb2', globals() +) if _descriptor._USE_C_DESCRIPTORS == False: - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n,io.confluent.kafka.serializers.protobuf.test' - _DEPENDENCYMESSAGE._serialized_start=249 - _DEPENDENCYMESSAGE._serialized_end=401 + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n,io.confluent.kafka.serializers.protobuf.test' + _DEPENDENCYMESSAGE._serialized_start = 249 + _DEPENDENCYMESSAGE._serialized_end = 401 # @@protoc_insertion_point(module_scope) diff --git a/tests/integration/schema_registry/data/proto/NestedTestProto_pb2.py b/tests/integration/schema_registry/data/proto/NestedTestProto_pb2.py index 1a7cda3b4..2246c4bc2 100644 --- a/tests/integration/schema_registry/data/proto/NestedTestProto_pb2.py +++ b/tests/integration/schema_registry/data/proto/NestedTestProto_pb2.py @@ -6,6 +6,7 @@ from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import symbol_database as _symbol_database + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() @@ -14,34 +15,38 @@ from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\nBtests/integration/schema_registry/data/proto/NestedTestProto.proto\x12$tests.integration.serialization.data\x1a\x1fgoogle/protobuf/timestamp.proto\"\x8c\x01\n\x06UserId\x12\x17\n\rkafka_user_id\x18\x01 \x01(\tH\x00\x12\x17\n\rother_user_id\x18\x02 \x01(\x05H\x00\x12\x45\n\nanother_id\x18\x03 \x01(\x0b\x32/.tests.integration.serialization.data.MessageIdH\x00\x42\t\n\x07user_id\"\x17\n\tMessageId\x12\n\n\x02id\x18\x01 \x01(\t\"R\n\x0b\x43omplexType\x12\x10\n\x06one_id\x18\x01 \x01(\tH\x00\x12\x12\n\x08other_id\x18\x02 \x01(\x05H\x00\x12\x11\n\tis_active\x18\x03 \x01(\x08\x42\n\n\x08some_val\"\xd0\x04\n\rNestedMessage\x12=\n\x07user_id\x18\x01 \x01(\x0b\x32,.tests.integration.serialization.data.UserId\x12\x11\n\tis_active\x18\x02 \x01(\x08\x12\x1a\n\x12\x65xperiments_active\x18\x03 \x03(\t\x12<\n\x06status\x18\x05 \x01(\x0e\x32,.tests.integration.serialization.data.Status\x12G\n\x0c\x63omplex_type\x18\x06 \x01(\x0b\x32\x31.tests.integration.serialization.data.ComplexType\x12R\n\x08map_type\x18\x07 \x03(\x0b\x32@.tests.integration.serialization.data.NestedMessage.MapTypeEntry\x12O\n\x05inner\x18\x08 \x01(\x0b\x32@.tests.integration.serialization.data.NestedMessage.InnerMessage\x1a.\n\x0cMapTypeEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a/\n\x0cInnerMessage\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x0f\n\x03ids\x18\x02 \x03(\x05\x42\x02\x10\x01\"(\n\tInnerEnum\x12\x08\n\x04ZERO\x10\x00\x12\r\n\tALSO_ZERO\x10\x00\x1a\x02\x10\x01J\x04\x08\x0e\x10\x0fJ\x04\x08\x0f\x10\x10J\x04\x08\t\x10\x0cR\x03\x66ooR\x03\x62\x61r*\"\n\x06Status\x12\n\n\x06\x41\x43TIVE\x10\x00\x12\x0c\n\x08INACTIVE\x10\x01\x42\x41\n,io.confluent.kafka.serializers.protobuf.testB\x0fNestedTestProtoP\x00\x62\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\nBtests/integration/schema_registry/data/proto/NestedTestProto.proto\x12$tests.integration.serialization.data\x1a\x1fgoogle/protobuf/timestamp.proto\"\x8c\x01\n\x06UserId\x12\x17\n\rkafka_user_id\x18\x01 \x01(\tH\x00\x12\x17\n\rother_user_id\x18\x02 \x01(\x05H\x00\x12\x45\n\nanother_id\x18\x03 \x01(\x0b\x32/.tests.integration.serialization.data.MessageIdH\x00\x42\t\n\x07user_id\"\x17\n\tMessageId\x12\n\n\x02id\x18\x01 \x01(\t\"R\n\x0b\x43omplexType\x12\x10\n\x06one_id\x18\x01 \x01(\tH\x00\x12\x12\n\x08other_id\x18\x02 \x01(\x05H\x00\x12\x11\n\tis_active\x18\x03 \x01(\x08\x42\n\n\x08some_val\"\xd0\x04\n\rNestedMessage\x12=\n\x07user_id\x18\x01 \x01(\x0b\x32,.tests.integration.serialization.data.UserId\x12\x11\n\tis_active\x18\x02 \x01(\x08\x12\x1a\n\x12\x65xperiments_active\x18\x03 \x03(\t\x12<\n\x06status\x18\x05 \x01(\x0e\x32,.tests.integration.serialization.data.Status\x12G\n\x0c\x63omplex_type\x18\x06 \x01(\x0b\x32\x31.tests.integration.serialization.data.ComplexType\x12R\n\x08map_type\x18\x07 \x03(\x0b\x32@.tests.integration.serialization.data.NestedMessage.MapTypeEntry\x12O\n\x05inner\x18\x08 \x01(\x0b\x32@.tests.integration.serialization.data.NestedMessage.InnerMessage\x1a.\n\x0cMapTypeEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a/\n\x0cInnerMessage\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x0f\n\x03ids\x18\x02 
\x03(\x05\x42\x02\x10\x01\"(\n\tInnerEnum\x12\x08\n\x04ZERO\x10\x00\x12\r\n\tALSO_ZERO\x10\x00\x1a\x02\x10\x01J\x04\x08\x0e\x10\x0fJ\x04\x08\x0f\x10\x10J\x04\x08\t\x10\x0cR\x03\x66ooR\x03\x62\x61r*\"\n\x06Status\x12\n\n\x06\x41\x43TIVE\x10\x00\x12\x0c\n\x08INACTIVE\x10\x01\x42\x41\n,io.confluent.kafka.serializers.protobuf.testB\x0fNestedTestProtoP\x00\x62\x06proto3' +) _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'tests.integration.schema_registry.data.proto.NestedTestProto_pb2', globals()) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, 'tests.integration.schema_registry.data.proto.NestedTestProto_pb2', globals() +) if _descriptor._USE_C_DESCRIPTORS == False: - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n,io.confluent.kafka.serializers.protobuf.testB\017NestedTestProtoP\000' - _NESTEDMESSAGE_MAPTYPEENTRY._options = None - _NESTEDMESSAGE_MAPTYPEENTRY._serialized_options = b'8\001' - _NESTEDMESSAGE_INNERMESSAGE.fields_by_name['ids']._options = None - _NESTEDMESSAGE_INNERMESSAGE.fields_by_name['ids']._serialized_options = b'\020\001' - _NESTEDMESSAGE_INNERENUM._options = None - _NESTEDMESSAGE_INNERENUM._serialized_options = b'\020\001' - _STATUS._serialized_start=988 - _STATUS._serialized_end=1022 - _USERID._serialized_start=142 - _USERID._serialized_end=282 - _MESSAGEID._serialized_start=284 - _MESSAGEID._serialized_end=307 - _COMPLEXTYPE._serialized_start=309 - _COMPLEXTYPE._serialized_end=391 - _NESTEDMESSAGE._serialized_start=394 - _NESTEDMESSAGE._serialized_end=986 - _NESTEDMESSAGE_MAPTYPEENTRY._serialized_start=821 - _NESTEDMESSAGE_MAPTYPEENTRY._serialized_end=867 - _NESTEDMESSAGE_INNERMESSAGE._serialized_start=869 - _NESTEDMESSAGE_INNERMESSAGE._serialized_end=916 - _NESTEDMESSAGE_INNERENUM._serialized_start=918 - _NESTEDMESSAGE_INNERENUM._serialized_end=958 + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n,io.confluent.kafka.serializers.protobuf.testB\017NestedTestProtoP\000' + _NESTEDMESSAGE_MAPTYPEENTRY._options = None + _NESTEDMESSAGE_MAPTYPEENTRY._serialized_options = b'8\001' + _NESTEDMESSAGE_INNERMESSAGE.fields_by_name['ids']._options = None + _NESTEDMESSAGE_INNERMESSAGE.fields_by_name['ids']._serialized_options = b'\020\001' + _NESTEDMESSAGE_INNERENUM._options = None + _NESTEDMESSAGE_INNERENUM._serialized_options = b'\020\001' + _STATUS._serialized_start = 988 + _STATUS._serialized_end = 1022 + _USERID._serialized_start = 142 + _USERID._serialized_end = 282 + _MESSAGEID._serialized_start = 284 + _MESSAGEID._serialized_end = 307 + _COMPLEXTYPE._serialized_start = 309 + _COMPLEXTYPE._serialized_end = 391 + _NESTEDMESSAGE._serialized_start = 394 + _NESTEDMESSAGE._serialized_end = 986 + _NESTEDMESSAGE_MAPTYPEENTRY._serialized_start = 821 + _NESTEDMESSAGE_MAPTYPEENTRY._serialized_end = 867 + _NESTEDMESSAGE_INNERMESSAGE._serialized_start = 869 + _NESTEDMESSAGE_INNERMESSAGE._serialized_end = 916 + _NESTEDMESSAGE_INNERENUM._serialized_start = 918 + _NESTEDMESSAGE_INNERENUM._serialized_end = 958 # @@protoc_insertion_point(module_scope) diff --git a/tests/integration/schema_registry/data/proto/PublicTestProto_pb2.py b/tests/integration/schema_registry/data/proto/PublicTestProto_pb2.py index fe7d8ffe1..2568a5078 100644 --- a/tests/integration/schema_registry/data/proto/PublicTestProto_pb2.py +++ b/tests/integration/schema_registry/data/proto/PublicTestProto_pb2.py @@ -6,20 +6,27 @@ from google.protobuf import descriptor as _descriptor from google.protobuf import 
descriptor_pool as _descriptor_pool from google.protobuf import symbol_database as _symbol_database + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() -from tests.integration.schema_registry.data.proto import TestProto_pb2 as tests_dot_integration_dot_schema__registry_dot_data_dot_proto_dot_TestProto__pb2 +from tests.integration.schema_registry.data.proto import ( + TestProto_pb2 as tests_dot_integration_dot_schema__registry_dot_data_dot_proto_dot_TestProto__pb2, +) from tests.integration.schema_registry.data.proto.TestProto_pb2 import * -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\nBtests/integration/schema_registry/data/proto/PublicTestProto.proto\x12$tests.integration.serialization.data\x1atests/integration/schema_registry/data/proto/SInt32Value.proto\"\x1c\n\x0bSInt32Value\x12\r\n\x05value\x18\x01 \x01(\x11\x42.\n,io.confluent.kafka.serializers.protobuf.testb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n>tests/integration/schema_registry/data/proto/SInt32Value.proto\"\x1c\n\x0bSInt32Value\x12\r\n\x05value\x18\x01 \x01(\x11\x42.\n,io.confluent.kafka.serializers.protobuf.testb\x06proto3' +) _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'tests.integration.schema_registry.data.proto.SInt32Value_pb2', globals()) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, 'tests.integration.schema_registry.data.proto.SInt32Value_pb2', globals() +) if _descriptor._USE_C_DESCRIPTORS == False: - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n,io.confluent.kafka.serializers.protobuf.test' - _SINT32VALUE._serialized_start=66 - _SINT32VALUE._serialized_end=94 + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n,io.confluent.kafka.serializers.protobuf.test' + _SINT32VALUE._serialized_start = 66 + _SINT32VALUE._serialized_end = 94 # @@protoc_insertion_point(module_scope) diff --git a/tests/integration/schema_registry/data/proto/SInt64Value_pb2.py b/tests/integration/schema_registry/data/proto/SInt64Value_pb2.py index 75be7ff67..667ff1da7 100644 --- a/tests/integration/schema_registry/data/proto/SInt64Value_pb2.py +++ b/tests/integration/schema_registry/data/proto/SInt64Value_pb2.py @@ -6,21 +6,24 @@ from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import symbol_database as _symbol_database + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n>tests/integration/schema_registry/data/proto/SInt64Value.proto\"\x1c\n\x0bSInt64Value\x12\r\n\x05value\x18\x01 \x01(\x12\x42.\n,io.confluent.kafka.serializers.protobuf.testb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n>tests/integration/schema_registry/data/proto/SInt64Value.proto\"\x1c\n\x0bSInt64Value\x12\r\n\x05value\x18\x01 \x01(\x12\x42.\n,io.confluent.kafka.serializers.protobuf.testb\x06proto3' +) _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'tests.integration.schema_registry.data.proto.SInt64Value_pb2', globals()) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, 'tests.integration.schema_registry.data.proto.SInt64Value_pb2', globals() +) if _descriptor._USE_C_DESCRIPTORS == False: - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n,io.confluent.kafka.serializers.protobuf.test' - 
_SINT64VALUE._serialized_start=66 - _SINT64VALUE._serialized_end=94 + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n,io.confluent.kafka.serializers.protobuf.test' + _SINT64VALUE._serialized_start = 66 + _SINT64VALUE._serialized_end = 94 # @@protoc_insertion_point(module_scope) diff --git a/tests/integration/schema_registry/data/proto/TestProto_pb2.py b/tests/integration/schema_registry/data/proto/TestProto_pb2.py index 04e6e67e7..ad65cbc51 100644 --- a/tests/integration/schema_registry/data/proto/TestProto_pb2.py +++ b/tests/integration/schema_registry/data/proto/TestProto_pb2.py @@ -6,6 +6,7 @@ from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import symbol_database as _symbol_database + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() @@ -14,14 +15,18 @@ from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n.Criteo.Glup.HDFSOptions.ImportOptions.Generator.BackupOptions\x12X\n\x0btranscoding\x18\x06 \x01(\x0b\x32\x43.Criteo.Glup.HDFSOptions.ImportOptions.Generator.TranscodingOptions\x12N\n\x06kacoha\x18\x07 \x01(\x0b\x32>.Criteo.Glup.HDFSOptions.ImportOptions.Generator.KaCoHaOptions\x12R\n\x0b\x64\x65\x64uplicate\x18\x08 \x01(\x0b\x32=.Criteo.Glup.HDFSOptions.ImportOptions.Generator.DedupOptions\x12P\n\x07sampler\x18\t \x01(\x0b\x32?.Criteo.Glup.HDFSOptions.ImportOptions.Generator.SamplerOptions\x12V\n\ncomparator\x18\n \x01(\x0b\x32\x42.Criteo.Glup.HDFSOptions.ImportOptions.Generator.ComparatorOptions\x12\"\n\x02to\x18\xfa\x01 \x03(\x0b\x32\x15.Criteo.Glup.Location\x12\x12\n\tnamespace\x18\xfb\x01 \x01(\t\x12\x13\n\nstart_date\x18\xfd\x01 \x01(\t\x12\x12\n\tstop_date\x18\xfe\x01 \x01(\t\x12\x12\n\tignore_cn\x18\xff\x01 \x01(\x08\x1a\x9a\x01\n\x0c\x44\x65\x64upOptions\x12\x18\n\x10input_dataset_id\x18\x01 \x01(\t\x12\x1a\n\x12input_format_label\x18\x02 \x01(\t\x12\x19\n\x11output_dataset_id\x18\x03 \x01(\t\x12\x1b\n\x13output_format_label\x18\x04 \x01(\t\x12\x1c\n\x14use_hippo_cuttle_job\x18\x05 \x01(\x08\x1au\n\x11Kafka2HdfsOptions\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65\x64uplicate\x18\x03 \x01(\x08\x12\x19\n\x11output_dataset_id\x18\x04 \x01(\t\x12\x1b\n\x13output_format_label\x18\x05 \x01(\tJ\x04\x08\x02\x10\x03\x1aK\n\x0cKacohaConfig\x12\x1b\n\x13partitions_per_task\x18\x01 \x01(\x05\x12\x1e\n\x16poll_buffer_size_bytes\x18\x02 \x01(\x05\x1a\x87\x01\n\x11KacohaConfigPerDc\x12#\n\x02\x64\x63\x18\x01 \x01(\x0e\x32\x17.Criteo.Glup.DataCenter\x12M\n\x06\x63onfig\x18\x02 \x01(\x0b\x32=.Criteo.Glup.HDFSOptions.ImportOptions.Generator.KacohaConfig\x1a\x95\x02\n\rKaCoHaOptions\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x19\n\x11output_dataset_id\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65\x64uplicate\x18\x03 \x01(\x08\x12M\n\x06\x63onfig\x18\x04 \x01(\x0b\x32=.Criteo.Glup.HDFSOptions.ImportOptions.Generator.KacohaConfig\x12\x1b\n\x13output_format_label\x18\x05 \x01(\t\x12Y\n\rconfig_per_dc\x18\x06 \x03(\x0b\x32\x42.Criteo.Glup.HDFSOptions.ImportOptions.Generator.KacohaConfigPerDc\x1a<\n\x11\x44\x61taloaderOptions\x12\'\n\x08platform\x18\x01 \x03(\x0e\x32\x15.Criteo.Glup.Platform\x1a\xf1\x01\n\x0bSyncOptions\x12#\n\x04\x66rom\x18\x01 \x01(\x0b\x32\x15.Criteo.Glup.Location\x12\x18\n\x10source_namespace\x18\x03 \x01(\t\x12(\n\tplatforms\x18\x06 \x03(\x0e\x32\x15.Criteo.Glup.Platform\x12\x16\n\x0eis_backfilling\x18\x08 \x01(\x08\x12\x10\n\x08to_label\x18\t 
\x01(\t\x12\x15\n\rto_dataset_id\x18\n \x01(\t\x12\x18\n\x10with_backfilling\x18\x0b \x01(\x08\x12\x1e\n\x16is_scheduled_on_source\x18\x0c \x01(\x08\x1ax\n\rBackupOptions\x12#\n\x04\x66rom\x18\x01 \x01(\x0b\x32\x15.Criteo.Glup.Location\x12\x18\n\x10source_namespace\x18\x02 \x01(\t\x12(\n\tplatforms\x18\x03 \x03(\x0e\x32\x15.Criteo.Glup.Platform\x1a\x83\x02\n\x12TranscodingOptions\x12\x18\n\x10input_dataset_id\x18\x01 \x01(\t\x12\x19\n\x11output_dataset_id\x18\x02 \x01(\t\x12\x31\n\x0cinput_format\x18\x03 \x01(\x0e\x32\x1b.Criteo.Glup.HDFSDataFormat\x12\x32\n\routput_format\x18\x04 \x01(\x0e\x32\x1b.Criteo.Glup.HDFSDataFormat\x12\x1b\n\x13input_dataset_label\x18\x05 \x01(\t\x12\x1c\n\x14output_dataset_label\x18\x06 \x01(\t\x12\x16\n\x0eis_by_platform\x18\x07 \x01(\x08\x1a\x95\x01\n\x0eSamplerOptions\x12\x18\n\x10input_dataset_id\x18\x01 \x01(\t\x12\x1a\n\x12input_format_label\x18\x02 \x01(\t\x12\x19\n\x11output_dataset_id\x18\x03 \x01(\t\x12\x1b\n\x13output_format_label\x18\x04 \x01(\t\x12\x15\n\rsampling_rate\x18\x05 \x01(\x02\x1a\xa7\x01\n\x11\x43omparatorOptions\x12\x17\n\x0fleft_dataset_id\x18\x01 \x01(\t\x12\x19\n\x11left_format_label\x18\x02 \x01(\t\x12\x18\n\x10right_dataset_id\x18\x03 \x01(\t\x12\x1a\n\x12right_format_label\x18\x04 \x01(\t\x12\x10\n\x08hostname\x18\x05 \x01(\t\x12\x16\n\x0eignored_fields\x18\x06 \x01(\t\x1a\x11\n\x0f\x45xternalOptions\"9\n\x18ProducerTransportOptions\x12\x0e\n\x06syslog\x18\x01 \x01(\x08\x12\r\n\x05kafka\x18\x02 \x01(\x08\"8\n\x0fPropertyOptions\x12\x10\n\x08valuable\x18\x01 \x01(\x08\x12\x13\n\x0bhigh_volume\x18\x02 \x01(\x08\"\xcb\x02\n\x0bGlupOptions\x12/\n\x05kafka\x18\x01 \x01(\x0b\x32 .Criteo.Glup.KafkaMessageOptions\x12&\n\x04hdfs\x18\x02 \x01(\x0b\x32\x18.Criteo.Glup.HDFSOptions\x12\x14\n\x0csampling_pct\x18\x03 \x01(\r\x12\x1c\n\x14preprod_sampling_pct\x18\x04 \x01(\r\x12%\n\x07\x64\x61taset\x18\x05 \x03(\x0b\x32\x14.Criteo.Glup.DataSet\x12\x1c\n\x14message_sampling_pct\x18\x06 \x01(\r\x12\x38\n\tproducers\x18\x07 \x01(\x0b\x32%.Criteo.Glup.ProducerTransportOptions\x12\x30\n\nproperties\x18\x08 \x01(\x0b\x32\x1c.Criteo.Glup.PropertyOptions\"\xb1\x01\n\x10GlupFieldOptions\x12\x0f\n\x07sampled\x18\x01 \x01(\x08\x12\x14\n\x0csampling_key\x18\x02 \x01(\x08\x12\x30\n\x11\x64isabled_platform\x18\x03 \x03(\x0e\x32\x15.Criteo.Glup.Platform\x12\x18\n\x10should_clean_pii\x18\x04 \x01(\x08\x12\x18\n\x10pending_deletion\x18\x05 \x01(\x08\x12\x10\n\x08\x61\x64\x64\x65\x64_at\x18\x06 \x01(\t\")\n\x0bJsonMapping\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04skip\x18\x02 \x01(\x08\"4\n\tJsonAlias\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x19\n\x11use_enum_field_id\x18\x03 \x01(\x08\"\xb5\x02\n\x0f\x42\x61seGlupMessage\x12(\n\x0bglup_origin\x18\x01 \x01(\x0b\x32\x13.Criteo.Glup.Origin\x12)\n\tpartition\x18\x02 \x01(\x0b\x32\x16.Criteo.Glup.Partition\x12\x41\n\nset_fields\x18\xda\x86\x03 \x03(\x0b\x32+.Criteo.Glup.BaseGlupMessage.SetFieldsEntry\x12R\n\x0f\x63ontrol_message\x18\xff\xff\x7f \x03(\x0b\x32%.Criteo.Glup.ControlMessage.WatermarkB\x10\x92\xb5\x18\x0c\n\n__metadata\x1a\x30\n\x0eSetFieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x08:\x02\x38\x01:\x04\x88\xb5\x18\x01\"\xf2\x01\n\x19\x46orwardedWatermarkMessage\x12\x1d\n\x15original_kafka_offset\x18\x05 \x01(\x03\x12\x11\n\ttimestamp\x18\x06 \x01(\x03\x12\x1d\n\x15\x63onsolidation_enabled\x18\x07 \x01(\x08\x12\x12\n\ndataset_id\x18\n \x01(\t\x12\x1c\n\x14\x64\x61taset_format_label\x18\x0b \x01(\t\x12R\n\x0f\x63ontrol_message\x18\xff\xff\x7f 
\x03(\x0b\x32%.Criteo.Glup.ControlMessage.WatermarkB\x10\x92\xb5\x18\x0c\n\n__metadata\"y\n\x08Location\x12%\n\x03\x65nv\x18\x01 \x01(\x0e\x32\x18.Criteo.Glup.Environment\x12#\n\x02\x64\x63\x18\x02 \x01(\x0e\x32\x17.Criteo.Glup.DataCenter\x12\r\n\x05label\x18\x03 \x01(\t\x12\x12\n\ndataset_id\x18\x04 \x01(\t\"\xa2\x01\n\x06Origin\x12+\n\ndatacenter\x18\x01 \x01(\x0e\x32\x17.Criteo.Glup.DataCenter\x12\x1a\n\x03ip4\x18\x02 \x01(\x07\x42\r\x8a\xb5\x18\t\n\x07host_ip\x12\x10\n\x08hostname\x18\x03 \x01(\t\x12\x1e\n\x0e\x63ontainer_task\x18\x04 \x01(\tB\x06\x8a\xb5\x18\x02\x10\x01\x12\x1d\n\rcontainer_app\x18\x05 \x01(\tB\x06\x8a\xb5\x18\x02\x10\x01\"\x89\x05\n\x0e\x43ontrolMessage\x12\x38\n\twatermark\x18\x01 \x01(\x0b\x32%.Criteo.Glup.ControlMessage.Watermark\x1a\x89\x01\n\x0fWatermarkOrigin\x12\x13\n\x0bkafka_topic\x18\x01 \x01(\t\x12+\n\ndatacenter\x18\x02 \x01(\x0e\x32\x17.Criteo.Glup.DataCenter\x12\x34\n\x07\x63luster\x18\x03 \x01(\x0e\x32#.Criteo.Glup.ControlMessage.Cluster\x1a\xe8\x02\n\tWatermark\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\x10\n\x08hostname\x18\x02 \x01(\t\x12\x13\n\x0bkafka_topic\x18\x03 \x01(\t\x12\x11\n\tpartition\x18\x04 \x01(\x05\x12\x17\n\x0fpartition_count\x18\x05 \x01(\x05\x12\x14\n\x0cprocess_uuid\x18\x06 \x01(\x0c\x12\x0e\n\x06region\x18\x07 \x01(\t\x12*\n\x11timestamp_seconds\x18\x08 \x01(\x05\x42\x0f\x92\xb5\x18\x0b\n\ttimestamp\x12\x0f\n\x07\x63luster\x18\t \x01(\t\x12\x13\n\x0b\x65nvironment\x18\n \x01(\t\x12J\n\nset_fields\x18\xda\x86\x03 \x03(\x0b\x32\x34.Criteo.Glup.ControlMessage.Watermark.SetFieldsEntry\x1a\x30\n\x0eSetFieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x08:\x02\x38\x01:\x04\x88\xb5\x18\x01\"F\n\x07\x43luster\x12\x17\n\x13UNSUPPORTED_CLUSTER\x10\x00\x12\t\n\x05LOCAL\x10\x02\x12\x0b\n\x07\x43\x45NTRAL\x10\x03\x12\n\n\x06STREAM\x10\x04\"\x99\x01\n\tPartition\x12*\n\x11timestamp_seconds\x18\x01 \x01(\x04\x42\x0f\x8a\xb5\x18\x0b\n\ttimestamp\x12,\n\rhost_platform\x18\x02 \x01(\x0e\x32\x15.Criteo.Glup.Platform\x12\x32\n\nevent_type\x18\x03 \x01(\x0e\x32\x16.Criteo.Glup.EventTypeB\x06\x8a\xb5\x18\x02\x10\x01\"\x93\x01\n\rHDFSPartition\x12\x19\n\x11timestamp_seconds\x18\x01 \x01(\x04\x12,\n\rhost_platform\x18\x02 \x01(\x0e\x32\x15.Criteo.Glup.Platform\x12*\n\nevent_type\x18\x03 \x01(\x0e\x32\x16.Criteo.Glup.EventType\x12\r\n\x05\x64\x65pth\x18\x04 \x01(\x05\"\xa5\x01\n\x07Hash128\x12\x15\n\rmost_sig_bits\x18\x01 \x01(\x06\x12\x16\n\x0eleast_sig_bits\x18\x02 \x01(\x06\x12\x39\n\nset_fields\x18\xda\x86\x03 \x03(\x0b\x32#.Criteo.Glup.Hash128.SetFieldsEntry\x1a\x30\n\x0eSetFieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x08:\x02\x38\x01*~\n\x0fPartitionScheme\x12 
\n\x1cUNSUPPORTED_PARTITION_SCHEME\x10\x00\x12\t\n\x05\x44\x41ILY\x10\x02\x12\n\n\x06HOURLY\x10\x03\x12\x13\n\x0fPLATFORM_HOURLY\x10\x04\x12\x1d\n\x19\x45VENTTYPE_PLATFORM_HOURLY\x10\x05*?\n\rMessageFormat\x12\x16\n\x12UNSUPPORTED_FORMAT\x10\x00\x12\x08\n\x04JSON\x10\x01\x12\x0c\n\x08PROTOBUF\x10\x02*d\n\x0eHDFSDataFormat\x12\x1b\n\x17UNSUPPORTED_DATA_FORMAT\x10\x00\x12\r\n\tJSON_PAIL\x10\x02\x12\x10\n\x0cPROTOBUF_SEQ\x10\x03\x12\x14\n\x10PROTOBUF_PARQUET\x10\x04*3\n\x0b\x44\x61taSetKind\x12\x14\n\x10UNSUPPORTED_KIND\x10\x00\x12\x0e\n\nTIMESERIES\x10\x01*\x9a\x01\n\x0fMonitoringLevel\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\x15\n\x11REMOVE_MONITORING\x10\x01\x12\x1a\n\x16INFORMATIVE_MONITORING\x10\x02\x12\x15\n\x11\x43ONSENSUS_IGNORED\x10\x03\x12\x30\n,CONSENSUS_IGNORED_AND_INFORMATIVE_MONITORING\x10\x04*\x8b\x01\n\nDataCenter\x12\x1a\n\x16UNSUPPORTED_DATACENTER\x10\x00\x12\x07\n\x03\x41M5\x10\x02\x12\x07\n\x03HK5\x10\x03\x12\x07\n\x03NY8\x10\x04\x12\x07\n\x03PAR\x10\x05\x12\x07\n\x03PA4\x10\x06\x12\x07\n\x03SH5\x10\x07\x12\x07\n\x03SV6\x10\x08\x12\x07\n\x03TY5\x10\t\x12\x07\n\x03VA1\x10\n\x12\x07\n\x03\x41M6\x10\x0b\x12\x07\n\x03\x44\x41\x31\x10\x0c*A\n\x0b\x45nvironment\x12\x1b\n\x17UNSUPPORTED_ENVIRONMENT\x10\x00\x12\x0b\n\x07PREPROD\x10\x01\x12\x08\n\x04PROD\x10\x02*D\n\x08Platform\x12\x18\n\x14UNSUPPORTED_PLATFORM\x10\x00\x12\x06\n\x02\x45U\x10\x02\x12\x06\n\x02US\x10\x03\x12\x06\n\x02\x41S\x10\x04\x12\x06\n\x02\x43N\x10\x05*[\n\tEventType\x12\x1a\n\x16UNSUPPORTED_EVENT_TYPE\x10\x00\x12\x10\n\x0cItemPageView\x10\x02\x12\t\n\x05Sales\x10\x03\x12\n\n\x06\x42\x61sket\x10\x04\x12\t\n\x05Other\x10\x05*%\n\x05YesNo\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x06\n\x02NO\x10\x01\x12\x07\n\x03YES\x10\x02:I\n\x04glup\x12\x1f.google.protobuf.MessageOptions\x18\xd0\x86\x03 \x01(\x0b\x32\x18.Criteo.Glup.GlupOptions:C\n\x18\x63ontains_nullable_fields\x12\x1f.google.protobuf.MessageOptions\x18\xd1\x86\x03 \x01(\x08:Q\n\tglupfield\x12\x1d.google.protobuf.FieldOptions\x18\xd0\x86\x03 \x01(\x0b\x32\x1d.Criteo.Glup.GlupFieldOptions:O\n\x0cjson_mapping\x12\x1d.google.protobuf.FieldOptions\x18\xd1\x86\x03 \x01(\x0b\x32\x18.Criteo.Glup.JsonMapping:E\n\x04json\x12\x1d.google.protobuf.FieldOptions\x18\xd2\x86\x03 \x01(\x0b\x32\x16.Criteo.Glup.JsonAliasB\x11\n\x0f\x63om.criteo.glupb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\nAtests/integration/schema_registry/data/proto/metadata_proto.proto\x12\x0b\x43riteo.Glup\x1a google/protobuf/descriptor.proto\"$\n\x13KafkaMessageOptions\x12\r\n\x05topic\x18\x01 \x03(\t\"\x80\x02\n\x07\x44\x61taSet\x12\n\n\x02id\x18\x01 \x01(\t\x12*\n\x06\x66ormat\x18\x02 \x03(\x0b\x32\x1a.Criteo.Glup.DataSetFormat\x12\x36\n\x10partition_scheme\x18\x03 \x01(\x0e\x32\x1c.Criteo.Glup.PartitionScheme\x12\x12\n\njava_class\x18\x04 \x01(\t\x12\x11\n\tfor_tests\x18\x05 \x01(\x08\x12\r\n\x05owner\x18\x06 \x01(\t\x12\x0f\n\x07private\x18\x07 \x01(\x08\x12&\n\x04kind\x18\x08 \x01(\x0e\x32\x18.Criteo.Glup.DataSetKind\x12\x16\n\x0eretention_days\x18\t \x01(\x05\"x\n\x0c\x44\x61taSetChunk\x12)\n\tpartition\x18\x01 \x03(\x0b\x32\x16.Criteo.Glup.Partition\x12*\n\x06\x66ormat\x18\x02 \x01(\x0b\x32\x1a.Criteo.Glup.DataSetFormat\x12\x11\n\tdatasetId\x18\x03 \x01(\t\"\xe6\x02\n\rDataSetFormat\x12\x0c\n\x04path\x18\x01 \x01(\t\x12\x30\n\x0b\x66ile_format\x18\x02 \x01(\x0e\x32\x1b.Criteo.Glup.HDFSDataFormat\x12\x36\n\x10partition_scheme\x18\x03 \x01(\x0e\x32\x1c.Criteo.Glup.PartitionScheme\x12\x33\n\x0fstart_partition\x18\x04 
\x01(\x0b\x32\x1a.Criteo.Glup.HDFSPartition\x12\x31\n\rend_partition\x18\x05 \x01(\x0b\x32\x1a.Criteo.Glup.HDFSPartition\x12\x16\n\x0eretention_days\x18\x07 \x01(\x05\x12\x10\n\x08priority\x18\x08 \x01(\x05\x12\r\n\x05label\x18\t \x01(\t\x12\x36\n\x10monitoring_level\x18\n \x01(\x0e\x32\x1c.Criteo.Glup.MonitoringLevelJ\x04\x08\x06\x10\x07\"\xce\x19\n\x0bHDFSOptions\x12\x36\n\x06import\x18\x03 \x03(\x0b\x32&.Criteo.Glup.HDFSOptions.ImportOptions\x1a\x86\x19\n\rImportOptions\x12\r\n\x05owner\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x32\n\x0cpartitioning\x18\x04 \x01(\x0e\x32\x1c.Criteo.Glup.PartitionScheme\x12+\n\x06\x66ormat\x18\x05 \x01(\x0e\x32\x1b.Criteo.Glup.HDFSDataFormat\x12\x0f\n\x07private\x18\x06 \x01(\x08\x12\x43\n\tgenerator\x18\x0b \x03(\x0b\x32\x30.Criteo.Glup.HDFSOptions.ImportOptions.Generator\x12\x39\n\x04view\x18\x0c \x03(\x0b\x32+.Criteo.Glup.HDFSOptions.ImportOptions.View\x1a\x90\x01\n\x04View\x12\x45\n\x04hive\x18\n \x01(\x0b\x32\x37.Criteo.Glup.HDFSOptions.ImportOptions.View.HiveOptions\x1a\x41\n\x0bHiveOptions\x12\x32\n\x0cpartitioning\x18\x03 \x01(\x0e\x32\x1c.Criteo.Glup.PartitionScheme\x1a\xd2\x15\n\tGenerator\x12V\n\ndataloader\x18\x01 \x01(\x0b\x32\x42.Criteo.Glup.HDFSOptions.ImportOptions.Generator.DataloaderOptions\x12V\n\nkafka2hdfs\x18\x02 \x01(\x0b\x32\x42.Criteo.Glup.HDFSOptions.ImportOptions.Generator.Kafka2HdfsOptions\x12J\n\x04sync\x18\x03 \x01(\x0b\x32<.Criteo.Glup.HDFSOptions.ImportOptions.Generator.SyncOptions\x12R\n\x08\x65xternal\x18\x04 \x01(\x0b\x32@.Criteo.Glup.HDFSOptions.ImportOptions.Generator.ExternalOptions\x12N\n\x06\x62\x61\x63kup\x18\x05 \x01(\x0b\x32>.Criteo.Glup.HDFSOptions.ImportOptions.Generator.BackupOptions\x12X\n\x0btranscoding\x18\x06 \x01(\x0b\x32\x43.Criteo.Glup.HDFSOptions.ImportOptions.Generator.TranscodingOptions\x12N\n\x06kacoha\x18\x07 \x01(\x0b\x32>.Criteo.Glup.HDFSOptions.ImportOptions.Generator.KaCoHaOptions\x12R\n\x0b\x64\x65\x64uplicate\x18\x08 \x01(\x0b\x32=.Criteo.Glup.HDFSOptions.ImportOptions.Generator.DedupOptions\x12P\n\x07sampler\x18\t \x01(\x0b\x32?.Criteo.Glup.HDFSOptions.ImportOptions.Generator.SamplerOptions\x12V\n\ncomparator\x18\n \x01(\x0b\x32\x42.Criteo.Glup.HDFSOptions.ImportOptions.Generator.ComparatorOptions\x12\"\n\x02to\x18\xfa\x01 \x03(\x0b\x32\x15.Criteo.Glup.Location\x12\x12\n\tnamespace\x18\xfb\x01 \x01(\t\x12\x13\n\nstart_date\x18\xfd\x01 \x01(\t\x12\x12\n\tstop_date\x18\xfe\x01 \x01(\t\x12\x12\n\tignore_cn\x18\xff\x01 \x01(\x08\x1a\x9a\x01\n\x0c\x44\x65\x64upOptions\x12\x18\n\x10input_dataset_id\x18\x01 \x01(\t\x12\x1a\n\x12input_format_label\x18\x02 \x01(\t\x12\x19\n\x11output_dataset_id\x18\x03 \x01(\t\x12\x1b\n\x13output_format_label\x18\x04 \x01(\t\x12\x1c\n\x14use_hippo_cuttle_job\x18\x05 \x01(\x08\x1au\n\x11Kafka2HdfsOptions\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65\x64uplicate\x18\x03 \x01(\x08\x12\x19\n\x11output_dataset_id\x18\x04 \x01(\t\x12\x1b\n\x13output_format_label\x18\x05 \x01(\tJ\x04\x08\x02\x10\x03\x1aK\n\x0cKacohaConfig\x12\x1b\n\x13partitions_per_task\x18\x01 \x01(\x05\x12\x1e\n\x16poll_buffer_size_bytes\x18\x02 \x01(\x05\x1a\x87\x01\n\x11KacohaConfigPerDc\x12#\n\x02\x64\x63\x18\x01 \x01(\x0e\x32\x17.Criteo.Glup.DataCenter\x12M\n\x06\x63onfig\x18\x02 \x01(\x0b\x32=.Criteo.Glup.HDFSOptions.ImportOptions.Generator.KacohaConfig\x1a\x95\x02\n\rKaCoHaOptions\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x19\n\x11output_dataset_id\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65\x64uplicate\x18\x03 \x01(\x08\x12M\n\x06\x63onfig\x18\x04 
\x01(\x0b\x32=.Criteo.Glup.HDFSOptions.ImportOptions.Generator.KacohaConfig\x12\x1b\n\x13output_format_label\x18\x05 \x01(\t\x12Y\n\rconfig_per_dc\x18\x06 \x03(\x0b\x32\x42.Criteo.Glup.HDFSOptions.ImportOptions.Generator.KacohaConfigPerDc\x1a<\n\x11\x44\x61taloaderOptions\x12\'\n\x08platform\x18\x01 \x03(\x0e\x32\x15.Criteo.Glup.Platform\x1a\xf1\x01\n\x0bSyncOptions\x12#\n\x04\x66rom\x18\x01 \x01(\x0b\x32\x15.Criteo.Glup.Location\x12\x18\n\x10source_namespace\x18\x03 \x01(\t\x12(\n\tplatforms\x18\x06 \x03(\x0e\x32\x15.Criteo.Glup.Platform\x12\x16\n\x0eis_backfilling\x18\x08 \x01(\x08\x12\x10\n\x08to_label\x18\t \x01(\t\x12\x15\n\rto_dataset_id\x18\n \x01(\t\x12\x18\n\x10with_backfilling\x18\x0b \x01(\x08\x12\x1e\n\x16is_scheduled_on_source\x18\x0c \x01(\x08\x1ax\n\rBackupOptions\x12#\n\x04\x66rom\x18\x01 \x01(\x0b\x32\x15.Criteo.Glup.Location\x12\x18\n\x10source_namespace\x18\x02 \x01(\t\x12(\n\tplatforms\x18\x03 \x03(\x0e\x32\x15.Criteo.Glup.Platform\x1a\x83\x02\n\x12TranscodingOptions\x12\x18\n\x10input_dataset_id\x18\x01 \x01(\t\x12\x19\n\x11output_dataset_id\x18\x02 \x01(\t\x12\x31\n\x0cinput_format\x18\x03 \x01(\x0e\x32\x1b.Criteo.Glup.HDFSDataFormat\x12\x32\n\routput_format\x18\x04 \x01(\x0e\x32\x1b.Criteo.Glup.HDFSDataFormat\x12\x1b\n\x13input_dataset_label\x18\x05 \x01(\t\x12\x1c\n\x14output_dataset_label\x18\x06 \x01(\t\x12\x16\n\x0eis_by_platform\x18\x07 \x01(\x08\x1a\x95\x01\n\x0eSamplerOptions\x12\x18\n\x10input_dataset_id\x18\x01 \x01(\t\x12\x1a\n\x12input_format_label\x18\x02 \x01(\t\x12\x19\n\x11output_dataset_id\x18\x03 \x01(\t\x12\x1b\n\x13output_format_label\x18\x04 \x01(\t\x12\x15\n\rsampling_rate\x18\x05 \x01(\x02\x1a\xa7\x01\n\x11\x43omparatorOptions\x12\x17\n\x0fleft_dataset_id\x18\x01 \x01(\t\x12\x19\n\x11left_format_label\x18\x02 \x01(\t\x12\x18\n\x10right_dataset_id\x18\x03 \x01(\t\x12\x1a\n\x12right_format_label\x18\x04 \x01(\t\x12\x10\n\x08hostname\x18\x05 \x01(\t\x12\x16\n\x0eignored_fields\x18\x06 \x01(\t\x1a\x11\n\x0f\x45xternalOptions\"9\n\x18ProducerTransportOptions\x12\x0e\n\x06syslog\x18\x01 \x01(\x08\x12\r\n\x05kafka\x18\x02 \x01(\x08\"8\n\x0fPropertyOptions\x12\x10\n\x08valuable\x18\x01 \x01(\x08\x12\x13\n\x0bhigh_volume\x18\x02 \x01(\x08\"\xcb\x02\n\x0bGlupOptions\x12/\n\x05kafka\x18\x01 \x01(\x0b\x32 .Criteo.Glup.KafkaMessageOptions\x12&\n\x04hdfs\x18\x02 \x01(\x0b\x32\x18.Criteo.Glup.HDFSOptions\x12\x14\n\x0csampling_pct\x18\x03 \x01(\r\x12\x1c\n\x14preprod_sampling_pct\x18\x04 \x01(\r\x12%\n\x07\x64\x61taset\x18\x05 \x03(\x0b\x32\x14.Criteo.Glup.DataSet\x12\x1c\n\x14message_sampling_pct\x18\x06 \x01(\r\x12\x38\n\tproducers\x18\x07 \x01(\x0b\x32%.Criteo.Glup.ProducerTransportOptions\x12\x30\n\nproperties\x18\x08 \x01(\x0b\x32\x1c.Criteo.Glup.PropertyOptions\"\xb1\x01\n\x10GlupFieldOptions\x12\x0f\n\x07sampled\x18\x01 \x01(\x08\x12\x14\n\x0csampling_key\x18\x02 \x01(\x08\x12\x30\n\x11\x64isabled_platform\x18\x03 \x03(\x0e\x32\x15.Criteo.Glup.Platform\x12\x18\n\x10should_clean_pii\x18\x04 \x01(\x08\x12\x18\n\x10pending_deletion\x18\x05 \x01(\x08\x12\x10\n\x08\x61\x64\x64\x65\x64_at\x18\x06 \x01(\t\")\n\x0bJsonMapping\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04skip\x18\x02 \x01(\x08\"4\n\tJsonAlias\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x19\n\x11use_enum_field_id\x18\x03 \x01(\x08\"\xb5\x02\n\x0f\x42\x61seGlupMessage\x12(\n\x0bglup_origin\x18\x01 \x01(\x0b\x32\x13.Criteo.Glup.Origin\x12)\n\tpartition\x18\x02 \x01(\x0b\x32\x16.Criteo.Glup.Partition\x12\x41\n\nset_fields\x18\xda\x86\x03 
\x03(\x0b\x32+.Criteo.Glup.BaseGlupMessage.SetFieldsEntry\x12R\n\x0f\x63ontrol_message\x18\xff\xff\x7f \x03(\x0b\x32%.Criteo.Glup.ControlMessage.WatermarkB\x10\x92\xb5\x18\x0c\n\n__metadata\x1a\x30\n\x0eSetFieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x08:\x02\x38\x01:\x04\x88\xb5\x18\x01\"\xf2\x01\n\x19\x46orwardedWatermarkMessage\x12\x1d\n\x15original_kafka_offset\x18\x05 \x01(\x03\x12\x11\n\ttimestamp\x18\x06 \x01(\x03\x12\x1d\n\x15\x63onsolidation_enabled\x18\x07 \x01(\x08\x12\x12\n\ndataset_id\x18\n \x01(\t\x12\x1c\n\x14\x64\x61taset_format_label\x18\x0b \x01(\t\x12R\n\x0f\x63ontrol_message\x18\xff\xff\x7f \x03(\x0b\x32%.Criteo.Glup.ControlMessage.WatermarkB\x10\x92\xb5\x18\x0c\n\n__metadata\"y\n\x08Location\x12%\n\x03\x65nv\x18\x01 \x01(\x0e\x32\x18.Criteo.Glup.Environment\x12#\n\x02\x64\x63\x18\x02 \x01(\x0e\x32\x17.Criteo.Glup.DataCenter\x12\r\n\x05label\x18\x03 \x01(\t\x12\x12\n\ndataset_id\x18\x04 \x01(\t\"\xa2\x01\n\x06Origin\x12+\n\ndatacenter\x18\x01 \x01(\x0e\x32\x17.Criteo.Glup.DataCenter\x12\x1a\n\x03ip4\x18\x02 \x01(\x07\x42\r\x8a\xb5\x18\t\n\x07host_ip\x12\x10\n\x08hostname\x18\x03 \x01(\t\x12\x1e\n\x0e\x63ontainer_task\x18\x04 \x01(\tB\x06\x8a\xb5\x18\x02\x10\x01\x12\x1d\n\rcontainer_app\x18\x05 \x01(\tB\x06\x8a\xb5\x18\x02\x10\x01\"\x89\x05\n\x0e\x43ontrolMessage\x12\x38\n\twatermark\x18\x01 \x01(\x0b\x32%.Criteo.Glup.ControlMessage.Watermark\x1a\x89\x01\n\x0fWatermarkOrigin\x12\x13\n\x0bkafka_topic\x18\x01 \x01(\t\x12+\n\ndatacenter\x18\x02 \x01(\x0e\x32\x17.Criteo.Glup.DataCenter\x12\x34\n\x07\x63luster\x18\x03 \x01(\x0e\x32#.Criteo.Glup.ControlMessage.Cluster\x1a\xe8\x02\n\tWatermark\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\x10\n\x08hostname\x18\x02 \x01(\t\x12\x13\n\x0bkafka_topic\x18\x03 \x01(\t\x12\x11\n\tpartition\x18\x04 \x01(\x05\x12\x17\n\x0fpartition_count\x18\x05 \x01(\x05\x12\x14\n\x0cprocess_uuid\x18\x06 \x01(\x0c\x12\x0e\n\x06region\x18\x07 \x01(\t\x12*\n\x11timestamp_seconds\x18\x08 \x01(\x05\x42\x0f\x92\xb5\x18\x0b\n\ttimestamp\x12\x0f\n\x07\x63luster\x18\t \x01(\t\x12\x13\n\x0b\x65nvironment\x18\n \x01(\t\x12J\n\nset_fields\x18\xda\x86\x03 \x03(\x0b\x32\x34.Criteo.Glup.ControlMessage.Watermark.SetFieldsEntry\x1a\x30\n\x0eSetFieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x08:\x02\x38\x01:\x04\x88\xb5\x18\x01\"F\n\x07\x43luster\x12\x17\n\x13UNSUPPORTED_CLUSTER\x10\x00\x12\t\n\x05LOCAL\x10\x02\x12\x0b\n\x07\x43\x45NTRAL\x10\x03\x12\n\n\x06STREAM\x10\x04\"\x99\x01\n\tPartition\x12*\n\x11timestamp_seconds\x18\x01 \x01(\x04\x42\x0f\x8a\xb5\x18\x0b\n\ttimestamp\x12,\n\rhost_platform\x18\x02 \x01(\x0e\x32\x15.Criteo.Glup.Platform\x12\x32\n\nevent_type\x18\x03 \x01(\x0e\x32\x16.Criteo.Glup.EventTypeB\x06\x8a\xb5\x18\x02\x10\x01\"\x93\x01\n\rHDFSPartition\x12\x19\n\x11timestamp_seconds\x18\x01 \x01(\x04\x12,\n\rhost_platform\x18\x02 \x01(\x0e\x32\x15.Criteo.Glup.Platform\x12*\n\nevent_type\x18\x03 \x01(\x0e\x32\x16.Criteo.Glup.EventType\x12\r\n\x05\x64\x65pth\x18\x04 \x01(\x05\"\xa5\x01\n\x07Hash128\x12\x15\n\rmost_sig_bits\x18\x01 \x01(\x06\x12\x16\n\x0eleast_sig_bits\x18\x02 \x01(\x06\x12\x39\n\nset_fields\x18\xda\x86\x03 \x03(\x0b\x32#.Criteo.Glup.Hash128.SetFieldsEntry\x1a\x30\n\x0eSetFieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x08:\x02\x38\x01*~\n\x0fPartitionScheme\x12 
\n\x1cUNSUPPORTED_PARTITION_SCHEME\x10\x00\x12\t\n\x05\x44\x41ILY\x10\x02\x12\n\n\x06HOURLY\x10\x03\x12\x13\n\x0fPLATFORM_HOURLY\x10\x04\x12\x1d\n\x19\x45VENTTYPE_PLATFORM_HOURLY\x10\x05*?\n\rMessageFormat\x12\x16\n\x12UNSUPPORTED_FORMAT\x10\x00\x12\x08\n\x04JSON\x10\x01\x12\x0c\n\x08PROTOBUF\x10\x02*d\n\x0eHDFSDataFormat\x12\x1b\n\x17UNSUPPORTED_DATA_FORMAT\x10\x00\x12\r\n\tJSON_PAIL\x10\x02\x12\x10\n\x0cPROTOBUF_SEQ\x10\x03\x12\x14\n\x10PROTOBUF_PARQUET\x10\x04*3\n\x0b\x44\x61taSetKind\x12\x14\n\x10UNSUPPORTED_KIND\x10\x00\x12\x0e\n\nTIMESERIES\x10\x01*\x9a\x01\n\x0fMonitoringLevel\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\x15\n\x11REMOVE_MONITORING\x10\x01\x12\x1a\n\x16INFORMATIVE_MONITORING\x10\x02\x12\x15\n\x11\x43ONSENSUS_IGNORED\x10\x03\x12\x30\n,CONSENSUS_IGNORED_AND_INFORMATIVE_MONITORING\x10\x04*\x8b\x01\n\nDataCenter\x12\x1a\n\x16UNSUPPORTED_DATACENTER\x10\x00\x12\x07\n\x03\x41M5\x10\x02\x12\x07\n\x03HK5\x10\x03\x12\x07\n\x03NY8\x10\x04\x12\x07\n\x03PAR\x10\x05\x12\x07\n\x03PA4\x10\x06\x12\x07\n\x03SH5\x10\x07\x12\x07\n\x03SV6\x10\x08\x12\x07\n\x03TY5\x10\t\x12\x07\n\x03VA1\x10\n\x12\x07\n\x03\x41M6\x10\x0b\x12\x07\n\x03\x44\x41\x31\x10\x0c*A\n\x0b\x45nvironment\x12\x1b\n\x17UNSUPPORTED_ENVIRONMENT\x10\x00\x12\x0b\n\x07PREPROD\x10\x01\x12\x08\n\x04PROD\x10\x02*D\n\x08Platform\x12\x18\n\x14UNSUPPORTED_PLATFORM\x10\x00\x12\x06\n\x02\x45U\x10\x02\x12\x06\n\x02US\x10\x03\x12\x06\n\x02\x41S\x10\x04\x12\x06\n\x02\x43N\x10\x05*[\n\tEventType\x12\x1a\n\x16UNSUPPORTED_EVENT_TYPE\x10\x00\x12\x10\n\x0cItemPageView\x10\x02\x12\t\n\x05Sales\x10\x03\x12\n\n\x06\x42\x61sket\x10\x04\x12\t\n\x05Other\x10\x05*%\n\x05YesNo\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x06\n\x02NO\x10\x01\x12\x07\n\x03YES\x10\x02:I\n\x04glup\x12\x1f.google.protobuf.MessageOptions\x18\xd0\x86\x03 \x01(\x0b\x32\x18.Criteo.Glup.GlupOptions:C\n\x18\x63ontains_nullable_fields\x12\x1f.google.protobuf.MessageOptions\x18\xd1\x86\x03 \x01(\x08:Q\n\tglupfield\x12\x1d.google.protobuf.FieldOptions\x18\xd0\x86\x03 \x01(\x0b\x32\x1d.Criteo.Glup.GlupFieldOptions:O\n\x0cjson_mapping\x12\x1d.google.protobuf.FieldOptions\x18\xd1\x86\x03 \x01(\x0b\x32\x18.Criteo.Glup.JsonMapping:E\n\x04json\x12\x1d.google.protobuf.FieldOptions\x18\xd2\x86\x03 \x01(\x0b\x32\x16.Criteo.Glup.JsonAliasB\x11\n\x0f\x63om.criteo.glupb\x06proto3' +) _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'tests.integration.schema_registry.data.proto.metadata_proto_pb2', globals()) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, 'tests.integration.schema_registry.data.proto.metadata_proto_pb2', globals() +) if _descriptor._USE_C_DESCRIPTORS == False: - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(glup) - google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(contains_nullable_fields) - google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(glupfield) - google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(json_mapping) - google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(json) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(glup) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(contains_nullable_fields) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(glupfield) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(json_mapping) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(json) - 
DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n\017com.criteo.glup' - _BASEGLUPMESSAGE_SETFIELDSENTRY._options = None - _BASEGLUPMESSAGE_SETFIELDSENTRY._serialized_options = b'8\001' - _BASEGLUPMESSAGE.fields_by_name['control_message']._options = None - _BASEGLUPMESSAGE.fields_by_name['control_message']._serialized_options = b'\222\265\030\014\n\n__metadata' - _BASEGLUPMESSAGE._options = None - _BASEGLUPMESSAGE._serialized_options = b'\210\265\030\001' - _FORWARDEDWATERMARKMESSAGE.fields_by_name['control_message']._options = None - _FORWARDEDWATERMARKMESSAGE.fields_by_name['control_message']._serialized_options = b'\222\265\030\014\n\n__metadata' - _ORIGIN.fields_by_name['ip4']._options = None - _ORIGIN.fields_by_name['ip4']._serialized_options = b'\212\265\030\t\n\007host_ip' - _ORIGIN.fields_by_name['container_task']._options = None - _ORIGIN.fields_by_name['container_task']._serialized_options = b'\212\265\030\002\020\001' - _ORIGIN.fields_by_name['container_app']._options = None - _ORIGIN.fields_by_name['container_app']._serialized_options = b'\212\265\030\002\020\001' - _CONTROLMESSAGE_WATERMARK_SETFIELDSENTRY._options = None - _CONTROLMESSAGE_WATERMARK_SETFIELDSENTRY._serialized_options = b'8\001' - _CONTROLMESSAGE_WATERMARK.fields_by_name['timestamp_seconds']._options = None - _CONTROLMESSAGE_WATERMARK.fields_by_name['timestamp_seconds']._serialized_options = b'\222\265\030\013\n\ttimestamp' - _CONTROLMESSAGE_WATERMARK._options = None - _CONTROLMESSAGE_WATERMARK._serialized_options = b'\210\265\030\001' - _PARTITION.fields_by_name['timestamp_seconds']._options = None - _PARTITION.fields_by_name['timestamp_seconds']._serialized_options = b'\212\265\030\013\n\ttimestamp' - _PARTITION.fields_by_name['event_type']._options = None - _PARTITION.fields_by_name['event_type']._serialized_options = b'\212\265\030\002\020\001' - _HASH128_SETFIELDSENTRY._options = None - _HASH128_SETFIELDSENTRY._serialized_options = b'8\001' - _PARTITIONSCHEME._serialized_start=6876 - _PARTITIONSCHEME._serialized_end=7002 - _MESSAGEFORMAT._serialized_start=7004 - _MESSAGEFORMAT._serialized_end=7067 - _HDFSDATAFORMAT._serialized_start=7069 - _HDFSDATAFORMAT._serialized_end=7169 - _DATASETKIND._serialized_start=7171 - _DATASETKIND._serialized_end=7222 - _MONITORINGLEVEL._serialized_start=7225 - _MONITORINGLEVEL._serialized_end=7379 - _DATACENTER._serialized_start=7382 - _DATACENTER._serialized_end=7521 - _ENVIRONMENT._serialized_start=7523 - _ENVIRONMENT._serialized_end=7588 - _PLATFORM._serialized_start=7590 - _PLATFORM._serialized_end=7658 - _EVENTTYPE._serialized_start=7660 - _EVENTTYPE._serialized_end=7751 - _YESNO._serialized_start=7753 - _YESNO._serialized_end=7790 - _KAFKAMESSAGEOPTIONS._serialized_start=116 - _KAFKAMESSAGEOPTIONS._serialized_end=152 - _DATASET._serialized_start=155 - _DATASET._serialized_end=411 - _DATASETCHUNK._serialized_start=413 - _DATASETCHUNK._serialized_end=533 - _DATASETFORMAT._serialized_start=536 - _DATASETFORMAT._serialized_end=894 - _HDFSOPTIONS._serialized_start=897 - _HDFSOPTIONS._serialized_end=4175 - _HDFSOPTIONS_IMPORTOPTIONS._serialized_start=969 - _HDFSOPTIONS_IMPORTOPTIONS._serialized_end=4175 - _HDFSOPTIONS_IMPORTOPTIONS_VIEW._serialized_start=1258 - _HDFSOPTIONS_IMPORTOPTIONS_VIEW._serialized_end=1402 - _HDFSOPTIONS_IMPORTOPTIONS_VIEW_HIVEOPTIONS._serialized_start=1337 - _HDFSOPTIONS_IMPORTOPTIONS_VIEW_HIVEOPTIONS._serialized_end=1402 - _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR._serialized_start=1405 - 
_HDFSOPTIONS_IMPORTOPTIONS_GENERATOR._serialized_end=4175 - _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_DEDUPOPTIONS._serialized_start=2376 - _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_DEDUPOPTIONS._serialized_end=2530 - _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_KAFKA2HDFSOPTIONS._serialized_start=2532 - _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_KAFKA2HDFSOPTIONS._serialized_end=2649 - _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_KACOHACONFIG._serialized_start=2651 - _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_KACOHACONFIG._serialized_end=2726 - _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_KACOHACONFIGPERDC._serialized_start=2729 - _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_KACOHACONFIGPERDC._serialized_end=2864 - _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_KACOHAOPTIONS._serialized_start=2867 - _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_KACOHAOPTIONS._serialized_end=3144 - _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_DATALOADEROPTIONS._serialized_start=3146 - _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_DATALOADEROPTIONS._serialized_end=3206 - _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_SYNCOPTIONS._serialized_start=3209 - _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_SYNCOPTIONS._serialized_end=3450 - _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_BACKUPOPTIONS._serialized_start=3452 - _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_BACKUPOPTIONS._serialized_end=3572 - _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_TRANSCODINGOPTIONS._serialized_start=3575 - _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_TRANSCODINGOPTIONS._serialized_end=3834 - _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_SAMPLEROPTIONS._serialized_start=3837 - _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_SAMPLEROPTIONS._serialized_end=3986 - _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_COMPARATOROPTIONS._serialized_start=3989 - _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_COMPARATOROPTIONS._serialized_end=4156 - _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_EXTERNALOPTIONS._serialized_start=4158 - _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_EXTERNALOPTIONS._serialized_end=4175 - _PRODUCERTRANSPORTOPTIONS._serialized_start=4177 - _PRODUCERTRANSPORTOPTIONS._serialized_end=4234 - _PROPERTYOPTIONS._serialized_start=4236 - _PROPERTYOPTIONS._serialized_end=4292 - _GLUPOPTIONS._serialized_start=4295 - _GLUPOPTIONS._serialized_end=4626 - _GLUPFIELDOPTIONS._serialized_start=4629 - _GLUPFIELDOPTIONS._serialized_end=4806 - _JSONMAPPING._serialized_start=4808 - _JSONMAPPING._serialized_end=4849 - _JSONALIAS._serialized_start=4851 - _JSONALIAS._serialized_end=4903 - _BASEGLUPMESSAGE._serialized_start=4906 - _BASEGLUPMESSAGE._serialized_end=5215 - _BASEGLUPMESSAGE_SETFIELDSENTRY._serialized_start=5161 - _BASEGLUPMESSAGE_SETFIELDSENTRY._serialized_end=5209 - _FORWARDEDWATERMARKMESSAGE._serialized_start=5218 - _FORWARDEDWATERMARKMESSAGE._serialized_end=5460 - _LOCATION._serialized_start=5462 - _LOCATION._serialized_end=5583 - _ORIGIN._serialized_start=5586 - _ORIGIN._serialized_end=5748 - _CONTROLMESSAGE._serialized_start=5751 - _CONTROLMESSAGE._serialized_end=6400 - _CONTROLMESSAGE_WATERMARKORIGIN._serialized_start=5828 - _CONTROLMESSAGE_WATERMARKORIGIN._serialized_end=5965 - _CONTROLMESSAGE_WATERMARK._serialized_start=5968 - _CONTROLMESSAGE_WATERMARK._serialized_end=6328 - _CONTROLMESSAGE_WATERMARK_SETFIELDSENTRY._serialized_start=5161 - _CONTROLMESSAGE_WATERMARK_SETFIELDSENTRY._serialized_end=5209 - _CONTROLMESSAGE_CLUSTER._serialized_start=6330 - _CONTROLMESSAGE_CLUSTER._serialized_end=6400 - _PARTITION._serialized_start=6403 - _PARTITION._serialized_end=6556 - _HDFSPARTITION._serialized_start=6559 - _HDFSPARTITION._serialized_end=6706 - _HASH128._serialized_start=6709 - _HASH128._serialized_end=6874 - 
_HASH128_SETFIELDSENTRY._serialized_start=5161 - _HASH128_SETFIELDSENTRY._serialized_end=5209 + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\017com.criteo.glup' + _BASEGLUPMESSAGE_SETFIELDSENTRY._options = None + _BASEGLUPMESSAGE_SETFIELDSENTRY._serialized_options = b'8\001' + _BASEGLUPMESSAGE.fields_by_name['control_message']._options = None + _BASEGLUPMESSAGE.fields_by_name['control_message']._serialized_options = b'\222\265\030\014\n\n__metadata' + _BASEGLUPMESSAGE._options = None + _BASEGLUPMESSAGE._serialized_options = b'\210\265\030\001' + _FORWARDEDWATERMARKMESSAGE.fields_by_name['control_message']._options = None + _FORWARDEDWATERMARKMESSAGE.fields_by_name['control_message']._serialized_options = b'\222\265\030\014\n\n__metadata' + _ORIGIN.fields_by_name['ip4']._options = None + _ORIGIN.fields_by_name['ip4']._serialized_options = b'\212\265\030\t\n\007host_ip' + _ORIGIN.fields_by_name['container_task']._options = None + _ORIGIN.fields_by_name['container_task']._serialized_options = b'\212\265\030\002\020\001' + _ORIGIN.fields_by_name['container_app']._options = None + _ORIGIN.fields_by_name['container_app']._serialized_options = b'\212\265\030\002\020\001' + _CONTROLMESSAGE_WATERMARK_SETFIELDSENTRY._options = None + _CONTROLMESSAGE_WATERMARK_SETFIELDSENTRY._serialized_options = b'8\001' + _CONTROLMESSAGE_WATERMARK.fields_by_name['timestamp_seconds']._options = None + _CONTROLMESSAGE_WATERMARK.fields_by_name['timestamp_seconds']._serialized_options = b'\222\265\030\013\n\ttimestamp' + _CONTROLMESSAGE_WATERMARK._options = None + _CONTROLMESSAGE_WATERMARK._serialized_options = b'\210\265\030\001' + _PARTITION.fields_by_name['timestamp_seconds']._options = None + _PARTITION.fields_by_name['timestamp_seconds']._serialized_options = b'\212\265\030\013\n\ttimestamp' + _PARTITION.fields_by_name['event_type']._options = None + _PARTITION.fields_by_name['event_type']._serialized_options = b'\212\265\030\002\020\001' + _HASH128_SETFIELDSENTRY._options = None + _HASH128_SETFIELDSENTRY._serialized_options = b'8\001' + _PARTITIONSCHEME._serialized_start = 6876 + _PARTITIONSCHEME._serialized_end = 7002 + _MESSAGEFORMAT._serialized_start = 7004 + _MESSAGEFORMAT._serialized_end = 7067 + _HDFSDATAFORMAT._serialized_start = 7069 + _HDFSDATAFORMAT._serialized_end = 7169 + _DATASETKIND._serialized_start = 7171 + _DATASETKIND._serialized_end = 7222 + _MONITORINGLEVEL._serialized_start = 7225 + _MONITORINGLEVEL._serialized_end = 7379 + _DATACENTER._serialized_start = 7382 + _DATACENTER._serialized_end = 7521 + _ENVIRONMENT._serialized_start = 7523 + _ENVIRONMENT._serialized_end = 7588 + _PLATFORM._serialized_start = 7590 + _PLATFORM._serialized_end = 7658 + _EVENTTYPE._serialized_start = 7660 + _EVENTTYPE._serialized_end = 7751 + _YESNO._serialized_start = 7753 + _YESNO._serialized_end = 7790 + _KAFKAMESSAGEOPTIONS._serialized_start = 116 + _KAFKAMESSAGEOPTIONS._serialized_end = 152 + _DATASET._serialized_start = 155 + _DATASET._serialized_end = 411 + _DATASETCHUNK._serialized_start = 413 + _DATASETCHUNK._serialized_end = 533 + _DATASETFORMAT._serialized_start = 536 + _DATASETFORMAT._serialized_end = 894 + _HDFSOPTIONS._serialized_start = 897 + _HDFSOPTIONS._serialized_end = 4175 + _HDFSOPTIONS_IMPORTOPTIONS._serialized_start = 969 + _HDFSOPTIONS_IMPORTOPTIONS._serialized_end = 4175 + _HDFSOPTIONS_IMPORTOPTIONS_VIEW._serialized_start = 1258 + _HDFSOPTIONS_IMPORTOPTIONS_VIEW._serialized_end = 1402 + _HDFSOPTIONS_IMPORTOPTIONS_VIEW_HIVEOPTIONS._serialized_start = 1337 + 
_HDFSOPTIONS_IMPORTOPTIONS_VIEW_HIVEOPTIONS._serialized_end = 1402 + _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR._serialized_start = 1405 + _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR._serialized_end = 4175 + _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_DEDUPOPTIONS._serialized_start = 2376 + _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_DEDUPOPTIONS._serialized_end = 2530 + _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_KAFKA2HDFSOPTIONS._serialized_start = 2532 + _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_KAFKA2HDFSOPTIONS._serialized_end = 2649 + _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_KACOHACONFIG._serialized_start = 2651 + _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_KACOHACONFIG._serialized_end = 2726 + _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_KACOHACONFIGPERDC._serialized_start = 2729 + _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_KACOHACONFIGPERDC._serialized_end = 2864 + _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_KACOHAOPTIONS._serialized_start = 2867 + _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_KACOHAOPTIONS._serialized_end = 3144 + _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_DATALOADEROPTIONS._serialized_start = 3146 + _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_DATALOADEROPTIONS._serialized_end = 3206 + _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_SYNCOPTIONS._serialized_start = 3209 + _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_SYNCOPTIONS._serialized_end = 3450 + _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_BACKUPOPTIONS._serialized_start = 3452 + _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_BACKUPOPTIONS._serialized_end = 3572 + _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_TRANSCODINGOPTIONS._serialized_start = 3575 + _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_TRANSCODINGOPTIONS._serialized_end = 3834 + _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_SAMPLEROPTIONS._serialized_start = 3837 + _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_SAMPLEROPTIONS._serialized_end = 3986 + _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_COMPARATOROPTIONS._serialized_start = 3989 + _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_COMPARATOROPTIONS._serialized_end = 4156 + _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_EXTERNALOPTIONS._serialized_start = 4158 + _HDFSOPTIONS_IMPORTOPTIONS_GENERATOR_EXTERNALOPTIONS._serialized_end = 4175 + _PRODUCERTRANSPORTOPTIONS._serialized_start = 4177 + _PRODUCERTRANSPORTOPTIONS._serialized_end = 4234 + _PROPERTYOPTIONS._serialized_start = 4236 + _PROPERTYOPTIONS._serialized_end = 4292 + _GLUPOPTIONS._serialized_start = 4295 + _GLUPOPTIONS._serialized_end = 4626 + _GLUPFIELDOPTIONS._serialized_start = 4629 + _GLUPFIELDOPTIONS._serialized_end = 4806 + _JSONMAPPING._serialized_start = 4808 + _JSONMAPPING._serialized_end = 4849 + _JSONALIAS._serialized_start = 4851 + _JSONALIAS._serialized_end = 4903 + _BASEGLUPMESSAGE._serialized_start = 4906 + _BASEGLUPMESSAGE._serialized_end = 5215 + _BASEGLUPMESSAGE_SETFIELDSENTRY._serialized_start = 5161 + _BASEGLUPMESSAGE_SETFIELDSENTRY._serialized_end = 5209 + _FORWARDEDWATERMARKMESSAGE._serialized_start = 5218 + _FORWARDEDWATERMARKMESSAGE._serialized_end = 5460 + _LOCATION._serialized_start = 5462 + _LOCATION._serialized_end = 5583 + _ORIGIN._serialized_start = 5586 + _ORIGIN._serialized_end = 5748 + _CONTROLMESSAGE._serialized_start = 5751 + _CONTROLMESSAGE._serialized_end = 6400 + _CONTROLMESSAGE_WATERMARKORIGIN._serialized_start = 5828 + _CONTROLMESSAGE_WATERMARKORIGIN._serialized_end = 5965 + _CONTROLMESSAGE_WATERMARK._serialized_start = 5968 + _CONTROLMESSAGE_WATERMARK._serialized_end = 6328 + _CONTROLMESSAGE_WATERMARK_SETFIELDSENTRY._serialized_start = 5161 + _CONTROLMESSAGE_WATERMARK_SETFIELDSENTRY._serialized_end = 5209 + _CONTROLMESSAGE_CLUSTER._serialized_start = 6330 + 
_CONTROLMESSAGE_CLUSTER._serialized_end = 6400 + _PARTITION._serialized_start = 6403 + _PARTITION._serialized_end = 6556 + _HDFSPARTITION._serialized_start = 6559 + _HDFSPARTITION._serialized_end = 6706 + _HASH128._serialized_start = 6709 + _HASH128._serialized_end = 6874 + _HASH128_SETFIELDSENTRY._serialized_start = 5161 + _HASH128_SETFIELDSENTRY._serialized_end = 5209 # @@protoc_insertion_point(module_scope) diff --git a/tox.ini b/tox.ini index 354cf5c13..8416051be 100644 --- a/tox.ini +++ b/tox.ini @@ -31,10 +31,18 @@ commands = mypy src/confluent_kafka deps = black>=24.0.0 commands = black --check --diff . +[testenv:black-fix] +deps = black>=24.0.0 +commands = black . + [testenv:isort] deps = isort>=5.13.0 commands = isort --check-only --diff . +[testenv:isort-fix] +deps = isort>=5.13.0 +commands = isort . + [pytest] python_files = test_* testpaths = tests From c6ba556f2ceeecd2479730c67b70e74a908e512b Mon Sep 17 00:00:00 2001 From: Matthew Seal Date: Wed, 19 Nov 2025 16:22:13 -0800 Subject: [PATCH 16/25] Resolved all type hinting issues locally --- .../common/schema_registry_client.py | 36 ++++++++ .../dek_registry/dek_registry_client.py | 89 +++++++++++++++++++ .../rules/encryption/encrypt_executor.py | 2 +- 3 files changed, 126 insertions(+), 1 deletion(-) diff --git a/src/confluent_kafka/schema_registry/common/schema_registry_client.py b/src/confluent_kafka/schema_registry/common/schema_registry_client.py index afdefd08f..1b4be6e80 100644 --- a/src/confluent_kafka/schema_registry/common/schema_registry_client.py +++ b/src/confluent_kafka/schema_registry/common/schema_registry_client.py @@ -707,6 +707,12 @@ class SchemaReference: subject: Optional[str] version: Optional[int] + def __init__(self, name: Optional[str] = None, subject: Optional[str] = None, version: Optional[int] = None): + # This is needed because mypy fails to infer the type of the attributes when using the constructor. + object.__setattr__(self, 'name', name) + object.__setattr__(self, 'subject', subject) + object.__setattr__(self, 'version', version) + def to_dict(self) -> Dict[str, Any]: name = self.name @@ -889,6 +895,21 @@ class Schema: metadata: Optional[Metadata] = None rule_set: Optional[RuleSet] = None + def __init__( + self, + schema_str: Optional[str] = None, + schema_type: Optional[str] = "AVRO", + references: Optional[List[SchemaReference]] = None, + metadata: Optional[Metadata] = None, + rule_set: Optional[RuleSet] = None, + ): + # This is needed because mypy fails to infer the type of the attributes when using the constructor. + object.__setattr__(self, 'schema_str', schema_str) + object.__setattr__(self, 'schema_type', schema_type) + object.__setattr__(self, 'references', references) + object.__setattr__(self, 'metadata', metadata) + object.__setattr__(self, 'rule_set', rule_set) + def to_dict(self) -> Dict[str, Any]: schema = self.schema_str schema_type = self.schema_type @@ -977,6 +998,21 @@ class RegisteredSchema: guid: Optional[str] schema: Schema + def __init__( + self, + subject: Optional[str], + version: Optional[int], + schema_id: Optional[int], + guid: Optional[str], + schema: Schema, + ): + # This is needed because mypy fails to infer the type of the attributes when using the constructor. 
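# A minimal standalone sketch of the pattern above, assuming only that the
# attrs package is installed; the Point class is hypothetical and not part of
# this repository. A frozen attrs class replaces __setattr__ with one that
# raises FrozenInstanceError, so a hand-written __init__ must route every
# write through object.__setattr__, which is what the generated attrs
# __init__ does internally.
import attrs

@attrs.frozen
class Point:
    x: int

    def __init__(self, x: int = 0) -> None:
        # "self.x = x" would raise attrs.exceptions.FrozenInstanceError here.
        object.__setattr__(self, 'x', x)

# attrs detects the hand-written __init__ and skips generating its own,
# so Point() and Point(3) both construct a frozen instance.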
+ object.__setattr__(self, 'subject', subject) + object.__setattr__(self, 'version', version) + object.__setattr__(self, 'schema_id', schema_id) + object.__setattr__(self, 'guid', guid) + object.__setattr__(self, 'schema', schema) + def to_dict(self) -> Dict[str, Any]: schema = self.schema diff --git a/src/confluent_kafka/schema_registry/rules/encryption/dek_registry/dek_registry_client.py b/src/confluent_kafka/schema_registry/rules/encryption/dek_registry/dek_registry_client.py index c0211f930..f081a7dd8 100644 --- a/src/confluent_kafka/schema_registry/rules/encryption/dek_registry/dek_registry_client.py +++ b/src/confluent_kafka/schema_registry/rules/encryption/dek_registry/dek_registry_client.py @@ -35,6 +35,10 @@ class KekKmsProps: properties: Dict[str, str] = _attrs_field(init=False, factory=dict) + def __init__(self, properties: Dict[str, str]): + # This is needed because mypy fails to infer the type of the attributes when using the constructor. + object.__setattr__(self, 'properties', properties) + def to_dict(self) -> Dict[str, Any]: field_dict: Dict[str, Any] = {} field_dict.update(self.properties) @@ -61,6 +65,27 @@ class Kek: ts: Optional[int] = _attrs_field(default=None) deleted: Optional[bool] = _attrs_field(default=None) + def __init__( + self, + name: Optional[str] = None, + kms_type: Optional[str] = None, + kms_key_id: Optional[str] = None, + kms_props: Optional[KekKmsProps] = None, + doc: Optional[str] = None, + shared: Optional[bool] = None, + ts: Optional[int] = None, + deleted: Optional[bool] = None, + ): + # This is needed because mypy fails to infer the type of the attributes when using the constructor. + object.__setattr__(self, 'name', name) + object.__setattr__(self, 'kms_type', kms_type) + object.__setattr__(self, 'kms_key_id', kms_key_id) + object.__setattr__(self, 'kms_props', kms_props) + object.__setattr__(self, 'doc', doc) + object.__setattr__(self, 'shared', shared) + object.__setattr__(self, 'ts', ts) + object.__setattr__(self, 'deleted', deleted) + def to_dict(self) -> Dict[str, Any]: name = self.name @@ -147,6 +172,23 @@ class CreateKekRequest: doc: Optional[str] shared: Optional[bool] + def __init__( + self, + name: Optional[str] = None, + kms_type: Optional[str] = None, + kms_key_id: Optional[str] = None, + kms_props: Optional[KekKmsProps] = None, + doc: Optional[str] = None, + shared: Optional[bool] = None, + ): + # This is needed because mypy fails to infer the type of the attributes when using the constructor. + object.__setattr__(self, 'name', name) + object.__setattr__(self, 'kms_type', kms_type) + object.__setattr__(self, 'kms_key_id', kms_key_id) + object.__setattr__(self, 'kms_props', kms_props) + object.__setattr__(self, 'doc', doc) + object.__setattr__(self, 'shared', shared) + def to_dict(self) -> Dict[str, Any]: name = self.name @@ -233,6 +275,27 @@ class Dek: deleted: Optional[bool] = _attrs_field(default=None) _lock: threading.Lock = _attrs_field(factory=threading.Lock, init=False, eq=False, order=False) + def __init__( + self, + kek_name: Optional[str] = None, + subject: Optional[str] = None, + version: Optional[int] = None, + algorithm: Optional[DekAlgorithm] = None, + encrypted_key_material: Optional[str] = None, + key_material: Optional[str] = None, + ts: Optional[int] = None, + deleted: Optional[bool] = None, + ): + # This is needed because mypy fails to infer the type of the attributes when using the constructor. 
+ object.__setattr__(self, 'kek_name', kek_name) + object.__setattr__(self, 'subject', subject) + object.__setattr__(self, 'version', version) + object.__setattr__(self, 'algorithm', algorithm) + object.__setattr__(self, 'encrypted_key_material', encrypted_key_material) + object.__setattr__(self, 'key_material', key_material) + object.__setattr__(self, 'ts', ts) + object.__setattr__(self, 'deleted', deleted) + def get_encrypted_key_material_bytes(self) -> Optional[bytes]: if self.encrypted_key_material is None: return None @@ -342,6 +405,19 @@ class CreateDekRequest: algorithm: Optional[DekAlgorithm] encrypted_key_material: Optional[str] + def __init__( + self, + subject: Optional[str] = None, + version: Optional[int] = None, + algorithm: Optional[DekAlgorithm] = None, + encrypted_key_material: Optional[str] = None, + ): + # This is needed because mypy fails to infer the type of the attributes when using the constructor. + object.__setattr__(self, 'subject', subject) + object.__setattr__(self, 'version', version) + object.__setattr__(self, 'algorithm', algorithm) + object.__setattr__(self, 'encrypted_key_material', encrypted_key_material) + def to_dict(self) -> Dict[str, Any]: subject = self.subject @@ -396,6 +472,11 @@ class KekId: name: str deleted: bool + def __init__(self, name: str, deleted: bool): + # This is needed because mypy fails to infer the type of the attributes when using the constructor. + object.__setattr__(self, 'name', name) + object.__setattr__(self, 'deleted', deleted) + @_attrs_define(frozen=True) class DekId: @@ -405,6 +486,14 @@ class DekId: algorithm: DekAlgorithm deleted: bool + def __init__(self, kek_name: str, subject: str, version: int, algorithm: DekAlgorithm, deleted: bool): + # This is needed because mypy fails to infer the type of the attributes when using the constructor. 
+ object.__setattr__(self, 'kek_name', kek_name) + object.__setattr__(self, 'subject', subject) + object.__setattr__(self, 'version', version) + object.__setattr__(self, 'algorithm', algorithm) + object.__setattr__(self, 'deleted', deleted) + class _KekCache(object): def __init__(self): diff --git a/src/confluent_kafka/schema_registry/rules/encryption/encrypt_executor.py b/src/confluent_kafka/schema_registry/rules/encryption/encrypt_executor.py index 6a0b03436..454613004 100644 --- a/src/confluent_kafka/schema_registry/rules/encryption/encrypt_executor.py +++ b/src/confluent_kafka/schema_registry/rules/encryption/encrypt_executor.py @@ -365,7 +365,7 @@ def _is_expired(self, ctx: RuleContext, dek: Optional[Dek]) -> bool: ctx.rule_mode != RuleMode.READ and self._dek_expiry_days > 0 and dek is not None - and (now - dek.ts) / MILLIS_IN_DAY > self._dek_expiry_days + and (now - (dek.ts or 0)) / MILLIS_IN_DAY > self._dek_expiry_days ) # type: ignore[operator] def transform(self, ctx: RuleContext, field_type: FieldType, field_value: Any) -> Any: From 9886ed49d103571f209509aee8f6baf262dd1df8 Mon Sep 17 00:00:00 2001 From: Matthew Seal Date: Thu, 20 Nov 2025 09:54:49 -0800 Subject: [PATCH 17/25] Applied unasync --- .../schema_registry/_sync/schema_registry_client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/confluent_kafka/schema_registry/_sync/schema_registry_client.py b/src/confluent_kafka/schema_registry/_sync/schema_registry_client.py index 7f1f3cb45..5c357060b 100644 --- a/src/confluent_kafka/schema_registry/_sync/schema_registry_client.py +++ b/src/confluent_kafka/schema_registry/_sync/schema_registry_client.py @@ -705,7 +705,7 @@ def register_schema_full_response( `POST Subject Version API Reference `_ """ # noqa: E501 - schema_id: Optional[int] = self._cache.get_id_by_schema(subject_name, schema) + schema_id = self._cache.get_id_by_schema(subject_name, schema) if schema_id is not None: result = self._cache.get_schema_by_id(subject_name, schema_id) if result is not None: From 8dbe2d0bbb70463b894a05ef2a82b0f8ad753983 Mon Sep 17 00:00:00 2001 From: Matthew Seal Date: Thu, 20 Nov 2025 12:28:57 -0800 Subject: [PATCH 18/25] Resolved mypy failures --- requirements/requirements-tests.txt | 1 + .../common/schema_registry_client.py | 36 -------- .../dek_registry/dek_registry_client.py | 89 ------------------- tox.ini | 5 +- 4 files changed, 5 insertions(+), 126 deletions(-) diff --git a/requirements/requirements-tests.txt b/requirements/requirements-tests.txt index 23c4eabdd..099213039 100644 --- a/requirements/requirements-tests.txt +++ b/requirements/requirements-tests.txt @@ -2,6 +2,7 @@ urllib3<3 flake8 mypy +attrs types-cachetools types-requests orjson diff --git a/src/confluent_kafka/schema_registry/common/schema_registry_client.py b/src/confluent_kafka/schema_registry/common/schema_registry_client.py index 1b4be6e80..afdefd08f 100644 --- a/src/confluent_kafka/schema_registry/common/schema_registry_client.py +++ b/src/confluent_kafka/schema_registry/common/schema_registry_client.py @@ -707,12 +707,6 @@ class SchemaReference: subject: Optional[str] version: Optional[int] - def __init__(self, name: Optional[str] = None, subject: Optional[str] = None, version: Optional[int] = None): - # This is needed because mypy fails to infer the type of the attributes when using the constructor. 
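# A small sketch of the Optional-narrowing idiom from the encrypt_executor
# change above. The function and parameter names here are hypothetical; the
# point is that "ts or 0" yields a plain int where "ts" alone is
# Optional[int], so the subtraction type-checks without an ignore comment,
# at the cost of treating a missing timestamp like epoch zero.
from typing import Optional

MILLIS_IN_DAY = 24 * 60 * 60 * 1000

def is_expired(now_ms: int, ts: Optional[int], expiry_days: int) -> bool:
    # "now_ms - ts" fails under mypy because ts may be None;
    # "(ts or 0)" narrows the operand to int.
    return (now_ms - (ts or 0)) / MILLIS_IN_DAY > expiry_days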
- object.__setattr__(self, 'name', name) - object.__setattr__(self, 'subject', subject) - object.__setattr__(self, 'version', version) - def to_dict(self) -> Dict[str, Any]: name = self.name @@ -895,21 +889,6 @@ class Schema: metadata: Optional[Metadata] = None rule_set: Optional[RuleSet] = None - def __init__( - self, - schema_str: Optional[str] = None, - schema_type: Optional[str] = "AVRO", - references: Optional[List[SchemaReference]] = None, - metadata: Optional[Metadata] = None, - rule_set: Optional[RuleSet] = None, - ): - # This is needed because mypy fails to infer the type of the attributes when using the constructor. - object.__setattr__(self, 'schema_str', schema_str) - object.__setattr__(self, 'schema_type', schema_type) - object.__setattr__(self, 'references', references) - object.__setattr__(self, 'metadata', metadata) - object.__setattr__(self, 'rule_set', rule_set) - def to_dict(self) -> Dict[str, Any]: schema = self.schema_str schema_type = self.schema_type @@ -998,21 +977,6 @@ class RegisteredSchema: guid: Optional[str] schema: Schema - def __init__( - self, - subject: Optional[str], - version: Optional[int], - schema_id: Optional[int], - guid: Optional[str], - schema: Schema, - ): - # This is needed because mypy fails to infer the type of the attributes when using the constructor. - object.__setattr__(self, 'subject', subject) - object.__setattr__(self, 'version', version) - object.__setattr__(self, 'schema_id', schema_id) - object.__setattr__(self, 'guid', guid) - object.__setattr__(self, 'schema', schema) - def to_dict(self) -> Dict[str, Any]: schema = self.schema diff --git a/src/confluent_kafka/schema_registry/rules/encryption/dek_registry/dek_registry_client.py b/src/confluent_kafka/schema_registry/rules/encryption/dek_registry/dek_registry_client.py index f081a7dd8..c0211f930 100644 --- a/src/confluent_kafka/schema_registry/rules/encryption/dek_registry/dek_registry_client.py +++ b/src/confluent_kafka/schema_registry/rules/encryption/dek_registry/dek_registry_client.py @@ -35,10 +35,6 @@ class KekKmsProps: properties: Dict[str, str] = _attrs_field(init=False, factory=dict) - def __init__(self, properties: Dict[str, str]): - # This is needed because mypy fails to infer the type of the attributes when using the constructor. - object.__setattr__(self, 'properties', properties) - def to_dict(self) -> Dict[str, Any]: field_dict: Dict[str, Any] = {} field_dict.update(self.properties) @@ -65,27 +61,6 @@ class Kek: ts: Optional[int] = _attrs_field(default=None) deleted: Optional[bool] = _attrs_field(default=None) - def __init__( - self, - name: Optional[str] = None, - kms_type: Optional[str] = None, - kms_key_id: Optional[str] = None, - kms_props: Optional[KekKmsProps] = None, - doc: Optional[str] = None, - shared: Optional[bool] = None, - ts: Optional[int] = None, - deleted: Optional[bool] = None, - ): - # This is needed because mypy fails to infer the type of the attributes when using the constructor. 
- object.__setattr__(self, 'name', name) - object.__setattr__(self, 'kms_type', kms_type) - object.__setattr__(self, 'kms_key_id', kms_key_id) - object.__setattr__(self, 'kms_props', kms_props) - object.__setattr__(self, 'doc', doc) - object.__setattr__(self, 'shared', shared) - object.__setattr__(self, 'ts', ts) - object.__setattr__(self, 'deleted', deleted) - def to_dict(self) -> Dict[str, Any]: name = self.name @@ -172,23 +147,6 @@ class CreateKekRequest: doc: Optional[str] shared: Optional[bool] - def __init__( - self, - name: Optional[str] = None, - kms_type: Optional[str] = None, - kms_key_id: Optional[str] = None, - kms_props: Optional[KekKmsProps] = None, - doc: Optional[str] = None, - shared: Optional[bool] = None, - ): - # This is needed because mypy fails to infer the type of the attributes when using the constructor. - object.__setattr__(self, 'name', name) - object.__setattr__(self, 'kms_type', kms_type) - object.__setattr__(self, 'kms_key_id', kms_key_id) - object.__setattr__(self, 'kms_props', kms_props) - object.__setattr__(self, 'doc', doc) - object.__setattr__(self, 'shared', shared) - def to_dict(self) -> Dict[str, Any]: name = self.name @@ -275,27 +233,6 @@ class Dek: deleted: Optional[bool] = _attrs_field(default=None) _lock: threading.Lock = _attrs_field(factory=threading.Lock, init=False, eq=False, order=False) - def __init__( - self, - kek_name: Optional[str] = None, - subject: Optional[str] = None, - version: Optional[int] = None, - algorithm: Optional[DekAlgorithm] = None, - encrypted_key_material: Optional[str] = None, - key_material: Optional[str] = None, - ts: Optional[int] = None, - deleted: Optional[bool] = None, - ): - # This is needed because mypy fails to infer the type of the attributes when using the constructor. - object.__setattr__(self, 'kek_name', kek_name) - object.__setattr__(self, 'subject', subject) - object.__setattr__(self, 'version', version) - object.__setattr__(self, 'algorithm', algorithm) - object.__setattr__(self, 'encrypted_key_material', encrypted_key_material) - object.__setattr__(self, 'key_material', key_material) - object.__setattr__(self, 'ts', ts) - object.__setattr__(self, 'deleted', deleted) - def get_encrypted_key_material_bytes(self) -> Optional[bytes]: if self.encrypted_key_material is None: return None @@ -405,19 +342,6 @@ class CreateDekRequest: algorithm: Optional[DekAlgorithm] encrypted_key_material: Optional[str] - def __init__( - self, - subject: Optional[str] = None, - version: Optional[int] = None, - algorithm: Optional[DekAlgorithm] = None, - encrypted_key_material: Optional[str] = None, - ): - # This is needed because mypy fails to infer the type of the attributes when using the constructor. - object.__setattr__(self, 'subject', subject) - object.__setattr__(self, 'version', version) - object.__setattr__(self, 'algorithm', algorithm) - object.__setattr__(self, 'encrypted_key_material', encrypted_key_material) - def to_dict(self) -> Dict[str, Any]: subject = self.subject @@ -472,11 +396,6 @@ class KekId: name: str deleted: bool - def __init__(self, name: str, deleted: bool): - # This is needed because mypy fails to infer the type of the attributes when using the constructor. 
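# The removals above drop the hand-written __init__ methods again in favor
# of the attrs-generated ones. A minimal sketch of what mypy can then infer,
# using a hypothetical KekRef class: mypy's built-in attrs plugin reads the
# generated __init__ signature from the class body, but only when attrs
# itself is importable in the environment mypy runs in, hence the explicit
# attrs install added to tox.ini in this patch.
import attrs

@attrs.frozen
class KekRef:
    name: str
    deleted: bool = False

# Inferred as KekRef.__init__(self, name: str, deleted: bool = False);
# an assignment such as ref.deleted = True is rejected as frozen.
ref = KekRef(name="kek1")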
- object.__setattr__(self, 'name', name) - object.__setattr__(self, 'deleted', deleted) - @_attrs_define(frozen=True) class DekId: @@ -486,14 +405,6 @@ class DekId: algorithm: DekAlgorithm deleted: bool - def __init__(self, kek_name: str, subject: str, version: int, algorithm: DekAlgorithm, deleted: bool): - # This is needed because mypy fails to infer the type of the attributes when using the constructor. - object.__setattr__(self, 'kek_name', kek_name) - object.__setattr__(self, 'subject', subject) - object.__setattr__(self, 'version', version) - object.__setattr__(self, 'algorithm', algorithm) - object.__setattr__(self, 'deleted', deleted) - class _KekCache(object): def __init__(self): diff --git a/tox.ini b/tox.ini index 8416051be..e9d4d3e17 100644 --- a/tox.ini +++ b/tox.ini @@ -25,7 +25,10 @@ deps = mypy types-cachetools types-requests~=2.32.0 -commands = mypy src/confluent_kafka +commands = + # Need attrs to be explicitly installed for mypy to find frozen class init definitions + pip install attrs + mypy src/confluent_kafka [testenv:black] deps = black>=24.0.0 From b8c0a1b023c5a557a291d25d24789108a7dbdeb3 Mon Sep 17 00:00:00 2001 From: Matthew Seal Date: Thu, 20 Nov 2025 15:18:42 -0800 Subject: [PATCH 19/25] Flake8 fix plus print for clang-format version --- tests/ducktape/run_ducktape_test.py | 4 ++-- tools/source-package-verification.sh | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/ducktape/run_ducktape_test.py b/tests/ducktape/run_ducktape_test.py index bdff04e83..20faf6a42 100755 --- a/tests/ducktape/run_ducktape_test.py +++ b/tests/ducktape/run_ducktape_test.py @@ -135,7 +135,7 @@ def run_all_tests(args): print("=" * 70) for test_type in test_types: - print(f"\n{'='*20} Running {test_type.upper()} Tests {'='*20}") + print(f"\n{'='*20} Running {test_type.upper()} Tests {'='*20}") # noqa: E226 # Create a new args object for this test type test_args = argparse.Namespace(test_type=test_type, test_method=args.test_method, debug=args.debug) @@ -148,7 +148,7 @@ def run_all_tests(args): else: print(f"\nāœ… {test_type.upper()} tests passed!") - print(f"\n{'='*70}") + print(f"\n{'='*70}") # noqa: E226 if overall_success: print("šŸŽ‰ All tests completed successfully!") return 0 diff --git a/tools/source-package-verification.sh b/tools/source-package-verification.sh index 3f966ecde..530107f26 100755 --- a/tools/source-package-verification.sh +++ b/tools/source-package-verification.sh @@ -53,6 +53,7 @@ if [[ $OS_NAME == linux && $ARCH == x64 ]]; then echo "Checking code formatting ..." # Check all tracked files (Python and C) all_files=$(git ls-tree -r --name-only HEAD | egrep '\.(py|c|h)$') + clang-format --version tools/style-format.sh $all_files || exit 1 echo "Building documentation ..." flake8 --exclude ./_venv,*_pb2.py,./build From 45e9a5a72b8877f5ba4df25f82bb3ea22b7d7784 Mon Sep 17 00:00:00 2001 From: Matthew Seal Date: Thu, 20 Nov 2025 15:38:55 -0800 Subject: [PATCH 20/25] Reverting admin.c to clang-format 18 variant --- src/confluent_kafka/src/Admin.c | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/confluent_kafka/src/Admin.c b/src/confluent_kafka/src/Admin.c index e56a3bbf5..06953e867 100644 --- a/src/confluent_kafka/src/Admin.c +++ b/src/confluent_kafka/src/Admin.c @@ -99,12 +99,12 @@ struct Admin_options { * Make sure this is kept up to date with Admin_options above. 
*/ #define Admin_options_INITIALIZER \ { \ - Admin_options_def_int, Admin_options_def_float, \ - Admin_options_def_float, Admin_options_def_int, \ - Admin_options_def_int, Admin_options_def_int, \ - Admin_options_def_int, Admin_options_def_ptr, \ - Admin_options_def_cnt, Admin_options_def_ptr, \ - Admin_options_def_cnt, \ + Admin_options_def_int, Admin_options_def_float, \ + Admin_options_def_float, Admin_options_def_int, \ + Admin_options_def_int, Admin_options_def_int, \ + Admin_options_def_int, Admin_options_def_ptr, \ + Admin_options_def_cnt, Admin_options_def_ptr, \ + Admin_options_def_cnt, \ } #define Admin_options_is_set_int(v) ((v) != Admin_options_def_int) From dc88b972f14df31b609d106c8b52b78508ff632c Mon Sep 17 00:00:00 2001 From: Matthew Seal Date: Thu, 20 Nov 2025 16:29:45 -0800 Subject: [PATCH 21/25] Resolved new avro type checks --- .../schema_registry/common/avro.py | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/src/confluent_kafka/schema_registry/common/avro.py b/src/confluent_kafka/schema_registry/common/avro.py index f7ab32dfe..dc514f6ab 100644 --- a/src/confluent_kafka/schema_registry/common/avro.py +++ b/src/confluent_kafka/schema_registry/common/avro.py @@ -5,7 +5,7 @@ from collections import defaultdict from copy import deepcopy from io import BytesIO -from typing import Dict, Optional, Set, Tuple, Union +from typing import Dict, Optional, Set, Tuple, Union, cast from fastavro import repository, validate from fastavro.schema import load_schema @@ -217,11 +217,17 @@ def _resolve_union(schema: AvroSchema, message: AvroMessage) -> Tuple[Optional[A for subschema in schema: try: if is_wrapped_union: - if isinstance(subschema, dict) and subschema["name"] == message[0]: - return (subschema, message[1]) + if isinstance(subschema, dict): + dict_schema = cast(dict, subschema) + tuple_message = cast(tuple, message) + if dict_schema["name"] == tuple_message[0]: + return (dict_schema, tuple_message[1]) elif is_typed_union: - if isinstance(subschema, dict) and subschema["name"] == message['-type']: - return (subschema, message) + if isinstance(subschema, dict): + dict_schema = cast(dict, subschema) + dict_message = cast(dict, message) + if dict_schema["name"] == dict_message['-type']: + return (dict_schema, dict_message) else: validate(message, subschema) return (subschema, message) From bba198b04ddcf1650c0d7c99454f3341ad6188f1 Mon Sep 17 00:00:00 2001 From: Matthew Seal Date: Thu, 20 Nov 2025 16:47:36 -0800 Subject: [PATCH 22/25] Added mypy ignore for celpy calls --- .../schema_registry/rules/cel/string_format.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/confluent_kafka/schema_registry/rules/cel/string_format.py b/src/confluent_kafka/schema_registry/rules/cel/string_format.py index 5f45beb83..8a544a29b 100644 --- a/src/confluent_kafka/schema_registry/rules/cel/string_format.py +++ b/src/confluent_kafka/schema_registry/rules/cel/string_format.py @@ -44,11 +44,11 @@ def __init__(self, locale: str): def format(self, fmt: celtypes.Value, args: celtypes.Value) -> celpy.Result: if not isinstance(fmt, celtypes.StringType): return celpy.native_to_cel( # type: ignore[attr-defined] - celpy.new_error("format() requires a string as the first argument") + celpy.new_error("format() requires a string as the first argument") # type: ignore[attr-defined] ) # type: ignore[attr-defined] if not isinstance(args, celtypes.ListType): return celpy.native_to_cel( # type: ignore[attr-defined] - celpy.new_error("format() requires a list as 
the second argument") + celpy.new_error("format() requires a list as the second argument") # type: ignore[attr-defined] ) # type: ignore[attr-defined] # printf style formatting i = 0 From e71442addf6c55da5306b336dde2256d8d1d9768 Mon Sep 17 00:00:00 2001 From: Matthew Seal Date: Thu, 20 Nov 2025 18:42:08 -0800 Subject: [PATCH 23/25] Upped memory limit as mypy increased the overhead slightly --- tests/ducktape/README.md | 2 +- tests/ducktape/transaction_benchmark_bounds.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/ducktape/README.md b/tests/ducktape/README.md index 9bbbfad70..dca9cc06c 100644 --- a/tests/ducktape/README.md +++ b/tests/ducktape/README.md @@ -51,7 +51,7 @@ The bounds configuration supports different environments with different performa "max_error_rate": 0.02, "min_success_rate": 0.98, "max_p99_latency_ms": 3000.0, - "max_memory_growth_mb": 800.0, + "max_memory_growth_mb": 1000.0, "max_buffer_full_rate": 0.05, "min_messages_per_poll": 10.0 }, diff --git a/tests/ducktape/transaction_benchmark_bounds.json b/tests/ducktape/transaction_benchmark_bounds.json index 81fbc3148..68446886f 100644 --- a/tests/ducktape/transaction_benchmark_bounds.json +++ b/tests/ducktape/transaction_benchmark_bounds.json @@ -7,7 +7,7 @@ "max_error_rate": 0.02, "min_success_rate": 0.98, "max_p99_latency_ms": 8000.0, - "max_memory_growth_mb": 800.0, + "max_memory_growth_mb": 1000.0, "max_buffer_full_rate": 0.05, "min_messages_per_poll": 0.0 }, From 04b17629f6157f66b6cb8df05a126ba21ea7aced Mon Sep 17 00:00:00 2001 From: Matthew Seal Date: Thu, 20 Nov 2025 20:28:00 -0800 Subject: [PATCH 24/25] Bumped memory for mypy overhead --- tests/ducktape/README.md | 2 +- tests/ducktape/consumer_benchmark_metrics.py | 2 +- tests/ducktape/transaction_benchmark_bounds.json | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/ducktape/README.md b/tests/ducktape/README.md index dca9cc06c..d09224004 100644 --- a/tests/ducktape/README.md +++ b/tests/ducktape/README.md @@ -62,7 +62,7 @@ The bounds configuration supports different environments with different performa "max_error_rate": 0.01, "min_success_rate": 0.99, "max_p99_latency_ms": 2500.0, - "max_memory_growth_mb": 600.0, + "max_memory_growth_mb": 700.0, "max_buffer_full_rate": 0.03, "min_messages_per_poll": 15.0 }, diff --git a/tests/ducktape/consumer_benchmark_metrics.py b/tests/ducktape/consumer_benchmark_metrics.py index 8cdd10154..5bcdded66 100644 --- a/tests/ducktape/consumer_benchmark_metrics.py +++ b/tests/ducktape/consumer_benchmark_metrics.py @@ -230,7 +230,7 @@ def __init__( max_p95_latency_ms: float = 10000.0, min_success_rate: float = 0.90, max_error_rate: float = 0.05, - max_memory_growth_mb: float = 600.0, + max_memory_growth_mb: float = 700.0, min_messages_per_consume: float = 0.5, max_empty_consume_rate: float = 0.5, ): diff --git a/tests/ducktape/transaction_benchmark_bounds.json b/tests/ducktape/transaction_benchmark_bounds.json index 68446886f..623be3ca3 100644 --- a/tests/ducktape/transaction_benchmark_bounds.json +++ b/tests/ducktape/transaction_benchmark_bounds.json @@ -18,7 +18,7 @@ "max_error_rate": 0.01, "min_success_rate": 0.99, "max_p99_latency_ms": 7000.0, - "max_memory_growth_mb": 600.0, + "max_memory_growth_mb": 700.0, "max_buffer_full_rate": 0.03, "min_messages_per_poll": 0.0 }, From 5bc1bc3bf3f92e730eb85a770ba0c52fd06e7d5c Mon Sep 17 00:00:00 2001 From: Matthew Seal Date: Thu, 20 Nov 2025 21:26:37 -0800 Subject: [PATCH 25/25] Adding file missed in prior commit --- 
tests/ducktape/producer_benchmark_bounds.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/ducktape/producer_benchmark_bounds.json b/tests/ducktape/producer_benchmark_bounds.json index 5fca529ae..afbddc376 100644 --- a/tests/ducktape/producer_benchmark_bounds.json +++ b/tests/ducktape/producer_benchmark_bounds.json @@ -7,7 +7,7 @@ "max_error_rate": 0.02, "min_success_rate": 0.98, "max_p99_latency_ms": 7000.0, - "max_memory_growth_mb": 800.0, + "max_memory_growth_mb": 1000.0, "max_buffer_full_rate": 0.05, "min_messages_per_poll": 5.0 }, @@ -18,7 +18,7 @@ "max_error_rate": 0.01, "min_success_rate": 0.99, "max_p99_latency_ms": 12000.0, - "max_memory_growth_mb": 800.0, + "max_memory_growth_mb": 1000.0, "max_buffer_full_rate": 0.03, "min_messages_per_poll": 5.0 },