diff --git a/.editorconfig b/.editorconfig deleted file mode 100644 index 824b9c3..0000000 --- a/.editorconfig +++ /dev/null @@ -1,39 +0,0 @@ -# Generated from: -# https://github.com/zopefoundation/meta/tree/master/config/zope-product -# -# EditorConfig Configuration file, for more details see: -# http://EditorConfig.org -# EditorConfig is a convention description, that could be interpreted -# by multiple editors to enforce common coding conventions for specific -# file types - -# top-most EditorConfig file: -# Will ignore other EditorConfig files in Home directory or upper tree level. -root = true - - -[*] # For All Files -# Unix-style newlines with a newline ending every file -end_of_line = lf -insert_final_newline = true -trim_trailing_whitespace = true -# Set default charset -charset = utf-8 -# Indent style default -indent_style = space -# Max Line Length - a hard line wrap, should be disabled -max_line_length = off - -[*.{py,cfg,ini}] -# 4 space indentation -indent_size = 4 - -[*.{yml,zpt,pt,dtml,zcml}] -# 2 space indentation -indent_size = 2 - -[{Makefile,.gitmodules}] -# Tab indentation (no size specified, but view as 4 spaces) -indent_style = tab -indent_size = unset -tab_width = unset diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 15d6aaf..a11340c 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -1,63 +1,144 @@ -# Generated from: -# https://github.com/zopefoundation/meta/tree/master/config/zope-product -name: tests +name: Products.mcdutils on: push: - pull_request: - schedule: - - cron: '0 12 * * 0' # run once a week on Sunday # Allow to run this workflow manually from the Actions tab workflow_dispatch: +env: + PYTHON_VERSION: "3.12" + PLONE_VERSION: "6.1.2" + jobs: - build: + + lint: + runs-on: ubuntu-latest + name: "Lint codebase" + steps: + + - name: Checkout + uses: actions/checkout@v4 + + - name: Install the latest version of uv + uses: astral-sh/setup-uv@v5 + with: + python-version: ${{ env.PYTHON_VERSION }} + enable-cache: true + + - name: Check formatting + if: ${{ success() || failure() }} + id: ruff-format + run: uvx ruff@latest format --diff + + - name: Check lint + if: ${{ success() || failure() }} + id: ruff-lint + run: uvx ruff@latest check --diff + + - name: Check XML / ZCML + if: ${{ success() || failure() }} + id: zpretty + run: uvx zpretty@latest --check src + + - name: Check Package Metadata + if: ${{ success() || failure() }} + id: pyroma + run: uvx pyroma@latest -d . + + - name: Check Python Versions + if: ${{ success() || failure() }} + id: py-versions + run: uvx check-python-versions@latest . 
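+      # Note: every check above runs with `if: success() || failure()`, so one
+      # failing tool does not short-circuit the remaining checks; the Report step
+      # below turns each step's `conclusion` into a ✅ / ❌ row in the job summary.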
+ + - name: Report + if: ${{ success() || failure() }} + run: | + echo '# Code Analysis' >> $GITHUB_STEP_SUMMARY + echo '| Test | Status |' >> $GITHUB_STEP_SUMMARY + echo '| --- | --- |' >> $GITHUB_STEP_SUMMARY + echo '| Format | ${{ steps.ruff-format.conclusion == 'failure' && '❌' || ' ✅' }} |' >> $GITHUB_STEP_SUMMARY + echo '| Lint | ${{ steps.ruff-lint.conclusion == 'failure' && '❌' || ' ✅' }} |' >> $GITHUB_STEP_SUMMARY + echo '| XML / ZCML | ${{ steps.zpretty.conclusion == 'failure' && '❌' || ' ✅' }} |' >> $GITHUB_STEP_SUMMARY + echo '| Package Metadata | ${{ steps.pyroma.conclusion == 'failure' && '❌' || ' ✅' }} |' >> $GITHUB_STEP_SUMMARY + echo '| Python Versions | ${{ steps.py-versions.conclusion == 'failure' && '❌' || ' ✅' }} |' >> $GITHUB_STEP_SUMMARY + + test: + runs-on: ubuntu-latest + services: + memcached: + image: memcached:latest + ports: + - 11211:11211 strategy: - # We want to see all failures: fail-fast: false matrix: - os: - - ["ubuntu", "ubuntu-20.04"] - config: - # [Python version, tox env] - - ["3.9", "release-check"] - - ["3.9", "lint"] - - ["3.7", "py37"] - - ["3.8", "py38"] - - ["3.9", "py39"] - - ["3.10", "py310"] - - ["3.11", "py311"] - - ["3.12", "py312"] - - ["3.9", "docs"] - - ["3.9", "coverage"] - - runs-on: ${{ matrix.os[1] }} - if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name - name: ${{ matrix.config[1] }} + python-version: ['3.11', '3.12', '3.13'] + plone-version: ['6.1-latest', '6.0-latest'] + env: + PYTHON_VERSION: ${{ matrix.python-version }} + PLONE_VERSION: ${{ matrix.plone-version }} steps: - - uses: actions/checkout@v3 - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.config[0] }} - - name: Pip cache - uses: actions/cache@v3 - with: - path: ~/.cache/pip - key: ${{ runner.os }}-pip-${{ matrix.config[0] }}-${{ hashFiles('setup.*', 'tox.ini') }} - restore-keys: | - ${{ runner.os }}-pip-${{ matrix.config[0] }}- - ${{ runner.os }}-pip- - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install tox - - name: Test - run: tox -e ${{ matrix.config[1] }} - - name: Coverage - if: matrix.config[1] == 'coverage' - run: | - pip install coveralls - coveralls --service=github - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - name: Checkout + uses: actions/checkout@v4 + + - name: Install the latest version of uv + uses: astral-sh/setup-uv@v5 + with: + python-version: ${{ env.PYTHON_VERSION }} + enable-cache: false + + - name: Restore uv cache + uses: actions/cache@v4 + with: + path: ${{ env.UV_CACHE_DIR }} + key: uv-${{ runner.os }}-${{ env.PYTHON_VERSION }}-${{ env.PLONE_VERSION }}-${{ hashFiles('pyproject.toml') }} + restore-keys: | + uv-${{ runner.os }}-${{ env.PYTHON_VERSION }}-${{ env.PLONE_VERSION }}-${{ hashFiles('pyproject.toml') }} + uv-${{ runner.os }}-${{ env.PYTHON_VERSION }}-${{ env.PLONE_VERSION }} + + - name: Run tests + run: make test + env: + MEMCACHED_SERVER: localhost:11211 + + coverage: + runs-on: ubuntu-latest + name: "Test Coverage" + needs: + - test + services: + memcached: + image: memcached:latest + ports: + - 11211:11211 + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Install the latest version of uv + uses: astral-sh/setup-uv@v5 + with: + python-version: ${{ env.PYTHON_VERSION }} + enable-cache: false + + - name: Restore uv cache + uses: actions/cache@v4 + with: + path: /tmp/.uv-cache + key: uv-${{ runner.os }}-${{ env.PYTHON_VERSION }}-${{ 
env.PLONE_VERSION }}-${{ hashFiles('pyproject.toml') }} + restore-keys: | + uv-${{ runner.os }}-${{ env.PYTHON_VERSION }}-${{ env.PLONE_VERSION }}-${{ hashFiles('pyproject.toml') }} + uv-${{ runner.os }}-${{ env.PYTHON_VERSION }}-${{ env.PLONE_VERSION }} + + - name: Run tests + run: make test-coverage + env: + MEMCACHED_SERVER: localhost:11211 + + - name: Report Coverage + run: | + echo "# Coverage Report" >> $GITHUB_STEP_SUMMARY + echo "$(uv run coverage report --format markdown)" >> $GITHUB_STEP_SUMMARY + env: + MEMCACHED_SERVER: localhost:11211 \ No newline at end of file diff --git a/.gitignore b/.gitignore index ce7f677..38f28d3 100644 --- a/.gitignore +++ b/.gitignore @@ -1,32 +1,65 @@ # Generated from: -# https://github.com/zopefoundation/meta/tree/master/config/zope-product -*.dll -*.egg-info/ -*.profraw +# https://github.com/plone/meta/tree/main/config/default +# See the inline comments on how to expand/tweak this configuration file +# python related +*.egg-info *.pyc *.pyo -*.so -.coverage -.coverage.* -.eggs/ -.installed.cfg -.mr.developer.cfg -.tox/ -.vscode/ -__pycache__/ -bin/ + +# translation related +*.mo + +# tools related build/ +.coverage +.*project coverage.xml -develop-eggs/ -develop/ +coverage-html-report/ dist/ docs/_build +__pycache__/ +.tox +.vscode/ +node_modules/ + +# venv / buildout related +bin/ +develop-eggs/ eggs/ +.eggs/ etc/ +.installed.cfg +include/ lib/ lib64 -log/ +.mr.developer.cfg parts/ pyvenv.cfg -testing.log var/ +local.cfg + +# mxdev +/instance/ +/.make-sentinels/ +/*-mxdev.txt +/reports/ +/sources/ +/venv/ +.installed.txt + +.idea +pyvenv.cfg +*.mo +.env +*/node_modules/* +.scannerwork +uv.lock +dependencies.svg +forest.dot +forest.json +docs/_build +.venv/ +requirements.txt +constraints-mxdev.txt +requirements-mxdev.txt +constraints-zope.txt \ No newline at end of file diff --git a/.meta.toml b/.meta.toml deleted file mode 100644 index e7dbe82..0000000 --- a/.meta.toml +++ /dev/null @@ -1,36 +0,0 @@ -# Generated from: -# https://github.com/zopefoundation/meta/tree/master/config/zope-product -[meta] -template = "zope-product" -commit-id = "acd8d239" - -[python] -with-windows = false -with-pypy = false -with-future-python = false -with-docs = true -with-sphinx-doctests = false -with-macos = false - -[tox] -use-flake8 = true - -[coverage] -fail-under = 84 - -[isort] -known_third_party = "memcache, six" - -[check-manifest] -additional-ignores = [ - "docs/_build/html/_images/*", - "docs/_build/html/_static/*", - ] - -[manifest] -additional-rules = [ - "include *.yaml", - "include *.txt", - "recursive-include docs *.png", - "recursive-include src *.pt", - ] diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..2d5837e --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,46 @@ +# Generated from: +# https://github.com/plone/meta/tree/main/config/default +# See the inline comments on how to expand/tweak this configuration file +ci: + autofix_prs: false + autoupdate_schedule: monthly + +repos: + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.6.9 + hooks: + # Run the linter. + - id: ruff + args: [--fix] + # Run the formatter. 
+ - id: ruff-format + - repo: https://github.com/asottile/pyupgrade + rev: v3.20.0 + hooks: + - id: pyupgrade + args: [--py311-plus] + - repo: https://github.com/collective/zpretty + rev: 3.1.1 + hooks: + - id: zpretty + - repo: https://github.com/codespell-project/codespell + rev: v2.3.0 + hooks: + - id: codespell + additional_dependencies: + - tomli + - repo: https://github.com/mgedmin/check-manifest + rev: "0.50" + hooks: + - id: check-manifest + pass_filenames: false + additional_dependencies: [build] + - repo: https://github.com/regebro/pyroma + rev: "5.0" + hooks: + - id: pyroma + - repo: https://github.com/mgedmin/check-python-versions + rev: "0.22.1" + hooks: + - id: check-python-versions + args: ["--only", "setup.py,pyproject.toml"] \ No newline at end of file diff --git a/CHANGES.rst b/CHANGES.rst index 366def7..0e3632d 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,8 +1,33 @@ Change log ========== -4.3 (unreleased) ----------------- +5.0.0 (2025-10-06) +------------------- + +- Added module ``reconnecting.py`` with client ``ReconnectingClient``: + + - Automatic reconnection on memcached network/server failures. + - Optional *monkey-patch* replacing ``memcache.Client``. + - Configuration via environment variables: + + - ``MCDUTILS_DISABLE_RECONNECT`` (disables). + - ``MCDUTILS_BACKOFF_MIN_MS`` and ``MCDUTILS_BACKOFF_MAX_MS`` (*backoff* intervals). + - ``MCDUTILS_LOG`` / ``MCDUTILS_LOG_LEVEL`` (structured logs). + - ``MCDUTILS_METRICS`` (enables internal metrics). + + - Prometheus metrics export via function ``export_prometheus_textfile``. + +- Implemented automatic *retry* in ``tpc_vote`` of ``MemCacheMapping``: + + - New function ``_tpc_vote_with_retry`` encapsulates ``MemCacheError`` failures. + - Configuration via environment variables: + + - ``MCDUTILS_DISABLE_TPC_RETRY`` (disables retry). + - ``MCDUTILS_TPC_RETRY_ATTEMPTS`` (number of extra attempts). + - ``MCDUTILS_TPC_RETRY_BACKOFF_MS`` (time between attempts). + + - Additional metrics registration: ``tpc_retry_attempts_total``, + ``tpc_retry_success_total``, ``tpc_retry_fail_total`` and accumulated *backoff* times. 4.2 (2023-12-28) @@ -31,6 +56,7 @@ Change log 3.2 (2021-09-03) ---------------- + - reorganized package to use current zopefoundation standards - claim compatibility with Python 3.9 and Zope 5 @@ -40,11 +66,13 @@ Change log 3.1 (2021-01-01) ---------------- + - revised ZMI 'Test Adding Items to Session' 3.0 (2020-08-07) ---------------- + - packaging cleanup and test fixing due to shifting dependencies - drop Zope 2 compatibility claims and tests @@ -52,18 +80,21 @@ Change log 2.5 (2019-11-13) ---------------- + - implement transaction savepoint support (`#3 `_) 2.4 (2019-10-23) ---------------- + - attempt to hide session values that may contain passwords in ``__repr__`` which is used when rendering the ``REQUEST`` object as string. 
2.3 (2019-10-13) ---------------- + - rely on the Zope 4.x branch for Python 2 compatibility - update description to replace Zope2 wording with just Zope @@ -73,17 +104,20 @@ Change log 2.2 (2019-05-21) ---------------- + - add an implementation for ``has_key`` which is gone under Python 3 2.1 (2019-03-31) ---------------- + - fix wrong method call during cache manager record invalidation (`#1 `_) 2.0 (2019-03-28) ---------------- + - make sure ``zcache.aggregateKey`` does not create unsuitable MemCache keys - allow storing values that don't conform to ``IMemCacheMapping`` @@ -98,6 +132,7 @@ Change log Possible breaking change ~~~~~~~~~~~~~~~~~~~~~~~~ + The objects returned by Zope's session data manager are implicitly expected to support Acquisition. Zope's session data manager uses it to insert itself into the object's acquisition chain. However, under Python 3 Acquisition can @@ -109,6 +144,7 @@ session data objects from this package. 1.0 (2019-03-28) ---------------- + - Zope 4 compatibility - documentation using Sphinx @@ -126,11 +162,13 @@ session data objects from this package. 0.2b3 (2011-11-21) ------------------ + - Extend MANIFEST.in to include other missing files (.gif, .pt, .txt) 0.2b2 (2011-11-21) ------------------ + - Fix source distribution by including README.txt and CHANGES.txt via manifest. @@ -139,6 +177,7 @@ session data objects from this package. 0.2b1 (2011-11-19) ------------------ + - Turn product into an egg and release on PyPI. - Implement a forced refresh of the in-process cache of memcache data at the @@ -162,6 +201,7 @@ session data objects from this package. 0.1 (2006-05-31) ---------------- + - CVS tag, 'mcdutils-0_1' -- Initial public release. +- Initial public release. \ No newline at end of file diff --git a/MANIFEST.in b/MANIFEST.in index 26a79ff..32655d9 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -3,8 +3,7 @@ include *.md include *.rst include *.txt -include buildout.cfg -include tox.ini +include *.py recursive-include docs *.py recursive-include docs *.rst @@ -15,4 +14,4 @@ recursive-include src *.py include *.yaml include *.txt recursive-include docs *.png -recursive-include src *.pt +recursive-include src *.pt \ No newline at end of file diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..55d9c88 --- /dev/null +++ b/Makefile @@ -0,0 +1,256 @@ +## Defensive settings for make: +# https://tech.davis-hansson.com/p/make/ +SHELL:=bash +.ONESHELL: +.SHELLFLAGS:=-eu -o pipefail -O inherit_errexit -c +.SILENT:clean +.DELETE_ON_ERROR: +MAKEFLAGS+=--warn-undefined-variables +MAKEFLAGS+=--no-builtin-rules +NUL := >/dev/null 2>&1 +# We like colors +# From: https://coderwall.com/p/izxssa/colored-makefile-for-golang-projects +RED=`tput setaf 1` +GREEN=`tput setaf 2` +RESET=`tput sgr0` +YELLOW=`tput setaf 3` + +# Python checks +PYTHON?=python3 + +# installed? +ifeq (, $(shell which $(PYTHON) )) + $(error "PYTHON=$(PYTHON) not found in $(PATH)") +endif + +# version ok? +PYTHON_VERSION=$(shell $(PYTHON) -c "import sys; print(float(f'{sys.version_info[0]}.{sys.version_info[1]}'))") +PYTHON_VERSION_MIN=3.11 +PYTHON_VERSION_OK=$(shell $(PYTHON) -c "import sys; print((int(sys.version_info[0]), int(sys.version_info[1])) >= tuple(map(int, '$(PYTHON_VERSION_MIN)'.split('.'))))") + +ifeq ($(PYTHON_VERSION_OK),False) + $(error "Your Python version is $(PYTHON_VERSION). 
Required Python version >= $(PYTHON_VERSION_MIN).") +endif + +PACKAGE_FOLDER=$(shell dirname $(realpath $(firstword $(MAKEFILE_LIST)))) +GIT_FOLDER=$(PACKAGE_FOLDER)/.git +VENV_FOLDER=$(PACKAGE_FOLDER)/.venv +BIN_FOLDER=$(VENV_FOLDER)/bin +ZOPE_VERSION=5.13 + +UV := $(shell command -v uv 2> /dev/null) +ifndef UV + UV := $(UV) +endif + +UVX := $(shell command -v uvx 2> /dev/null) +ifndef UVX + UVX := $(UVX) +endif + +.PHONY: all +all: help + +# Add the following 'help' target to your Makefile +# And add help text after each target name starting with '\#\#' +.PHONY: help +help: # This help message + @grep -E '^[a-zA-Z_-]+:.*?# .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?# "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' + + +.PHONY: venv +venv: ## virtualenv + @echo "$(GREEN)==> Setup Virtual Env$(RESET)" + if [ ! -d $(VENV_FOLDER) ]; then + @$(PYTHON) -m venv $(VENV_FOLDER) + @$(BIN_FOLDER)/pip3 install -U "pip" "uv" + @$(UV) pip install horse-with-no-namespace + fi + +constraints-zope.txt: ## Generate constraints file + @echo "$(GREEN)==> Generate constraints file$(RESET)" + @echo '-c https://zopefoundation.github.io/Zope/releases/$(ZOPE_VERSION)/constraints.txt' > constraints-zope.txt + +############################################ +# Config +############################################ +instance/etc/zope.ini instance/etc/zope.conf: ## Create instance configuration + @echo "$(GREEN)==> Create instance configuration$(RESET)" + @$(UVX) cookiecutter -f --no-input -c 2.1.1 --config-file instance.yaml gh:plone/cookiecutter-zope-instance + +.PHONY: config +config: instance/etc/zope.ini + +############################################ +# Installation +############################################ +requirements-mxdev.txt: constraints-zope.txt ## Generate constraints file + @echo "$(GREEN)==> Generate constraints file$(RESET)" + @$(UVX) mxdev -c mx.ini + +.PHONY: install +install: venv config constraints-zope.txt requirements-mxdev.txt ## Install Zope and dependencies + @echo "$(GREEN)==> Install Zope and dependencies$(RESET)" + @$(UV) pip install Paste -c constraints-zope.txt + @$(UV) pip install Products.Sessions -c constraints-zope.txt + @$(UV) pip install -r requirements-mxdev.txt + +############################################ +# Instance +############################################ +.PHONY: start +start: venv instance/etc/zope.ini ## Start a Zope instance on localhost:8080 + @$(UV) run runwsgi instance/etc/zope.ini + +.PHONY: zconsole +zconsole: $(VENV_FOLDER) instance/etc/zope.ini ## Start a console into a Zope instance + @$(UV) run zconsole debug instance/etc/zope.conf + +############################################ +# QA +############################################ +.PHONY: pre-commit +pre-commit: venv ## pre-commit install + @echo "$(GREEN)==> pre-commit$(RESET)" + @$(UVX) pre-commit install + +.PHONY: lint +lint: venv ## Check and fix code base according to Plone standards + @echo "$(GREEN)==> Lint codebase$(RESET)" + @$(UVX) ruff@latest check --fix --config pyproject.toml + @$(UVX) pyroma@latest -d . + @$(UVX) check-python-versions@latest . 
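+# Note: `lint` above auto-fixes lint findings and runs pyroma, check-python-versions
+# and `zpretty --check`; `format` below additionally reformats the code with
+# `ruff format` and rewrites XML/ZCML in place via `zpretty -i src`.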
+ @$(UVX) zpretty@latest --check src + +.PHONY: format +format: venv ## Check and fix code base according to Plone standards + @echo "$(GREEN)==> Format codebase$(RESET)" + @$(UVX) ruff@latest check --fix --config pyproject.toml + @$(UVX) ruff@latest format --config pyproject.toml + @$(UVX) zpretty@latest -i src + +.PHONY: manifest +manifest: venv ## Check Manifest + @echo "$(GREEN)==> Check Manifest$(RESET)" + @$(UVX) check-manifest@latest -v + +.PHONY: check +check: format lint manifest ## Check and fix code base according to Plone standards + +############################################ +# Tests +############################################ +.PHONY: test +test: venv ## run tests + @echo "$(GREEN)==> Running tests$(RESET)" + @$(UV) export --format requirements-txt --extra test -o requirements-test.txt + @$(UV) pip install -r requirements-test.txt + @$(UV) pip install horse-with-no-namespace + @rm requirements-test.txt + @$(UV) run pytest --disable-warnings + +.PHONY: test-coverage +test-coverage: venv ## run tests with coverage + @echo "$(GREEN)==> Running tests$(RESET)" + @$(UV) export --format requirements-txt --extra test -o requirements-test.txt $(NUL) + @$(UV) pip install -r requirements-test.txt $(NUL) + @$(UV) pip install horse-with-no-namespace + @rm requirements-test.txt + @$(UV) run pytest --cov=Products.mcdutils --cov-report term-missing --cov-report=html:coverage-html-report + +############################################ +# Docs +############################################ +.PHONY: docs +docs: venv ## Building the documentation + @echo "$(GREEN)==> Building the documentation$(RESET)" + @$(UV) export --format requirements-txt --extra docs -o requirements-docs.txt $(NUL) + @$(UV) pip install -r requirements-docs.txt $(NUL) + @rm requirements-docs.txt + @$(UV) run sphinx-build -b html -d docs/_build/doctrees docs docs/_build/html + +.PHONY: watch-docs +watch-docs: venv ## Watchiling docs + @echo "$(GREEN)==> Watchiling docs$(RESET)" + @$(UV) export --format requirements-txt --extra docs -o requirements-docs.txt $(NUL) + @$(UV) pip install -r requirements-docs.txt $(NUL) + @rm requirements-docs.txt + @$(UV) pip install sphinx-autobuild + @$(UV) run sphinx-autobuild docs docs/_build/html + +############################################ +# Release +############################################ +.PHONY: changelog +changelog: venv ## Release the package to pypi.org + @echo "🚀 Display the draft for the changelog" + @$(UV) pip install zestreleaser-towncrier==1.3.0 + @$(UV) run towncrier --draft --yes + +.PHONY: release +release: venv ## Release the package + @echo "🚀 Release package" + @$(UV) pip install zest.releaser[recommended]==9.6.2 + @$(UV) run prerelease + @$(UV) run release + @rm -Rf dist + @$(UV) build +# @$(UV) publish + @$(UV) run postrelease + +############################################ +# Dependency graph +############################################ + +.PHONY: dependency-graph +dependency-graph: venv ## Dependency graph + @echo "📈 Dependency graph" + @$(UV) pip install horse pipdeptree graphviz + @$(UV) run pipdeptree --exclude setuptools,wheel,pipdeptree,zope.interface,zope.component --graph-output svg > dependencies.svg + +############################################ +# Dependency circular +############################################ + +.PHONY: dependency-circular +dependency-circular: venv constraints-zope.txt## Dependency circular + @echo "🔃 Dependency circular" + @$(UV) pip install horse pipdeptree + @$(UV) pip install pipforester -c constraints-zope.txt + @rm 
constraints-zope.txt +# Generate the full dependency tree + @$(UV) run pipdeptree -j > forest.json +# Generate a DOT graph with the circular dependencies, if any + @$(UV) run pipforester -i forest.json -o forest.dot --cycles +# Report if there are any circular dependencies, i.e. error if there are any + @$(UV) run pipforester -i forest.json --check-cycles -o /dev/null + +############################################ +# Clean +############################################ + +.PHONY: clean +clean: ## Clean + @echo "$(RED)==> Cleaning environment and build$(RESET)" + [ -f forest.dot ] && rm forest.dot + [ -f forest.json ] && rm forest.json + [ -f dependencies.svg ] && rm dependencies.svg + [ -f constraints-zope.txt ] && rm constraints-zope.txt + [ -f requirements-mxdev.txt ] && rm requirements-mxdev.txt + [ -f requirements.txt ] && rm requirements.txt + [ -f constraints-mxdev.txt ] && rm constraints-mxdev.txt + find . -name '*.egg-info' -exec rm -fr {} + + find . -name '*.egg' -exec rm -rf {} + + find . -type d -name "__pycache__" -exec rm -rf {} + + find . -name "*.pyc" -delete + find . -name "*.pyo" -delete + find . -name ".pytest_cache" -exec rm -rf {} + + find . -name ".ruff_cache" -exec rm -rf {} + + find . -name ".eggs" -exec rm -rf {} + + find . -name "coverage-html-report" -exec rm -rf {} + + find . -name ".venv" -exec rm -rf {} + + find . -name "dist" -exec rm -rf {} + + find . -name "build" -exec rm -rf {} + + find . -name "instance" -exec rm -rf {} + + find . -name ".coverage" -delete + find . -name "uv.lock" -delete diff --git a/README.md b/README.md new file mode 100644 index 0000000..176b6a3 --- /dev/null +++ b/README.md @@ -0,0 +1,53 @@ +# Products.mcdutils + +`Products.mcdutils` provides an alternative to the ZODB-based session container (the `Transience` product) by using **memcached** as the _backing store_. It brings components for Zope/Plone that enable: + +- **MemCacheSessionDataContainer** (`sessiondata.py`): A session data container persisted in one or more **memcached** servers instead of the ZODB. +- **MemCacheMapping** (`mapping.py`): A transactional mapping (compatible with `IDataManager` / savepoints) that represents the session. Integrates with Zope's/`transaction`'s transaction mechanism. +- **MemCacheProxy** (`proxy.py`): A _proxy_ that manages the memcached client, servers, and serialization of session objects. +- **ZCache / ZCacheManager (work-alike)** (`zcache.py`): An in-memory cache implementation based on memcached with an API compatible with `RAMCacheManager` for use in Zope views/objects. +- **ZMI Views and Screens** (`www/*.pt`): Helpers for creating/configuring containers, proxies, and adding items for testing/diagnostics. +- **Transaction Integration**: Session objects participate in the `tpc_vote / tpc_finish` cycle, with safe writing only at the end of the transaction. + +> In practical terms: you get fast, distributed sessions, backed by memcached, while maintaining the transactional semantics expected by Zope/Plone. + +## What's New in Version 5.0.0 + +The new `5.0.0` version includes significant improvements in **resilience**, **observability**, and **code modernization**, while maintaining API compatibility. + +### 1) Resilient Memcached Client (New `reconnecting.py` Module) + +- **ReconnectingClient**: A _wrapper_ for `python-memcached` that attempts to **reconnect** and **reactivate** the client on network/server failures. +- **Automatic Monkey-patch** (by default): `memcache.Client` is replaced by `ReconnectingClient` during package import. 
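+  A minimal usage sketch (illustrative only, not taken from the package docs; it
+  assumes `ReconnectingClient` accepts the same server list and `set`/`get`
+  signatures as `memcache.Client`, while `patch_memcache()` is the entry point
+  actually imported in `__init__.py`):
+
+  ```python
+  from Products.mcdutils.reconnecting import ReconnectingClient
+  from Products.mcdutils.reconnecting import patch_memcache
+
+  # Use the wrapper directly, like a regular python-memcached client;
+  # transient server drops trigger reconnect/backoff instead of hard failures.
+  client = ReconnectingClient(["localhost:11211"])
+  client.set("greeting", "hello", time=60)
+  print(client.get("greeting"))
+
+  # Or re-apply the monkey-patch by hand (normally done at package import).
+  patch_memcache()
+  ```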
+- **How to Disable**: Export `MCDUTILS_DISABLE_RECONNECT=1` to revert to the original client. +- **Configurable Backoff**: `MCDUTILS_BACKOFF_MIN_MS` and `MCDUTILS_BACKOFF_MAX_MS` (set both to `0` to disable waiting). +- **Optional Logs**: Enable with `MCDUTILS_LOG=1` and adjust the level with `MCDUTILS_LOG_LEVEL` (e.g., `INFO`, `DEBUG`). +- **Built-in Metrics** (in-process): Enable with `MCDUTILS_METRICS=1`. Exposed counters include, among others: + `mcdutils_reconnect_attempts_total`, `mcdutils_reconnect_success_total`, `mcdutils_reconnect_fail_total`, + `mcdutils_retry_calls_total`, `mcdutils_retry_duration_seconds_sum`, `mcdutils_retry_duration_seconds_count`. +- **Prometheus (Textfile) Export**: Available via `reconnecting.export_prometheus_textfile(path)` when desired. + +### 2) Retry in `tpc_vote` with Metrics and Structured Logs + +- The `MemCacheMapping.tpc_vote` function receives a **wrapper with retry attempts** when a `MemCacheError` occurs during the _vote_. +- **Environment Parameters**: + + - `MCDUTILS_DISABLE_TPC_RETRY=1` → Disables the _retry_ in `tpc_vote`. + - `MCDUTILS_TPC_RETRY_ATTEMPTS` → Number of additional attempts (default: `1`). + - `MCDUTILS_TPC_RETRY_BACKOFF_MS` → _Backoff_ between attempts (ms, default: `100`). + - `MCDUTILS_LOG=1` / `MCDUTILS_LOG_LEVEL` → Enables logs and adjusts verbosity. + - Additional metrics: `tpc_retry_attempts_total`, `tpc_retry_success_total`, `tpc_retry_fail_total`, `tpc_retry_backoff_seconds_sum|count`. + +- **"Forced" Reconnection Path**: Before retrying, the wrapper attempts to invalidate connections (`disconnect_all` / `force_reconnect`) and executes a short _probe_ operation to warm up the client. +- **Motivation**: To mitigate intermittent failures at critical moments in the transaction cycle (e.g., network errors during _vote_). + +### 3) Robustness and Readability Improvements + +- Consistent use of `contextlib.suppress(...)` instead of empty `try/except` blocks for idempotent operations (e.g., invalidating caches/volatile attributes). +- Adoption of _f-strings_, typing, and the `py.typed` marker → better support for _type checkers_. +- _Style_ adjustments and compatibility (strings, imports with `from __future__ import annotations`, minor _cleanups_ in views and ZCache). + +### 4) Compatibility and API + +- **No Declared API Breaks** for the core components (Proxy, Mapping, SessionDataContainer, and ZCache). +- Default behavior is **more resilient** due to _reconnecting_ and _retry_ in `tpc_vote` — both can be **disabled by environment variable** if necessary. \ No newline at end of file diff --git a/buildout.cfg b/buildout.cfg deleted file mode 100644 index 56b6639..0000000 --- a/buildout.cfg +++ /dev/null @@ -1,12 +0,0 @@ -[buildout] -extends = - https://zopefoundation.github.io/Zope/releases/master/versions-prod.cfg -develop = . 
-parts = - test - - -[test] -recipe = zc.recipe.testrunner -defaults = ['-cv'] -eggs = Products.mcdutils diff --git a/docs/conf.py b/docs/conf.py index 45ea7a5..e1e6a5f 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -6,36 +6,36 @@ import datetime import os import pkginfo -import sys + parent = os.path.dirname(os.path.dirname(__file__)) parent_dir = os.path.abspath(parent) pkg_info = pkginfo.Develop(parent_dir) -pkg_version = pkg_info.version or '' +pkg_version = pkg_info.version or "" year = datetime.datetime.now().year # -- Project information ----------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information -project = 'Products.mcdutils' -copyright = '2008-%i Tres Seaver and contributors' % year -author = 'Tres Seaver and contributors' +project = "Products.mcdutils" +copyright = f"2008-{year} Tres Seaver and contributors" # noqa: A001 +author = "Tres Seaver and contributors" # The short X.Y version. -version = pkg_version.replace('.dev0', '') +version = pkg_version.replace(".dev0", "") # The full version, including alpha/beta/rc tags. release = pkg_version # -- General configuration --------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration -extensions = ['sphinx.ext.autodoc', 'repoze.sphinx.autointerface'] -templates_path = ['_templates'] -exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] -language = 'en' +extensions = ["sphinx.ext.autodoc", "repoze.sphinx.autointerface"] +templates_path = ["_templates"] +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] +language = "en" # -- Options for HTML output ------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output -html_theme = 'sphinx_rtd_theme' -html_static_path = ['_static'] +html_theme = "sphinx_rtd_theme" +html_static_path = ["_static"] diff --git a/docs/development.rst b/docs/development.rst index 498007d..ca7030c 100644 --- a/docs/development.rst +++ b/docs/development.rst @@ -22,45 +22,204 @@ https://github.com/dataflake/Products.mcdutils Preparing the development sandbox --------------------------------- -The following steps only need to be done once to install all the tools and -scripts needed for building, packaging and testing. First, create a -:term:`Virtual environment`. The example here uses Python 3.11, but any Python -version supported by this package will work. Then install all the required -tools: +This project uses a Makefile to automate development, quality assurance, testing, and deployment tasks for a Zope application. -.. code-block:: console +Prerequisites +------------- - $ cd Products.mcdutils - $ python3.11 -m venv . - $ bin/pip install -U pip wheel - $ bin/pip install -U setuptools zc.buildout tox twine +- Python 3.11 or higher +- ``make`` (available on most Unix-like systems) +Getting Started +--------------- -Running the tests ------------------ -You can use ``tox`` to run the unit and integration tests in this package. The -shipped ``tox`` configuration can run the tests for all supported platforms. -You can read the entire long list of possible options on the -`tox CLI interface documentation page -`_, but the following examples -will get you started: +Initial Setup +~~~~~~~~~~~~~ -.. code-block:: console +.. 
code-block:: bash - $ bin/tox -l # List all available environments - $ bin/tox -pall # Run tests for all environments in parallel - $ bin/tox -epy311 # Run tests on Python 3.11 only - $ bin/tox -elint # Run package sanity checks and lint the code + # Set up virtual environment and install dependencies + make install +Development +~~~~~~~~~~~ -Building the documentation --------------------------- -``tox`` is also used to build the :term:`Sphinx`-based documentation. The -input files are in the `docs` subfolder and the documentation build step will -compile them to HTML. The output is stored in `docs/_build/html/`: +.. code-block:: bash -.. code-block:: console + # Start local Zope instance (localhost:8080) + make start + + # Access Zope console for debugging + make zconsole + +Available Commands +------------------ + +Development +~~~~~~~~~~~ + +- ``make install`` - Installs Zope and all dependencies +- ``make start`` - Starts Zope instance on localhost:8080 +- ``make zconsole`` - Starts Zope console for debugging + +Code Quality +~~~~~~~~~~~~ + +- ``make check`` - Runs all quality checks +- ``make format`` - Automatically formats code +- ``make lint`` - Checks code style and standards +- ``make manifest`` - Verifies MANIFEST.in file + +Testing +~~~~~~~ + +- ``make test`` - Runs test suite +- ``make test-coverage`` - Runs tests with coverage report + +Documentation +~~~~~~~~~~~~~ + +- ``make docs`` - Generates HTML documentation +- ``make watch-docs`` - Generates documentation with auto-reload + + +Dependencies +~~~~~~~~~~~~ + +- ``make dependency-graph`` - Generates dependency graph (SVG) +- ``make dependency-circular`` - Checks for circular dependencies + +Release +~~~~~~~ + +- ``make changelog`` - Shows changelog draft +- ``make release`` - Publishes new package version + +Configuration +~~~~~~~~~~~~~ + +- ``make config`` - Creates Zope instance configuration +- ``make pre-commit`` - Installs pre-commit hooks + +Cleanup +~~~~~~~ + +- ``make clean`` - Removes temporary files and builds + +Configuration +------------- + +Environment Variables +~~~~~~~~~~~~~~~~~~~~~ + +- ``PYTHON`` - Defines Python interpreter (default: python3) + +Project Structure +~~~~~~~~~~~~~~~~~ + +- ``.venv/`` - Python virtual environment +- ``instance/`` - Zope instance configuration +- ``src/`` - Project source code +- ``docs/`` - Documentation + +Dependencies +------------ + +The project uses: + +- **Zope** 5.13 +- **uv** - Fast package manager +- **mxdev** - Dependency management +- **Ruff** - Linter and formatter +- **Pytest** - Testing framework +- **Sphinx** - Documentation generation + +Testing +------- + +.. code-block:: bash + + # Run basic tests + make test + + # Run tests with coverage + make test-coverage + +Documentation +------------- + +.. code-block:: bash + + # Generate documentation + make docs + + # Develop documentation with auto-reload + make watch-docs + +Deployment and Release +---------------------- + +Release Process +~~~~~~~~~~~~~~~ + +1. Check changes: + + .. code-block:: bash + + make changelog + +2. Execute release: + + .. code-block:: bash + + make release + +Code Quality +------------ + +The project follows Plone standards for code quality: + +.. code-block:: bash + + # Complete verification + make check + + # Formatting only + make format + + # Linting only + make lint + +Maintenance +----------- + +Environment Cleanup +~~~~~~~~~~~~~~~~~~~ + +.. 
code-block:: bash + + make clean + +Removes: + +- Python cache files +- Temporary builds +- Coverage reports +- Virtual environment +- Temporary dependency files + +Development +----------- + +Recommended Setup +~~~~~~~~~~~~~~~~~ + +1. Run ``make pre-commit`` to install pre-commit hooks +2. Use ``make check`` before committing +3. Run ``make test`` to verify functionality - $ bin/tox -edocs +Dependency Checks +~~~~~~~~~~~~~~~~~ +- ``make dependency-graph`` - Visualizes project dependencies +- ``make dependency-circular`` - Detects circular dependencies -If the documentation contains doctests they are run as well. diff --git a/docs/screen01.png b/docs/screen01.png index 6bd3d2a..b0343bd 100644 Binary files a/docs/screen01.png and b/docs/screen01.png differ diff --git a/docs/screen02.png b/docs/screen02.png index 3663455..0b3765a 100644 Binary files a/docs/screen02.png and b/docs/screen02.png differ diff --git a/docs/screen03.png b/docs/screen03.png index 934729c..9e52c30 100644 Binary files a/docs/screen03.png and b/docs/screen03.png differ diff --git a/docs/screen04.png b/docs/screen04.png index 1ecdcf8..d022619 100644 Binary files a/docs/screen04.png and b/docs/screen04.png differ diff --git a/docs/screen05.png b/docs/screen05.png index a7a1f98..ad7971c 100644 Binary files a/docs/screen05.png and b/docs/screen05.png differ diff --git a/docs/screen06.png b/docs/screen06.png index 6092768..049de1c 100644 Binary files a/docs/screen06.png and b/docs/screen06.png differ diff --git a/docs/screen07.png b/docs/screen07.png index 6fef953..50b0844 100644 Binary files a/docs/screen07.png and b/docs/screen07.png differ diff --git a/docs/screen08.png b/docs/screen08.png index c5d660e..96f11b3 100644 Binary files a/docs/screen08.png and b/docs/screen08.png differ diff --git a/instance.yaml b/instance.yaml new file mode 100644 index 0000000..7895957 --- /dev/null +++ b/instance.yaml @@ -0,0 +1,2 @@ +default_context: + initial_user_password: 'admin' \ No newline at end of file diff --git a/mx.ini b/mx.ini new file mode 100644 index 0000000..133b432 --- /dev/null +++ b/mx.ini @@ -0,0 +1,14 @@ +; This is a mxdev configuration file +; it can be used to override versions of packages already defined in the +; constraints files and to add new packages from VCS like git. +; to learn more about mxdev visit https://pypi.org/project/mxdev/ + +[settings] +main-package = -e .[test] + +; example section to use packages from git +; [example.contenttype] +; url = https://github.com/collective/example.contenttype.git +; pushurl = git@github.com:collective/example.contenttype.git +; extras = test +; branch = feature-7 \ No newline at end of file diff --git a/mypy.ini b/mypy.ini new file mode 100644 index 0000000..d3aa1f0 --- /dev/null +++ b/mypy.ini @@ -0,0 +1,7 @@ + +[mypy] +python_version = 3.11 +ignore_missing_imports = True +disallow_untyped_defs = False +warn_unused_ignores = True +warn_return_any = False diff --git a/news/.changelog_template.jinja b/news/.changelog_template.jinja new file mode 100644 index 0000000..b35bff3 --- /dev/null +++ b/news/.changelog_template.jinja @@ -0,0 +1,15 @@ +{% if sections[""] %} +{% for category, val in definitions.items() if category in sections[""] %} + +### {{ definitions[category]['name'] }} + +{% for text, values in sections[""][category].items() %} +- {{ text }} {{ values|join(', ') }} +{% endfor %} + +{% endfor %} +{% else %} +No significant changes. 
+ + +{% endif %} \ No newline at end of file diff --git a/news/.gitkeep b/news/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/news/17ce00cc.internal b/news/17ce00cc.internal new file mode 100644 index 0000000..c08f539 --- /dev/null +++ b/news/17ce00cc.internal @@ -0,0 +1,2 @@ +Update configuration files. +[plone devs] diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..dc6a88b --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,294 @@ +# --------------------------- +# Build configuration +# --------------------------- +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +# --------------------------- +# Project metadata +# --------------------------- +[project] +name = "Products.mcdutils" +dynamic = ["version"] +description = "A Zope product with memcached-backed ZCache and Zope session implementations." +readme = {file = "README.md", content-type = "text/markdown"} +authors = [ + {name = "Tres Seaver and contributors", email = "tseaver@palladion.com"}, +] +maintainers = [ + {name = "Jens Vagelpohl", email = "jens@dataflake.org"} +] +keywords = ["zope", "session", "memcache", "memcached", "Products"] +classifiers = [ + "Development Status :: 5 - Production/Stable", + "Environment :: Web Environment", + "Framework :: Zope", + "Framework :: Zope :: 5", + "Intended Audience :: Developers", + "Operating System :: OS Independent", + "Programming Language :: Python", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Topic :: Internet :: WWW/HTTP :: Session", +] +license = "ZPL-2.1" +requires-python = ">=3.11" +dependencies = [ + "python-memcached", + "Zope >= 5", +] + +[project.urls] +Homepage = "https://mcdutils.readthedocs.io" +Documentation = "https://mcdutils.readthedocs.io" +"Issue Tracker" = "https://github.com/dataflake/Products.mcdutils/issues" +"Source Code" = "https://github.com/dataflake/Products.mcdutils" + +[project.optional-dependencies] +test = [ + "pytest-cov", + "pytest<=8.5.0", +] +docs = [ + "sphinx", + "repoze.sphinx.autointerface", + "sphinx-rtd-theme", + "pkginfo", +] +# --------------------------- +# Hatch build configuration +# --------------------------- +[tool.hatch.build] +strict-naming = true + +[tool.hatch.version] +path = "src/Products/mcdutils/__init__.py" + +[tool.hatch.build.targets.sdist] +include = [ + "/pyproject.toml", + "/README.*", + "/LICENSE*", + "/src/**", + "/*.md", + "/*.rst", + "/*.txt", + "/*.in", + "/*.js", + "/*.json", + "/*.py", + "/docs/**", +] + +exclude = [ + "resources/**", + "tests/**", + ".gitignore", + ".pre-commit-config.yaml", + ".mypy.ini", + "instance.yaml", + "requirements.txt", +] + +[tool.hatch.build.targets.wheel] +packages = ["src/Products"] + +# --------------------------- +# Manifest +# --------------------------- +[tool.check-manifest] +ignore = [ + ".flake8", + "dependabot.yml", + "mx.ini", + "resources/**", + "tests/**", + "news/*.gitkeep", + "news/*.internal", + "news/*.jinja", + "Makefile", + ".pre-commit-config.yaml", + ".editorconfig", + ".meta.toml", + "docs/_build/html/_sources/*", + "docs/_build/html/_images/*", + "docs/_build/html/_static/*", + "docs/requirements.txt", + "mypy.ini", + ".readthedocs.yaml", + "instance.yaml", +] + + +# --------------------------- +# Changelog +# --------------------------- + +[tool.towncrier] +package = "Products.mcdutils" +directory = "news/" +filename = "CHANGE.md" +start_string = "\n" +title_format = "## {version} ({project_date})" 
+template = "news/.changelog_template.jinja" +issue_format = "[#{issue}](https://github.com/dataflake/Products.mcdutils/issues/{issue})" +underlines = ["", "", ""] + +[[tool.towncrier.type]] +directory = "breaking" +name = "Breaking changes:" +showcontent = true + +[[tool.towncrier.type]] +directory = "feature" +name = "New features:" +showcontent = true + +[[tool.towncrier.type]] +directory = "bugfix" +name = "Bug fixes:" +showcontent = true + +[[tool.towncrier.type]] +directory = "internal" +name = "Internal:" +showcontent = true + +[[tool.towncrier.type]] +directory = "documentation" +name = "Documentation:" +showcontent = true + +[[tool.towncrier.type]] +directory = "tests" +name = "Tests" +showcontent = true + +# --------------------------- +# codespell +# --------------------------- +[tool.codespell] +ignore-words-list = "discreet,vew" +skip = "*.po,*" + +# --------------------------- +# Ruff configuration +# --------------------------- +[tool.ruff] +exclude = [ + ".bzr", + ".direnv", + ".eggs", + ".git", + ".git-rewrite", + ".hg", + ".ipynb_checkpoints", + ".mypy_cache", + ".nox", + ".pants.d", + ".pyenv", + ".pytest_cache", + ".pytype", + ".ruff_cache", + ".svn", + ".tox", + ".venv", + ".vscode", + "__pypackages__", + "_build", + "buck-out", + "build", + "dist", + "node_modules", + "site-packages", + "venv", +] + +line-length = 79 +indent-width = 4 +target-version = "py312" +fix = true + +[tool.ruff.lint] +select = [ + "YTT", # flake8-2020 + "S", # flake8-bandit + "B", # flake8-bugbear + "A", # flake8-builtins + "C4", # flake8-comprehensions + "T10", # flake8-debugger + "SIM", # flake8-simplify + "C90", # mccabe + "E", "W", "F", # pycodestyle + pyflakes + "PGH", # pygrep-hooks + "UP", # pyupgrade + "I", # isort + "RUF", # ruff-specific +] +ignore = ["E731"] # allow lambda assignment +fixable = ["ALL"] +unfixable = [] + +dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" + +[tool.ruff.format] +preview = true +quote-style = "double" +indent-style = "space" +skip-magic-trailing-comma = false +line-ending = "auto" +docstring-code-format = false +docstring-code-line-length = "dynamic" + +[tool.ruff.lint.mccabe] +max-complexity = 15 + +[tool.ruff.lint.isort] +force-single-line = true +from-first = true +case-sensitive = false +lines-after-imports = 2 +lines-between-types = 1 +no-sections = true +order-by-type = false + +[tool.ruff.lint.per-file-ignores] +"tests/*" = ["E501", "RUF001", "S101"] + + +# --------------------------- +# Bandit configuration +# --------------------------- +[tool.bandit] +targets = "src" +exclude_dirs = ["tests"] + +# --------------------------- +# Zest releaser configuration +# --------------------------- +[tool.zest-releaser] +upload-pypi = false +python-file-with-version = "src/Products/mcdutils/__init__.py" + +# --------------------------- +# Black configuration +# --------------------------- +[tool.black] +line-length = 79 # same value as ruff +target-version = ["py312"] # same as the ruff target-version +skip-string-normalization = false # keeps double quotes (like ruff) + +# --------------------------- +# test +# --------------------------- + +[tool.pytest.ini_options] +testpaths = ["tests"] + +[tool.coverage.run] +source_pkgs = ["Products.mcdutils", "tests"] +branch = true +parallel = true + diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 587ae1b..0000000 --- a/setup.cfg +++ /dev/null @@ -1,28 +0,0 @@ -# Generated from: -# https://github.com/zopefoundation/meta/tree/master/config/zope-product -[bdist_wheel] -universal = 0 - 
-[flake8] -doctests = 1 -no-accept-encodings = True -htmldir = parts/flake8 - -[check-manifest] -ignore = - .editorconfig - .meta.toml - docs/_build/html/_sources/* - docs/_build/html/_images/* - docs/_build/html/_static/* - -[isort] -force_single_line = True -combine_as_imports = True -sections = FUTURE,STDLIB,THIRDPARTY,ZOPE,FIRSTPARTY,LOCALFOLDER -known_third_party = memcache, six -known_zope = -known_first_party = -default_section = ZOPE -line_length = 79 -lines_after_imports = 2 diff --git a/setup.py b/setup.py deleted file mode 100644 index 4dbf34d..0000000 --- a/setup.py +++ /dev/null @@ -1,76 +0,0 @@ -############################################################################## -# -# Copyright (c) 2008-2023 Tres Seaver and Contributors. All Rights Reserved. -# -# This software is subject to the provisions of the Zope Public License, -# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. -# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED -# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS -# FOR A PARTICULAR PURPOSE. -# -############################################################################# - -from setuptools import find_packages -from setuptools import setup - - -def _read(name): - with open(name) as fp: - return fp.read() - - -setup(name='Products.mcdutils', - version='4.3.dev0', - description=('A Zope product with memcached-backed ZCache and ' - 'Zope session implementations.'), - long_description=_read('README.rst') + '\n\n' + _read('CHANGES.rst'), - classifiers=[ - 'Development Status :: 6 - Mature', - 'Environment :: Web Environment', - 'Framework :: Zope', - 'Framework :: Zope :: 5', - 'Intended Audience :: Developers', - 'License :: OSI Approved :: Zope Public License', - 'Operating System :: OS Independent', - 'Programming Language :: Python', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', - 'Programming Language :: Python :: 3.10', - 'Programming Language :: Python :: 3.11', - 'Programming Language :: Python :: 3.12', - 'Topic :: Internet :: WWW/HTTP :: Session', - ], - keywords='zope session memcache memcached Products', - author='Tres Seaver and contributors', - author_email='tseaver@palladion.com', - maintainer='Jens Vagelpohl', - maintainer_email='jens@dataflake.org', - url='https://mcdutils.readthedocs.io', - project_urls={ - 'Documentation': 'https://mcdutils.readthedocs.io', - 'Issue Tracker': ('https://github.com/dataflake/Products.mcdutils' - '/issues'), - 'Sources': 'https://github.com/dataflake/Products.mcdutils', - }, - license='ZPL 2.1', - packages=find_packages('src'), - package_dir={'': 'src'}, - include_package_data=True, - namespace_packages=['Products'], - zip_safe=False, - python_requires='>=3.7', - install_requires=[ - 'setuptools', - 'python-memcached', - 'Zope >= 5', - ], - extras_require={ - 'docs': ['sphinx', - 'repoze.sphinx.autointerface', - 'sphinx-rtd-theme', - 'pkginfo'], - }, - ) diff --git a/src/Products/__init__.py b/src/Products/__init__.py index de40ea7..5284146 100644 --- a/src/Products/__init__.py +++ b/src/Products/__init__.py @@ -1 +1 @@ -__import__('pkg_resources').declare_namespace(__name__) +__import__("pkg_resources").declare_namespace(__name__) diff --git a/src/Products/mcdutils/__init__.py b/src/Products/mcdutils/__init__.py index a43bc03..6f765f8 100644 --- 
a/src/Products/mcdutils/__init__.py +++ b/src/Products/mcdutils/__init__.py @@ -1,3 +1,7 @@ +import contextlib +import os + + ############################################################################## # # Copyright (c) 2008-2023 Tres Seaver and Contributors. All Rights Reserved. @@ -16,29 +20,190 @@ """ +__version__ = "5.0.0" + + class MemCacheError(IOError): pass def initialize(context): - - from .proxy import MemCacheProxy from .proxy import addMemCacheProxy from .proxy import addMemCacheProxyForm - context.registerClass(MemCacheProxy, - constructors=(addMemCacheProxyForm, - addMemCacheProxy)) + from .proxy import MemCacheProxy + + context.registerClass( + MemCacheProxy, constructors=(addMemCacheProxyForm, addMemCacheProxy) + ) - from .sessiondata import MemCacheSessionDataContainer from .sessiondata import addMemCacheSessionDataContainer from .sessiondata import addMemCacheSessionDataContainerForm - context.registerClass(MemCacheSessionDataContainer, - constructors=(addMemCacheSessionDataContainerForm, - addMemCacheSessionDataContainer)) + from .sessiondata import MemCacheSessionDataContainer + + context.registerClass( + MemCacheSessionDataContainer, + constructors=( + addMemCacheSessionDataContainerForm, + addMemCacheSessionDataContainer, + ), + ) - from .zcache import MemCacheZCacheManager from .zcache import addMemCacheZCacheManager from .zcache import addMemCacheZCacheManagerForm - context.registerClass(MemCacheZCacheManager, - constructors=(addMemCacheZCacheManagerForm, - addMemCacheZCacheManager)) + from .zcache import MemCacheZCacheManager + + context.registerClass( + MemCacheZCacheManager, + constructors=(addMemCacheZCacheManagerForm, addMemCacheZCacheManager), + ) + + +# --- mcdutils: enable resilient memcache client by default ------------------- +try: # patch python-memcached Client with a reconnecting wrapper + from .reconnecting import patch_memcache as _mcdutils_patch_memcache + + _mcdutils_patch_memcache() +except Exception: # pragma: no cover - safe import guard # noqa: S110 + pass +# ----------------------------------------------------------------------------- + +# --- mcdutils: add retry at tpc_vote with metrics & structured logs +try: + from .mapping import MemCacheMapping + + import logging + import os + import time + + if not os.environ.get("MCDUTILS_DISABLE_TPC_RETRY"): + from . 
import MemCacheError as _MCD_Err + from .reconnecting import incr_metric as _mcd_incr_metric # metrics + + log = logging.getLogger("Products.mcdutils.tpc") + _orig_tpc_vote = getattr(MemCacheMapping, "tpc_vote", None) + + if _orig_tpc_vote is not None and not getattr( + _orig_tpc_vote, "_mcdutils_patched", False + ): + + def _tpc_vote_with_retry(self, txn): # noqa: C901 + attempts = 1 + try: + attempts = int( + os.environ.get("MCDUTILS_TPC_RETRY_ATTEMPTS", "1") + ) + except Exception: + attempts = 1 + backoff_ms = 0 + try: + backoff_ms = int( + os.environ.get("MCDUTILS_TPC_RETRY_BACKOFF_MS", "100") + ) + except Exception: + backoff_ms = 0 + + # First attempt (original behavior) + try: + return _orig_tpc_vote(self, txn) + except _MCD_Err as first_exc: + last_exc = first_exc + if os.environ.get("MCDUTILS_LOG") == "1": + log.warning( + "tpc_vote failed on first try; will retry", + extra={ + "event": "tpc_vote_fail_first", + "retries": attempts, + "backoff_ms": backoff_ms, + }, + ) + + # Retry loop + for i in range(1, attempts + 1): + t0 = time.time() + # Metrics: count attempt + with contextlib.suppress(Exception): + _mcd_incr_metric("tpc_retry_attempts_total", 1) + try: + # Hard reconnect path + # Accept both _p_proxy (observed) and _proxy (older) + _proxy_obj = getattr(self, "_p_proxy", None) + if _proxy_obj is None: + _proxy_obj = getattr(self, "_proxy", None) + if _proxy_obj is None: + _proxy_obj = getattr(self, "proxy", None) + _client = getattr(_proxy_obj, "client", None) + if _client is not None: + disc = getattr(_client, "disconnect_all", None) + if callable(disc): + with contextlib.suppress(Exception): + disc() + frc = getattr(_client, "force_reconnect", None) + if callable(frc): + with contextlib.suppress(Exception): + frc() + # Dummy op to warm/validate + try: + setter = getattr(_client, "set", None) + if callable(setter): + setter("mcdutils:tpc_probe", 1, time=2) + except Exception: # noqa: S110 + pass + + # Optional backoff + if backoff_ms > 0: + time.sleep(backoff_ms / 1000.0) + try: + _mcd_incr_metric( + "tpc_retry_backoff_seconds_sum", + backoff_ms / 1000.0, + ) + _mcd_incr_metric( + "tpc_retry_backoff_seconds_count", 1 + ) + except Exception: # noqa: S110 + pass + + # Attempt vote again + result = _orig_tpc_vote(self, txn) + # Success metrics + with contextlib.suppress(Exception): + _mcd_incr_metric("tpc_retry_success_total", 1) + if os.environ.get("MCDUTILS_LOG") == "1": + log.info( + "tpc_vote retry succeeded", + extra={ + "event": "tpc_vote_retry_ok", + "attempt": i, + "elapsed_ms": int( + (time.time() - t0) * 1000 + ), + }, + ) + return result + except _MCD_Err as exc: + last_exc = exc + # Failure metrics + with contextlib.suppress(Exception): + _mcd_incr_metric("tpc_retry_fail_total", 1) + if os.environ.get("MCDUTILS_LOG") == "1": + log.error( + "tpc_vote retry failed", + extra={ + "event": "tpc_vote_retry_fail", + "attempt": i, + "elapsed_ms": int( + (time.time() - t0) * 1000 + ), + }, + ) + continue + + # If still failing, re-raise last MemCacheError + raise last_exc + + _tpc_vote_with_retry._mcdutils_patched = True + MemCacheMapping.tpc_vote = _tpc_vote_with_retry +except Exception: # noqa: S110 + # Safe guard: don't break import if anything goes wrong here + pass +# ----------------------------------------------------------------------------- diff --git a/src/Products/mcdutils/ftests/__init__.py b/src/Products/mcdutils/ftests/__init__.py deleted file mode 100644 index 8351bdf..0000000 --- a/src/Products/mcdutils/ftests/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -""" 
Product functional tests for mcdutils, run with "zopectl run" -""" diff --git a/src/Products/mcdutils/ftests/test_proxy.py b/src/Products/mcdutils/ftests/test_proxy.py deleted file mode 100644 index 5a6cb48..0000000 --- a/src/Products/mcdutils/ftests/test_proxy.py +++ /dev/null @@ -1,66 +0,0 @@ -############################################################################## -# -# Copyright (c) 2008-2023 Tres Seaver and Contributors. All Rights Reserved. -# -# This software is subject to the provisions of the Zope Public License, -# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. -# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED -# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS -# FOR A PARTICULAR PURPOSE. -# -############################################################################# -""" Functional tests for Products.mcdutils.proxy """ -import unittest - - -class MemCacheSDCFuncTests(unittest.TestCase): - - def _makeOne(self): - from ..proxy import MemCacheProxy - from ..sessiondata import MemCacheSessionDataContainer - sdc = MemCacheSessionDataContainer() - sdc.mcproxy = MemCacheProxy() - sdc.proxy_path = 'mcproxy' - - return sdc - - def test_writing_to_mapping_no_memcache(self): - from ..mapping import MemCacheMapping - - sdc = self._makeOne() - mapping = sdc.new_or_existing('foobar') - self.assertTrue(isinstance(mapping, MemCacheMapping)) - self.assertFalse(mapping._p_changed) - self.assertFalse(mapping._p_joined) - mapping['abc'] = 1345 - self.assertTrue(mapping._p_changed) - self.assertTrue(mapping._p_joined) - import transaction - transaction.commit() - - def test_writing_to_mapping_with_memcache(self): - from ..mapping import MemCacheMapping - - sdc = self._makeOne() - sdc._get_proxy().servers = ('localhost:11211',) - mapping = sdc.new_or_existing('foobar') - self.assertTrue(isinstance(mapping, MemCacheMapping)) - self.assertFalse(mapping._p_changed) - self.assertTrue(mapping._p_joined) - mapping['abc'] = 1345 - self.assertTrue(mapping._p_changed) - self.assertTrue(mapping._p_joined) - import transaction - transaction.commit() - - def test_writing_to_mapping_with_invalid_memcache_raises(self): - from .. import MemCacheError - - sdc = self._makeOne() - sdc._get_proxy().servers = ('nonesuch:999999',) - mapping = sdc.new_or_existing('foobar') - mapping['abc'] = 1345 - import transaction - self.assertRaises(MemCacheError, transaction.commit) - transaction.abort() diff --git a/src/Products/mcdutils/interfaces.py b/src/Products/mcdutils/interfaces.py index 15766ef..880dc58 100644 --- a/src/Products/mcdutils/interfaces.py +++ b/src/Products/mcdutils/interfaces.py @@ -10,27 +10,27 @@ # FOR A PARTICULAR PURPOSE. # ############################################################################# -""" Products.mcdutils interfaces """ +"""Products.mcdutils interfaces""" + from transaction.interfaces import ISavepointDataManager from zope.interface import Attribute from zope.interface import Interface class ISessionDataContainer(Interface): - """ Document the implied interface expected by Zope's SessionDataManager. - """ + """Document the implied interface expected by Zope's SessionDataManager.""" + def has_key(key): - """ Return True if the container has the key, else False. - """ + """Return True if the container has the key, else False.""" def new_or_existing(key): - """ Return a mapping for 'key', creating it if needed. 
+ """Return a mapping for 'key', creating it if needed. o The returned object must be Acquisition-wrappable. """ def get(key): - """ Return a mapping for 'key'. + """Return a mapping for 'key'. o Return None of no mapping exists. @@ -39,36 +39,36 @@ def get(key): class IMemCacheMapping(ISavepointDataManager): - """ Combine Python's mapping protocol with transaction management. - """ + """Combine Python's mapping protocol with transaction management.""" class IMemCacheProxy(Interface): - """ Manage client connection to a pool of memcached servers. - """ - servers = Attribute('servers', """List of servers + """Manage client connection to a pool of memcached servers.""" -Each item is a : server address.""") + servers = Attribute( + "servers", + """List of servers - client = Attribute('client', """memcache.Client instance""") - client.setTaggedValue('read_only', True) +Each item is a : server address.""", + ) + + client = Attribute("client", """memcache.Client instance""") + client.setTaggedValue("read_only", True) def get(key): - """ Return the value stored in the cache under 'key'. - """ + """Return the value stored in the cache under 'key'.""" def get_multi(keys): - """ Return a mapping of values stored in the cache under 'keys'. - """ + """Return a mapping of values stored in the cache under 'keys'.""" def set(key, value): - """ Store value for 'key'. + """Store value for 'key'. o Return a boolean to indicate success. """ def add(key, value): - """ Store value (a mapping) for 'key'. + """Store value (a mapping) for 'key'. o Return a boolean to indicate success. @@ -76,7 +76,7 @@ def add(key, value): """ def replace(key, value): - """ Store value (a mapping) for 'key'. + """Store value (a mapping) for 'key'. o Return a boolean to indicate success. @@ -84,7 +84,7 @@ def replace(key, value): """ def delete(key, time=0): - """ Remove the value stored in the cache under 'key'. + """Remove the value stored in the cache under 'key'. o 'time', if passed an integer time value (in seconds) during which the memcached server will block new writes to this key @@ -95,22 +95,21 @@ def delete(key, time=0): class IMemCacheSessionDataContainer(ISessionDataContainer): - """ memcache-specific SDC, using a proxy. - """ + """memcache-specific SDC, using a proxy.""" + proxy_path = Attribute("""Path to proxy. No session operations are possible if the path is invalid.""") class IZCache(Interface): - """ Interface describing API for OFS.Cache.Cache. - """ + """Interface describing API for OFS.Cache.Cache.""" + def ZCache_invalidate(ob): - """ Remove any entries from the cache for 'ob'. - """ + """Remove any entries from the cache for 'ob'.""" def ZCache_get(ob, view_name, keywords, mtime_func, default): - """ Fetch a cache entry for 'ob'. + """Fetch a cache entry for 'ob'. o If an object provides different views that would benefit from caching, it will set 'view_name', which should be treated as @@ -131,7 +130,7 @@ def ZCache_get(ob, view_name, keywords, mtime_func, default): """ def ZCache_set(ob, data, view_name, keywords, mtime_func): - """ Store a value in the cache for 'ob'. + """Store a value in the cache for 'ob'. o 'data' is the value to be stored. @@ -141,8 +140,7 @@ def ZCache_set(ob, data, view_name, keywords, mtime_func): class IZCacheManager(Interface): - """ Interface describing API for OFS.Cache.CacheManager. - """ + """Interface describing API for OFS.Cache.CacheManager.""" + def ZCacheManager_getCache(): - """ Return an object implementing IZCache. 
- """ + """Return an object implementing IZCache.""" diff --git a/src/Products/mcdutils/mapping.py b/src/Products/mcdutils/mapping.py index ca82248..2035fa1 100644 --- a/src/Products/mcdutils/mapping.py +++ b/src/Products/mcdutils/mapping.py @@ -10,8 +10,10 @@ # FOR A PARTICULAR PURPOSE. # ############################################################################# -""" memcache-aware transactional mapping """ -import transaction +"""memcache-aware transactional mapping""" + +from __future__ import annotations + from AccessControl.class_init import InitializeClass from AccessControl.SecurityInfo import ClassSecurityInfo from persistent.mapping import PersistentMapping @@ -20,22 +22,25 @@ from zope.interface import implementedBy from zope.interface import implementer +import contextlib +import transaction + @implementer(ISavepointDataManager + implementedBy(PersistentMapping)) class MemCacheMapping(PersistentMapping): - """ memcache-based mapping which manages its own transactional semantics - """ + """memcache-based mapping which manages its own transactional semantics""" + security = ClassSecurityInfo() def __init__(self, key, proxy): PersistentMapping.__init__(self) self._p_oid = hash(key) - self._p_jar = self # we are our own data manager + self._p_jar = self # we are our own data manager self._p_key = key self._p_proxy = proxy self._p_joined = False - security.setDefaultAccess('allow') + security.setDefaultAccess("allow") security.declareObjectPublic() set = PersistentMapping.__setitem__ @@ -65,99 +70,86 @@ def __repr__(self): # Overriding here to try and hide some password fields, like # the ZPublisher HTTPRequest class tries to do. new_dict = dict(self.data) - for key in new_dict.keys(): - if 'passw' in key.lower(): - new_dict[key] = '' + for key in new_dict: + if "passw" in key.lower(): + new_dict[key] = "" return repr(new_dict) def has_key(self, key): - """ Backwards compatibility under Python 3 """ + """Backwards compatibility under Python 3""" return key in self.data def getContainerKey(self): - """ Fake out (I)Transient API. - """ + """Fake out (I)Transient API.""" return self._p_key def _clean(self): # Remove from proxy cache to force an update # from memcached during next access. - try: + with contextlib.suppress(KeyError): del self._p_proxy._cached[self._p_key] - except KeyError: - pass - security.declarePrivate('abort') # NOQA: D001 + security.declarePrivate("abort") def abort(self, txn): - """ See IDataManager. - """ + """See IDataManager.""" self._clean() - security.declarePrivate('tpc_begin') # NOQA: D001 + security.declarePrivate("tpc_begin") def tpc_begin(self, txn): - """ See IDataManager. - """ + """See IDataManager.""" - security.declarePrivate('commit') # NOQA: D001 + security.declarePrivate("commit") def commit(self, txn): - """ See IDataManager. - """ + """See IDataManager.""" - security.declarePrivate('invalidate') # NOQA: D001 + security.declarePrivate("invalidate") def invalidate(self): - """ See TransientObject. - """ - try: + """See TransientObject.""" + with contextlib.suppress(KeyError): self._p_proxy.delete(self._p_key) - except KeyError: - pass - security.declarePrivate('tpc_vote') # NOQA: D001 + security.declarePrivate("tpc_vote") def tpc_vote(self, txn): - """ See IDataManager. 
- """ - server, key = self._p_proxy.client._get_server(self._p_key) + """See IDataManager.""" + server, _ = self._p_proxy.client._get_server(self._p_key) if server is None: from Products.mcdutils import MemCacheError + raise MemCacheError("Can't reach memcache server!") - security.declarePrivate('tpc_finish') # NOQA: D001 + security.declarePrivate("tpc_finish") def tpc_finish(self, txn): - """ See IDataManager. - """ + """See IDataManager.""" if self._p_changed: self._p_proxy.set(self._p_key, self) # no error handling self._p_changed = 0 self._p_joined = False self._clean() - security.declarePrivate('tpc_abort') # NOQA: D001 + security.declarePrivate("tpc_abort") def tpc_abort(self, txn): - """ See IDataManager. - """ + """See IDataManager.""" self._p_joined = False self._p_changed = 0 self._clean() - security.declarePrivate('sortKey') # NOQA: D001 + security.declarePrivate("sortKey") def sortKey(self): - """ See IDataManager. - """ - return 'MemCacheMapping: %s' % self._p_key + """See IDataManager.""" + return f"MemCacheMapping: {self._p_key}" - security.declarePrivate('register') # NOQA: D001 + security.declarePrivate("register") def register(self, obj): - """ See IPersistentDataManager - """ + """See IPersistentDataManager""" if obj is not self: raise ValueError("Can't be the jar for another object.") @@ -165,11 +157,10 @@ def register(self, obj): transaction.get().join(self) self._p_joined = True - security.declarePrivate('savepoint') # NOQA: D001 + security.declarePrivate("savepoint") def savepoint(self): - """ See ITransaction - """ + """See ITransaction""" return MemCacheMappingSavepoint() @@ -178,11 +169,10 @@ def savepoint(self): @implementer(IDataManagerSavepoint) class MemCacheMappingSavepoint: - """ A simple savepoint object - """ + """A simple savepoint object""" def rollback(self): - """ Roll back a savepoint + """Roll back a savepoint Memcache and the python-memcached library don't have the concept of a rollback, so there is nothing useful to do here. diff --git a/src/Products/mcdutils/proxy.py b/src/Products/mcdutils/proxy.py index 1d60276..bae5e37 100644 --- a/src/Products/mcdutils/proxy.py +++ b/src/Products/mcdutils/proxy.py @@ -10,43 +10,47 @@ # FOR A PARTICULAR PURPOSE. # ############################################################################# -""" Memcache proxy """ -import memcache +"""Memcache proxy""" + +from __future__ import annotations +from .interfaces import IMemCacheProxy +from .mapping import MemCacheMapping from AccessControl.class_init import InitializeClass from AccessControl.SecurityInfo import ClassSecurityInfo from OFS.PropertyManager import PropertyManager from OFS.SimpleItem import SimpleItem from Products.PageTemplates.PageTemplateFile import PageTemplateFile +from typing import Any from zope.interface import implementedBy from zope.interface import implementer -from .interfaces import IMemCacheProxy -from .mapping import MemCacheMapping +import contextlib +import memcache class FauxClient(dict): - def _get_server(self, key): return self, key - def set(self, key, value): + def set(self, key: str, value: Any): self[key] = value -@implementer(IMemCacheProxy + implementedBy(SimpleItem) - + implementedBy(PropertyManager)) +@implementer( + IMemCacheProxy + implementedBy(SimpleItem) + implementedBy(PropertyManager) +) class MemCacheProxy(SimpleItem, PropertyManager): - """ Implement ISDC via a a pool of memcache servers. 
- """ + """Implement ISDC via a a pool of memcache servers.""" + security = ClassSecurityInfo() _v_cached = None _v_client = None - zmi_icon = 'fas fa-tachometer-alt' + zmi_icon = "fas fa-tachometer-alt" - def __init__(self, id, title=''): - self.id = id + def __init__(self, id_, title=""): + self.id = id_ self.title = title def _get_cached(self): @@ -73,32 +77,28 @@ def _get_client(self): def _set_servers(self, value): self._servers = value - try: + with contextlib.suppress(AttributeError): del self._v_client - except AttributeError: - pass - try: + with contextlib.suppress(AttributeError): del self._v_cache - except AttributeError: - pass servers = property(lambda self: self._servers, _set_servers) # # ZMI # - meta_type = 'MemCache Proxy' - _properties = ({'id': 'title', 'type': 'string', 'mode': 'w'}, - {'id': 'servers', 'type': 'ulines', 'mode': 'w'}) + meta_type = "MemCache Proxy" + _properties = ( + {"id": "title", "type": "string", "mode": "w"}, + {"id": "servers", "type": "ulines", "mode": "w"}, + ) - manage_options = (PropertyManager.manage_options - + SimpleItem.manage_options) + manage_options = PropertyManager.manage_options + SimpleItem.manage_options - security.declarePrivate('get') # NOQA: D001 + security.declarePrivate("get") def get(self, key): - """ See IMemCacheProxy. - """ + """See IMemCacheProxy.""" mapping = self._cached.get(key) if mapping is None: @@ -110,61 +110,53 @@ def get(self, key): return mapping - security.declarePrivate('get_multi') # NOQA: D001 + security.declarePrivate("get_multi") def get_multi(self, keys): - """ See IMemCacheProxy. - """ + """See IMemCacheProxy.""" return self._get_remote_multi(keys) - security.declarePrivate('set') # NOQA: D001 + security.declarePrivate("set") - def set(self, key, value): - """ See IMemCacheProxy. - """ + def set(self, key: str, value: Any): + """See IMemCacheProxy.""" rc = self.client.set(key, value) - try: + with contextlib.suppress(KeyError): del self._cached[key] - except KeyError: - pass return rc - security.declarePrivate('add') # NOQA: D001 + security.declarePrivate("add") def add(self, key, value): - """ Store value (a mapping) for 'key'. + """Store value (a mapping) for 'key'. o Return a boolean to indicate success. o Like 'set', but stores value only if the key does not already exist. """ rc = self.client.add(key, value) - try: + with contextlib.suppress(KeyError): del self._cached[key] - except KeyError: - pass return rc - security.declarePrivate('replace') # NOQA: D001 + security.declarePrivate("replace") def replace(self, key, value): - """ Store value (a mapping) for 'key'. + """Store value (a mapping) for 'key'. o Return a boolean to indicate success. o Like 'set', but stores value only if the key already exists. """ rc = self.client.replace(key, value) - try: + with contextlib.suppress(KeyError): del self._cached[key] - except KeyError: - pass return rc - security.declarePrivate('delete') # NOQA: D001 + security.declarePrivate("delete") def delete(self, key, time=0): - """ Remove the value stored in the cache under 'key'. + """Remove the value stored in the cache under 'key'. o Return a boolean to indicate success. @@ -172,17 +164,14 @@ def delete(self, key, time=0): whether the item's """ rc = self.client.delete(key, time) - try: + with contextlib.suppress(KeyError): del self._cached[key] - except KeyError: - pass return rc - security.declarePrivate('create') # NOQA: D001 + security.declarePrivate("create") def create(self, key): - """ See IMemCacheProxy. 
- """ + """See IMemCacheProxy.""" mapping = self._cached[key] = MemCacheMapping(key, self) return mapping @@ -219,14 +208,14 @@ def _get_remote_multi(self, keys): InitializeClass(MemCacheProxy) -def addMemCacheProxy(dispatcher, id, title='', REQUEST=None): - """ Add a MCP to dispatcher. - """ +def addMemCacheProxy(dispatcher, id, title="", REQUEST=None): # noqa: A002 + """Add a MCP to dispatcher.""" dispatcher._setObject(id, MemCacheProxy(id, title=title)) if REQUEST is not None: - REQUEST['RESPONSE'].redirect('%s/manage_workspace' - % dispatcher.absolute_url()) + REQUEST["RESPONSE"].redirect( + f"{dispatcher.absolute_url()}/manage_workspace" + ) -addMemCacheProxyForm = PageTemplateFile('www/add_mcp.pt', globals()) +addMemCacheProxyForm = PageTemplateFile("www/add_mcp.pt", globals()) diff --git a/src/Products/mcdutils/py.typed b/src/Products/mcdutils/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/src/Products/mcdutils/reconnecting.py b/src/Products/mcdutils/reconnecting.py new file mode 100644 index 0000000..bbdfc21 --- /dev/null +++ b/src/Products/mcdutils/reconnecting.py @@ -0,0 +1,258 @@ +""" +Reconnecting wrapper for python-memcached's Client with metrics, logging, +and parametrizable backoff. + +Env vars (all optional): +- MCDUTILS_DISABLE_RECONNECT=1 -> disables the monkey-patch +- MCDUTILS_LOG_LEVEL=DEBUG|INFO|... -> logging level (default: WARNING) +- MCDUTILS_LOG=1 -> enable logging of reconnect events +- MCDUTILS_METRICS=1 -> enable in-process metrics counters +- MCDUTILS_BACKOFF_MIN_MS=50 -> min backoff before retry (ms) +- MCDUTILS_BACKOFF_MAX_MS=200 -> max backoff before retry (ms) + -> set both to 0 to disable backoff +Optionally, you can export Prometheus-format metrics by calling +`export_prometheus_textfile(path)` from your app. 
+""" + +from __future__ import annotations + +from collections.abc import Iterable +from functools import wraps +from typing import Any + +import contextlib +import logging +import memcache +import os +import random +import socket +import threading +import time + + +_OriginalClient = memcache.Client +_system_random = random.SystemRandom() + +log = logging.getLogger("Products.mcdutils.reconnecting") +_level = os.environ.get("MCDUTILS_LOG_LEVEL") +if _level: + try: + log.setLevel(getattr(logging, _level.upper(), logging.WARNING)) + except Exception: + log.setLevel(logging.WARNING) +else: + log.setLevel(logging.WARNING) +if not log.handlers: + # Defer to root configuration, but ensure at least a NullHandler present + log.addHandler(logging.NullHandler()) + +_ENABLE_LOG = os.environ.get("MCDUTILS_LOG") == "1" +_ENABLE_METRICS = os.environ.get("MCDUTILS_METRICS") == "1" + + +def _env_ms(name: str, default: int) -> int: + try: + v = int(os.environ.get(name, default)) + return max(0, v) + except Exception: + return default + + +_BACKOFF_MIN_MS = _env_ms("MCDUTILS_BACKOFF_MIN_MS", 50) +_BACKOFF_MAX_MS = _env_ms("MCDUTILS_BACKOFF_MAX_MS", 200) +if _BACKOFF_MAX_MS < _BACKOFF_MIN_MS: + _BACKOFF_MAX_MS = _BACKOFF_MIN_MS + +# -------------------- metrics (in-process) -------------------- +_metrics = { + # tpc_vote-level metrics + "tpc_retry_attempts_total": 0, + "tpc_retry_success_total": 0, + "tpc_retry_fail_total": 0, + "tpc_retry_backoff_seconds_sum": 0.0, + "tpc_retry_backoff_seconds_count": 0, + "reconnect_attempts_total": 0, + "reconnect_success_total": 0, + "reconnect_fail_total": 0, + "retry_calls_total": 0, + "retry_duration_seconds_sum": 0.0, + "retry_duration_seconds_count": 0, +} + + +def get_metrics() -> dict[str, float]: + """Return a copy of metrics counters (always available).""" + return dict(_metrics) + + +def incr_metric(name: str, inc: float = 1.0) -> None: + try: + if name in _metrics: + _metrics[name] += inc + else: + _metrics[name] = inc + except Exception: # noqa: S110 + pass + + +def export_prometheus_textfile(path: str) -> None: + """Write metrics in Prometheus textfile format to `path` (atomic write).""" + lines = [ + "# TYPE mcdutils_reconnect_attempts_total counter", + f"mcdutils_reconnect_attempts_total {_metrics['reconnect_attempts_total']}", # noqa: E501 + "# TYPE mcdutils_reconnect_success_total counter", + f"mcdutils_reconnect_success_total {_metrics['reconnect_success_total']}", # noqa: E501 + "# TYPE mcdutils_reconnect_fail_total counter", + f"mcdutils_reconnect_fail_total {_metrics['reconnect_fail_total']}", + "# TYPE mcdutils_retry_calls_total counter", + f"mcdutils_retry_calls_total {_metrics['retry_calls_total']}", + "# TYPE mcdutils_retry_duration_seconds summary", + f"mcdutils_retry_duration_seconds_sum {_metrics['retry_duration_seconds_sum']}", # noqa: E501 + f"mcdutils_retry_duration_seconds_count {_metrics['retry_duration_seconds_count']}", # noqa: E501 + ] + tmp = f"{path}.tmp" + with open(tmp, "w", encoding="utf-8") as fh: + fh.write("\n".join(lines) + "\n") + os.replace(tmp, path) + + +# -------------------------------------------------------------- + +_RECOVERABLE = ( + socket.timeout, + ConnectionError, + BrokenPipeError, + ConnectionResetError, + OSError, # covers "Bad file descriptor", "Transport endpoint not connected", etc. # noqa: E501 +) + + +def _should_reconnect(exc: BaseException) -> bool: + # Conservative: reconnect on any socket-ish error. 
+ if isinstance(exc, _RECOVERABLE): + return True + # python-memcached raises generic Exception for some network issues; + # string-match some common cases without being too specific. + msg = str(exc).lower() + network_markers = ( + "socket", + "timed out", + "reset", + "broken pipe", + "bad file descriptor", + "transport", + ) + return any(m in msg for m in network_markers) + + +class ReconnectingClient: + """ + Drop-in replacement for memcache.Client with automatic + reconnect+retry-once. Includes optional logging, + metrics, and jittered backoff. + """ + + def __init__(self, servers: Iterable[str], **kwargs: Any) -> None: + self._servers = list(servers) + self._kwargs = dict(kwargs) + self._lock = threading.RLock() + self._client = _OriginalClient(self._servers, **self._kwargs) + + def _reconnect(self) -> None: + if _ENABLE_METRICS: + _metrics["reconnect_attempts_total"] += 1 + if _ENABLE_LOG: + log.info( + "mcdutils: reconnecting memcache client to %r", self._servers + ) + with self._lock: + self._client = _OriginalClient(self._servers, **self._kwargs) + if _ENABLE_METRICS: + _metrics["reconnect_success_total"] += 1 + + def _backoff_sleep(self) -> None: + if _BACKOFF_MAX_MS == 0 and _BACKOFF_MIN_MS == 0: + return # disabled + delay_ms = _system_random.uniform(_BACKOFF_MIN_MS, _BACKOFF_MAX_MS) + time.sleep(delay_ms / 1000.0) + + def __getattr__(self, name: str) -> Any: + # Forward attributes; wrap callables with retry-once logic. + attr = getattr(self._client, name) + if not callable(attr): + return attr + + @wraps(attr) + def _wrapped(*args: Any, **kwargs: Any) -> Any: + try: + return attr(*args, **kwargs) + except BaseException as exc: + if not _should_reconnect(exc): + # Non-network problem: bubble up. + raise + start = time.time() + try: + self._reconnect() + except Exception as rex: + if _ENABLE_METRICS: + _metrics["reconnect_fail_total"] += 1 + if _ENABLE_LOG: + log.error( + "mcdutils: reconnect attempt failed: %s", rex + ) + raise + self._backoff_sleep() + # retry once + if _ENABLE_METRICS: + _metrics["retry_calls_total"] += 1 + new_attr = getattr(self._client, name) + try: + return new_attr(*args, **kwargs) + finally: + if _ENABLE_METRICS: + _metrics["retry_duration_seconds_sum"] += ( + time.time() - start + ) + _metrics["retry_duration_seconds_count"] += 1 + + return _wrapped + + # Explicitly expose close so app code can forcefully reset if desired. + def force_reconnect(self) -> None: + """Force a full reconnect by rebuilding underlying client.""" + self._reconnect() + + def close(self) -> None: + try: + c = self._client + except Exception: + return + close = getattr(c, "disconnect_all", None) or getattr(c, "close", None) + if callable(close): + with contextlib.suppress(Exception): + close() + + +# Copy class-level constants (e.g., _SERVER_RETRIES, timeouts) so that any +# external code referencing memcache.Client. keeps working after +# monkey-patch. python-memcached expects some of these to exist at class-level. +for _k, _v in _OriginalClient.__dict__.items(): + try: + if ( + isinstance(_k, str) + and _k.isupper() + and not hasattr(ReconnectingClient, _k) + ): + setattr(ReconnectingClient, _k, _v) + except Exception: # noqa: S110 + pass + + +def patch_memcache() -> None: + """ + Monkey-patch memcache.Client globally to use ReconnectingClient. + Controlled by env var MCDUTILS_DISABLE_RECONNECT to opt-out. 
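+
+    One possible call site, shown as a sketch (where to invoke it, for
+    example during product initialization, is left to the application):
+
+        from Products.mcdutils.reconnecting import patch_memcache
+
+        patch_memcache()  # no-op if MCDUTILS_DISABLE_RECONNECT is set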
+ """ + if os.environ.get("MCDUTILS_DISABLE_RECONNECT"): + return + memcache.Client = ReconnectingClient diff --git a/src/Products/mcdutils/sessiondata.py b/src/Products/mcdutils/sessiondata.py index 0740665..1cfb833 100644 --- a/src/Products/mcdutils/sessiondata.py +++ b/src/Products/mcdutils/sessiondata.py @@ -10,7 +10,12 @@ # FOR A PARTICULAR PURPOSE. # ############################################################################# -""" Products.mcdutils session data container """ +"""Products.mcdutils session data container""" + +from __future__ import annotations + +from .interfaces import IMemCacheSessionDataContainer +from .mapping import MemCacheMapping from AccessControl.class_init import InitializeClass from AccessControl.SecurityInfo import ClassSecurityInfo from OFS.PropertyManager import PropertyManager @@ -20,30 +25,31 @@ from zope.interface import implementer from ZPublisher.HTTPRequest import default_encoding -from .interfaces import IMemCacheSessionDataContainer -from .mapping import MemCacheMapping - -@implementer(IMemCacheSessionDataContainer + implementedBy(SimpleItem) - + implementedBy(PropertyManager)) +@implementer( + IMemCacheSessionDataContainer + + implementedBy(SimpleItem) + + implementedBy(PropertyManager) +) class MemCacheSessionDataContainer(SimpleItem, PropertyManager): - """ Implement ISDC via a memcache proxy. - """ + """Implement ISDC via a memcache proxy.""" + security = ClassSecurityInfo() _v_proxy = None - proxy_path = '' - zmi_icon = 'far fa-clock' + proxy_path = "" + zmi_icon = "far fa-clock" - def __init__(self, id, title=''): - self.id = id + def __init__(self, id_, title=""): + self.id = id_ self.title = title def _get_proxy(self): if self._v_proxy is None: if not self.proxy_path: from Products.mcdutils import MemCacheError - raise MemCacheError('No proxy defined') + + raise MemCacheError("No proxy defined") self._v_proxy = self.unrestrictedTraverse(self.proxy_path) return self._v_proxy @@ -52,27 +58,25 @@ def _get_proxy(self): # # ZMI # - meta_type = 'MemCache Session Data Container' - _properties = ( - {'id': 'proxy_path', 'type': 'string', 'mode': 'w'}, - ) + meta_type = "MemCache Session Data Container" + _properties = ({"id": "proxy_path", "type": "string", "mode": "w"},) manage_options = ( - PropertyManager.manage_options - + ({'action': 'addItemsToSessionForm', 'label': 'Test'},) - + SimpleItem.manage_options) + PropertyManager.manage_options # noqa: RUF005 + + ({"action": "addItemsToSessionForm", "label": "Test"},) + + SimpleItem.manage_options + ) - security.declarePublic('addItemsToSessionForm') # NOQA: D001 - addItemsToSessionForm = PageTemplateFile('www/add_items.pt', globals()) + security.declarePublic("addItemsToSessionForm") + addItemsToSessionForm = PageTemplateFile("www/add_items.pt", globals()) - security.declarePublic('addItemsToSession') # NOQA: D001 + security.declarePublic("addItemsToSession") def addItemsToSession(self): - """ Add key value pairs from 'items' textarea to the session. 
- """ + """Add key value pairs from 'items' textarea to the session.""" request = self.REQUEST - items = request.form.get('items', ()) - session = request['SESSION'] + items = request.form.get("items", ()) + session = request["SESSION"] before = len(session.keys()) count = len(items) @@ -80,30 +84,28 @@ def addItemsToSession(self): for line in items: if not isinstance(line, bytes): line = line.encode(default_encoding) - k, v = line.split(b' ', 1) + k, v = line.split(b" ", 1) k = k.strip() v = v.strip() session[k] = v after = len(session.keys()) - return 'Before: %d; after: %d; # items: %d' % (before, after, count) + return f"Before: {before}; after: {after}; # items: {count}" # # ISessionDataContainer implementation # - security.declarePrivate('has_key') # NOQA: D001 + security.declarePrivate("has_key") def has_key(self, key): - """ See ISessionDataContainer. - """ + """See ISessionDataContainer.""" return self._get_proxy().get(self._safe_key(key)) is not None - security.declarePrivate('new_or_existing') # NOQA: D001 + security.declarePrivate("new_or_existing") def new_or_existing(self, key): - """ See ISessionDataContainer. - """ + """See ISessionDataContainer.""" key = self._safe_key(key) mapping = self.get(key) @@ -114,32 +116,32 @@ def new_or_existing(self, key): return mapping - security.declarePrivate('get') # NOQA: D001 + security.declarePrivate("get") def get(self, key): - """ See ISessionDataContainer. - """ + """See ISessionDataContainer.""" return self._get_proxy().get(self._safe_key(key)) def _safe_key(self, key): - """ Helper to ensure the key is always a binary string """ + """Helper to ensure the key is always a binary string""" if isinstance(key, str): - key = key.encode('UTF-8') + key = key.encode("UTF-8") return key InitializeClass(MemCacheSessionDataContainer) -def addMemCacheSessionDataContainer(dispatcher, id, title='', REQUEST=None): - """ Add a MCSDC to dispatcher. - """ +def addMemCacheSessionDataContainer(dispatcher, id, title="", REQUEST=None): # noqa: A002 + """Add a MCSDC to dispatcher.""" dispatcher._setObject(id, MemCacheSessionDataContainer(id, title=title)) if REQUEST is not None: - REQUEST['RESPONSE'].redirect('%s/manage_workspace' - % dispatcher.absolute_url()) + REQUEST["RESPONSE"].redirect( + f"{dispatcher.absolute_url()}/manage_workspace" + ) -addMemCacheSessionDataContainerForm = PageTemplateFile('www/add_mcsdc.pt', - globals()) +addMemCacheSessionDataContainerForm = PageTemplateFile( + "www/add_mcsdc.pt", globals() +) diff --git a/src/Products/mcdutils/tests/__init__.py b/src/Products/mcdutils/tests/__init__.py deleted file mode 100644 index bccdfb3..0000000 --- a/src/Products/mcdutils/tests/__init__.py +++ /dev/null @@ -1 +0,0 @@ -""" Unit tests for Products.mcdutils """ diff --git a/src/Products/mcdutils/tests/test_proxy.py b/src/Products/mcdutils/tests/test_proxy.py deleted file mode 100644 index f46d47a..0000000 --- a/src/Products/mcdutils/tests/test_proxy.py +++ /dev/null @@ -1,154 +0,0 @@ -############################################################################## -# -# Copyright (c) 2008-2023 Tres Seaver and Contributors. All Rights Reserved. -# -# This software is subject to the provisions of the Zope Public License, -# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. -# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED -# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS -# FOR A PARTICULAR PURPOSE. 
-# -############################################################################# -""" Unit tests for Products.mcdutils.proxy """ -import unittest - - -KEY = b'key1' - - -class FauxClientTests(unittest.TestCase): - - def _getTargetClass(self): - from ..proxy import FauxClient - return FauxClient - - def _makeOne(self): - return self._getTargetClass()() - - def test_faux_client(self): - # Faux client only fakes out a few methods - fc = self._makeOne() - - self.assertEqual(fc._get_server(KEY), (fc, KEY)) - fc.set(KEY, 'value1') - self.assertEqual(fc._get_server(KEY), (fc, KEY)) - - -class MemCacheProxyTests(unittest.TestCase): - - def _getTargetClass(self): - from ..proxy import MemCacheProxy - return MemCacheProxy - - def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) - - def _makeOneWithMemcache(self, *args, **kw): - from .helpers import DummyMemcache - proxy = self._getTargetClass()(*args, **kw) - proxy._v_client = DummyMemcache() - return proxy - - def test_conforms_to_IMemCacheProxy(self): - from zope.interface.verify import verifyClass - - from ..interfaces import IMemCacheProxy - verifyClass(IMemCacheProxy, self._getTargetClass()) - - def test__init__(self): - proxy = self._makeOne('proxy', title='Proxy') - - self.assertEqual(proxy.getId(), 'proxy') - self.assertEqual(proxy.servers, ()) - self.assertEqual(proxy.getProperty('servers'), ()) - self.assertEqual(proxy.title, 'Proxy') - self.assertEqual(proxy.getProperty('title'), 'Proxy') - - def test__cached(self): - proxy = self._makeOne('proxy') - - self.assertEqual(proxy._cached, {}) - - proxy._v_cached = {'foo': 'bar'} - self.assertEqual(proxy._cached, {'foo': 'bar'}) - - def test_client(self): - from memcache import Client - proxy = self._makeOne('proxy') - - self.assertIsNotNone(proxy.client) - - proxy._v_client = 'x' - self.assertEqual(proxy.client, 'x') - - # Set a server, which should create a real client instance - proxy.servers = ('127.0.0.1:9999',) - self.assertIsInstance(proxy.client, Client) - - def test__servers(self): - proxy = self._makeOne('proxy') - - self.assertEqual(proxy.servers, ()) - proxy.servers = ('srv',) - self.assertEqual(proxy.servers, ('srv',)) - - # make sure all caches are cleared - proxy._v_client = 'client' - proxy._v_cache = 'cache' - self.assertIsNotNone(getattr(proxy, '_v_client')) - self.assertIsNotNone(getattr(proxy, '_v_cache')) - proxy.servers = ('srv',) - self.assertIsNone(getattr(proxy, '_v_client', None)) - self.assertIsNone(getattr(proxy, '_v_cache', None)) - - def test_create(self): - from ..mapping import MemCacheMapping - proxy = self._makeOne('proxy') - - created = proxy.create(KEY) - self.assertIsInstance(created, MemCacheMapping) - - def test_get_set(self): - proxy = self._makeOneWithMemcache('proxy') - - self.assertIsNone(proxy.get(KEY)) - self.assertTrue(proxy.set(KEY, proxy.create(KEY))) - self.assertEqual(proxy.get(KEY), {}) - - # This should also work when setting values that are - # not MemCacheMapping instances - KEY2 = b'key2' - self.assertIsNone(proxy.get(KEY2)) - self.assertTrue(proxy.set(KEY2, {'foo': 'bar'})) - self.assertEqual(proxy.get(KEY2), {'foo': 'bar'}) - - def test_get_multi(self): - proxy = self._makeOneWithMemcache('proxy') - - self.assertEqual(proxy.get_multi([KEY, b'key2']), - {KEY: None, b'key2': None}) - - def test_add(self): - proxy = self._makeOneWithMemcache('proxy') - - self.assertTrue(proxy.add(KEY, proxy.create(KEY))) - self.assertEqual(proxy.get(KEY), {}) - - def test_replace(self): - proxy = self._makeOneWithMemcache('proxy') - 
- self.assertIsNone(proxy.replace(KEY, proxy.create(KEY))) - self.assertIsNone(proxy.get(KEY)) - - self.assertTrue(proxy.set(KEY, proxy.create(KEY))) - self.assertEqual(proxy.get(KEY), {}) - - def test_delete(self): - proxy = self._makeOneWithMemcache('proxy') - - self.assertIsNone(proxy.delete(KEY), proxy.create(KEY)) - - self.assertTrue(proxy.set(KEY, proxy.create(KEY))) - self.assertTrue(proxy.delete(KEY)) - self.assertIsNone(proxy.get(KEY)) diff --git a/src/Products/mcdutils/tests/test_zcache.py b/src/Products/mcdutils/tests/test_zcache.py deleted file mode 100644 index 0c70bd9..0000000 --- a/src/Products/mcdutils/tests/test_zcache.py +++ /dev/null @@ -1,233 +0,0 @@ -############################################################################## -# -# Copyright (c) 2008-2023 Tres Seaver and Contributors. All Rights Reserved. -# -# This software is subject to the provisions of the Zope Public License, -# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. -# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED -# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS -# FOR A PARTICULAR PURPOSE. -# -############################################################################# -""" Unit tests for Products.mcdutils.zcache """ -import unittest - - -class TestsOf_aggregateKey(unittest.TestCase): - - def test_defaults(self): - from ..zcache import aggregateKey - key = aggregateKey(DummyOb()) - self.assertEqual(key, '%s|||' % _DUMMY_PATH_STR) - - def test_explicit_view_name(self): - from ..zcache import aggregateKey - key = aggregateKey(DummyOb(), view_name='VIEW_NAME') - self.assertEqual(key, '%s|VIEW_NAME||' % _DUMMY_PATH_STR) - - def test_explicit_request_names(self): - from ..zcache import aggregateKey - key = aggregateKey(DummyOb(), - request={'aaa': 'AAA', - 'bbb': 'BBB', - 'ccc': 'CCC'}, - request_names=['aaa', 'ccc']) - self.assertEqual(key, '%s||aaa:AAA,ccc:CCC|' % _DUMMY_PATH_STR) - - def test_explicit_local_keys(self): - from ..zcache import aggregateKey - key = aggregateKey(DummyOb(), local_keys={'foo': 'bar', 'baz': 'bam'}) - self.assertEqual(key, '%s|||baz:bam,foo:bar' % _DUMMY_PATH_STR) - - -class MemCacheZCacheTests(unittest.TestCase): - - def _getTargetClass(self): - from ..zcache import MemCacheZCache - return MemCacheZCache - - def _makeOne(self, proxy, request_names=(), *args, **kw): - mczc = self._getTargetClass()(proxy, request_names, *args, **kw) - return mczc - - def test_conforms_to_IZCache(self): - from zope.interface.verify import verifyClass - - from ..interfaces import IZCache - - verifyClass(IZCache, self._getTargetClass()) - - def test_ZCache_get_cache_miss(self): - proxy = DummyProxy() - cache = self._makeOne(proxy) - - self.assertEqual(cache.ZCache_get(DummyOb()), None) - - def test_ZCache_get_cache_hit_default_args(self): - proxy = DummyProxy() - cache = self._makeOne(proxy) - - proxy._cached['%s|||' % _DUMMY_PATH_STR] = 'XYZZY' - - self.assertEqual(cache.ZCache_get(DummyOb()), 'XYZZY') - - def test_ZCache_get_cache_hit_view_name(self): - proxy = DummyProxy() - cache = self._makeOne(proxy) - - proxy._cached['%s|||' % _DUMMY_PATH_STR] = 'XYZZY' - proxy._cached['%s|foo||' % _DUMMY_PATH_STR] = 'ABCDEF' - - self.assertEqual(cache.ZCache_get(DummyOb(), view_name='foo'), - 'ABCDEF') - - def test_ZCache_get_cache_miss_view_name(self): - proxy = DummyProxy() - cache = self._makeOne(proxy) - - proxy._cached['%s|||' % _DUMMY_PATH_STR] = 'XYZZY' 
- proxy._cached['%s|foo||' % _DUMMY_PATH_STR] = 'ABCDEF' - - self.assertEqual(cache.ZCache_get(DummyOb(), view_name='bar'), None) - - def test_ZCache_get_cache_hit_request_names(self): - proxy = DummyProxy() - cache = self._makeOne(proxy, request_names=('bar', 'qux')) - - proxy._cached['%s|||' % _DUMMY_PATH_STR] = 'XYZZY' - proxy._cached['%s||bar:baz,qux:|' % _DUMMY_PATH_STR] = 'ABCDEF' - - ob = DummyOb() - ob.REQUEST = {'bar': 'baz', 'bam': 'bif'} - - self.assertEqual(cache.ZCache_get(ob), 'ABCDEF') - - def test_ZCache_invalidate(self): - proxy = DummyProxy() - cache = self._makeOne(proxy) - - _cached = proxy._cached - proxy._cached['%s|||' % _DUMMY_PATH_STR] = 'XYZZY' - proxy._cached['%s|foo||' % _DUMMY_PATH_STR] = 'ABCDEF' - proxy._cached['%s|bar||' % _DUMMY_PATH_STR] = 'LMNOP' - - keys = _cached.keys() - _cached[_DUMMY_PATH_STR] = {k: 1 for k in keys} - - cache.ZCache_invalidate(DummyOb()) - - self.assertEqual(len(_cached), 0) - - def test_ZCache_set_simple(self): - proxy = DummyProxy() - cache = self._makeOne(proxy) - - _cached = proxy._cached - - cache.ZCache_set(DummyOb(), 'XYZZY') - - self.assertEqual(len(_cached), 2) - key = '%s|||' % _DUMMY_PATH_STR - self.assertTrue(key in _cached[_DUMMY_PATH_STR]) - self.assertEqual(_cached[key], 'XYZZY') - - def test_ZCache_set_with_view_name(self): - proxy = DummyProxy() - cache = self._makeOne(proxy) - - _cached = proxy._cached - - cache.ZCache_set(DummyOb(), 'XYZZY', view_name='v') - - self.assertEqual(len(_cached), 2) - key = '%s|v||' % _DUMMY_PATH_STR - self.assertTrue(key in _cached[_DUMMY_PATH_STR]) - self.assertEqual(_cached[key], 'XYZZY') - - def test_ZCache_set_replacing(self): - proxy = DummyProxy() - cache = self._makeOne(proxy) - - _cached = proxy._cached - key1 = '%s|||' % _DUMMY_PATH_STR - key2 = '%s|v||' % _DUMMY_PATH_STR - _cached[_DUMMY_PATH_STR] = {key1: 1, key2: 1} - _cached[key1] = 'GHIJKL' - _cached[key2] = 'ABCDE' - - cache.ZCache_set(DummyOb(), 'XYZZY', view_name='v') - - self.assertEqual(len(_cached), 3) - - self.assertTrue(key1 in _cached[_DUMMY_PATH_STR]) - self.assertEqual(_cached[key1], 'GHIJKL') - - self.assertTrue(key2 in _cached[_DUMMY_PATH_STR]) - self.assertEqual(_cached[key2], 'XYZZY') - - -class MemCacheZCacheManagerTests(unittest.TestCase): - - def _getTargetClass(self): - from ..zcache import MemCacheZCacheManager - return MemCacheZCacheManager - - def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) - - def test_conforms_to_IZCacheManager(self): - from zope.interface.verify import verifyClass - - from ..interfaces import IZCacheManager - - verifyClass(IZCacheManager, self._getTargetClass()) - - def test__init__(self): - mgr = self._makeOne('zcache', title='ZCache Manager') - - self.assertEqual(mgr.getId(), 'zcache') - self.assertEqual(mgr.title, 'ZCache Manager') - self.assertEqual(mgr.getProperty('title'), 'ZCache Manager') - self.assertEqual(mgr.getProperty('proxy_path'), '') - self.assertEqual(mgr.getProperty('request_names'), ()) - - def test_ZCacheManager_getCache_with_proxy(self): - mgr = self._makeOne('zcache') - mgr.dummy_proxy = DummyProxy() - mgr.proxy_path = 'dummy_proxy' - mgr.request_names = ('foo', 'bar') - - cache = mgr.ZCacheManager_getCache() - - self.assertEqual(cache.proxy, mgr.dummy_proxy) - self.assertEqual(cache.request_names, ('bar', 'foo')) - - -_DUMMY_PATH = ('path', 'to', 'dummy') -_DUMMY_PATH_STR = '/'.join(_DUMMY_PATH) - - -class DummyOb: - def getPhysicalPath(self): - return _DUMMY_PATH - - -class DummyProxy: - def __init__(self): - self._cached = {} - 
- def set(self, key, value): - self._cached[key] = value - - def _get(self, key, default=None): - return self._cached.get(key, default) - - get = _get - - def delete(self, key, time=0): - try: - del self._cached[key] - return True - except KeyError: - return False diff --git a/src/Products/mcdutils/www/add_items.pt b/src/Products/mcdutils/www/add_items.pt index 059fae5..733864b 100644 --- a/src/Products/mcdutils/www/add_items.pt +++ b/src/Products/mcdutils/www/add_items.pt @@ -2,31 +2,45 @@

TABS

+ tal:define=" + updated context/addItemsToSession; + " +> -

Test Adding Items to Session

+

Test Adding Items to Session +

-
+ Add Items to Session as space-separated key-value pairs, one pair per line - 
- -
- - - + +
+
+ +
@@ -44,4 +58,3 @@

FOOTER

- diff --git a/src/Products/mcdutils/www/add_mcp.pt b/src/Products/mcdutils/www/add_mcp.pt index 54feb87..28b576b 100644 --- a/src/Products/mcdutils/www/add_mcp.pt +++ b/src/Products/mcdutils/www/add_mcp.pt @@ -2,29 +2,47 @@
-

Add a MemCacheProxy

- -
-
- -
- -
+

Add a MemCacheProxy +

+ + +
+ +
+
- -
- -
- -
-
- -
- +
+ +
+ +
+
+
+ +
+ +
diff --git a/src/Products/mcdutils/www/add_mcsdc.pt b/src/Products/mcdutils/www/add_mcsdc.pt index acf1944..6cdc479 100644 --- a/src/Products/mcdutils/www/add_mcsdc.pt +++ b/src/Products/mcdutils/www/add_mcsdc.pt @@ -2,29 +2,47 @@
-

Add a MemCacheSessionDataContainer

- -
-
- -
- -
+

Add a MemCacheSessionDataContainer +

+ + +
+ +
+
- -
- -
- -
-
- -
- +
+ +
+ +
+
+
+ +
+ +
diff --git a/src/Products/mcdutils/www/add_mczcm.pt b/src/Products/mcdutils/www/add_mczcm.pt index 6c174c2..def05f6 100644 --- a/src/Products/mcdutils/www/add_mczcm.pt +++ b/src/Products/mcdutils/www/add_mczcm.pt @@ -4,27 +4,44 @@

Add a MemCacheZCacheManager

-
-
- -
- -
+ +
+ +
+
- -
- -
- -
-
- -
- +
+ +
+ +
+
+
+ +
+ +
diff --git a/src/Products/mcdutils/zcache.py b/src/Products/mcdutils/zcache.py index fcafda3..d568961 100644 --- a/src/Products/mcdutils/zcache.py +++ b/src/Products/mcdutils/zcache.py @@ -10,7 +10,10 @@ # FOR A PARTICULAR PURPOSE. # ############################################################################# -""" RAMCacheManager workalike using memcache """ +"""RAMCacheManager workalike using memcache""" + +from .interfaces import IZCache +from .interfaces import IZCacheManager from AccessControl.class_init import InitializeClass from AccessControl.SecurityInfo import ClassSecurityInfo from OFS.Cache import CacheManager @@ -20,13 +23,11 @@ from zope.interface import implementedBy from zope.interface import implementer -from .interfaces import IZCache -from .interfaces import IZCacheManager - -def aggregateKey(ob, view_name='', request=None, request_names=(), - local_keys=None): - """ Return a key to be used when retrieving or inserting a cache entry. +def aggregateKey( + ob, view_name="", request=None, request_names=(), local_keys=None +): + """Return a key to be used when retrieving or inserting a cache entry. o 'ob' is the object for whom the key is desired. @@ -39,7 +40,7 @@ def aggregateKey(ob, view_name='', request=None, request_names=(), o 'local_keys' is a mapping or None. """ - path = '/'.join(ob.getPhysicalPath()) + path = "/".join(ob.getPhysicalPath()) request_index = [] local_index = [] if request is None: @@ -48,24 +49,27 @@ def aggregateKey(ob, view_name='', request=None, request_names=(), local_keys = {} for key in request_names: - val = request.get(key, '') - request_index.append(f'{key}:{val}') + val = request.get(key, "") + request_index.append(f"{key}:{val}") for key, val in local_keys.items(): - local_index.append(f'{key}:{val}') + local_index.append(f"{key}:{val}") - full_key = '|'.join((path, str(view_name), - ','.join(request_index), - ','.join(sorted(local_index)))) + full_key = "|".join(( + path, + str(view_name), + ",".join(request_index), + ",".join(sorted(local_index)), + )) # Memcache does not like blank spaces in keys - return full_key.replace(' ', '_') + return full_key.replace(" ", "_") @implementer(IZCache) class MemCacheZCache: - """ Implement ISDC via a memcache proxy. - """ + """Implement ISDC via a memcache proxy.""" + security = ClassSecurityInfo() security.declareObjectPrivate() @@ -74,9 +78,8 @@ def __init__(self, proxy, request_names): self.request_names = request_names def ZCache_invalidate(self, ob): - """ See IZCache. - """ - path = '/'.join(ob.getPhysicalPath()) + """See IZCache.""" + path = "/".join(ob.getPhysicalPath()) proxy = self.proxy keys = proxy.get(path) if keys is None: @@ -85,10 +88,10 @@ def ZCache_invalidate(self, ob): proxy.delete(key) proxy.delete(path) - def ZCache_get(self, ob, view_name='', keywords=None, mtime_func=None, - default=None): - """ See IZCache. - """ + def ZCache_get( + self, ob, view_name="", keywords=None, mtime_func=None, default=None + ): + """See IZCache.""" key = self._getKey(ob, view_name, keywords) value = self.proxy.get(key) @@ -98,11 +101,11 @@ def ZCache_get(self, ob, view_name='', keywords=None, mtime_func=None, return value - def ZCache_set(self, ob, data, view_name='', keywords=None, - mtime_func=None): - """ See IZCache. 
- """ - path = '/'.join(ob.getPhysicalPath()) + def ZCache_set( + self, ob, data, view_name="", keywords=None, mtime_func=None + ): + """See IZCache.""" + path = "/".join(ob.getPhysicalPath()) proxy = self.proxy key = self._getKey(ob, view_name, keywords) proxy.set(key, data) @@ -116,45 +119,47 @@ def ZCache_set(self, ob, data, view_name='', keywords=None, def _getKey(self, ob, view_name, keywords): rnames = self.request_names - if rnames: - request = getattr(ob, 'REQUEST', {}) - else: - request = {} - + request = getattr(ob, "REQUEST", {}) if rnames else {} return aggregateKey(ob, view_name, request, rnames, keywords) InitializeClass(MemCacheZCache) -@implementer(IZCacheManager + implementedBy(CacheManager) - + implementedBy(SimpleItem) + implementedBy(PropertyManager)) +@implementer( + IZCacheManager + + implementedBy(CacheManager) + + implementedBy(SimpleItem) + + implementedBy(PropertyManager) +) class MemCacheZCacheManager(CacheManager, SimpleItem, PropertyManager): - """ Implement ISDC via a memcache proxy. - """ + """Implement ISDC via a memcache proxy.""" + security = ClassSecurityInfo() _v_proxy = None - proxy_path = '' + proxy_path = "" request_names = () - zmi_icon = 'fas fa-forward' + zmi_icon = "fas fa-forward" # # ZMI # - meta_type = 'MemCache Cache Manager' + meta_type = "MemCache Cache Manager" _properties = ( - {'id': 'title', 'type': 'string', 'mode': 'w'}, - {'id': 'proxy_path', 'type': 'string', 'mode': 'w'}, - {'id': 'request_names', 'type': 'lines', 'mode': 'w'}, + {"id": "title", "type": "string", "mode": "w"}, + {"id": "proxy_path", "type": "string", "mode": "w"}, + {"id": "request_names", "type": "lines", "mode": "w"}, ) - manage_options = (PropertyManager.manage_options - + CacheManager.manage_options - + SimpleItem.manage_options) + manage_options = ( + PropertyManager.manage_options + + CacheManager.manage_options + + SimpleItem.manage_options + ) - def __init__(self, id, title=''): - self.id = id + def __init__(self, id_, title=""): + self.id = id_ self.title = title def _get_proxy(self): @@ -162,13 +167,13 @@ def _get_proxy(self): if not self.proxy_path: # import late to avoid cycle from . import MemCacheError - raise MemCacheError('No proxy defined') + + raise MemCacheError("No proxy defined") self._v_proxy = self.unrestrictedTraverse(self.proxy_path) return self._v_proxy def ZCacheManager_getCache(self): - """ See IZCacheManager. - """ + """See IZCacheManager.""" names = list(self.request_names) names.sort() return MemCacheZCache(self._get_proxy(), tuple(names)) @@ -177,14 +182,14 @@ def ZCacheManager_getCache(self): InitializeClass(MemCacheZCacheManager) -def addMemCacheZCacheManager(dispatcher, id, title='', REQUEST=None): - """ Add a MCSDC to dispatcher. 
- """ +def addMemCacheZCacheManager(dispatcher, id, title="", REQUEST=None): # noqa: A002 + """Add a MCSDC to dispatcher.""" dispatcher._setObject(id, MemCacheZCacheManager(id, title=title)) if REQUEST is not None: - REQUEST['RESPONSE'].redirect('%s/manage_workspace' - % dispatcher.absolute_url()) + REQUEST["RESPONSE"].redirect( + f"{dispatcher.absolute_url()}/manage_workspace" + ) -addMemCacheZCacheManagerForm = PageTemplateFile('www/add_mczcm.pt', globals()) +addMemCacheZCacheManagerForm = PageTemplateFile("www/add_mczcm.pt", globals()) diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/Products/mcdutils/ftests/functest.py b/tests/ftests/functest.py similarity index 78% rename from src/Products/mcdutils/ftests/functest.py rename to tests/ftests/functest.py index 54378c2..ba4b1b1 100644 --- a/src/Products/mcdutils/ftests/functest.py +++ b/tests/ftests/functest.py @@ -12,21 +12,21 @@ ############################################################################# # Run this test from 'zopectl run' # Requires that we are running a memcached on localhost, port 11211 -import transaction +from Products.mcdutils.proxy import MemCacheProxy -from .proxy import MemCacheProxy +import transaction -proxy = MemCacheProxy(['localhost:11211']) +proxy = MemCacheProxy(["localhost:11211"]) -session = proxy.new_or_existing('foobar') +session = proxy.new_or_existing("foobar") print(session) -session['abc'] = 123 +session["abc"] = 123 print(session) transaction.commit() -proxy2 = MemCacheProxy(['localhost:11211']) +proxy2 = MemCacheProxy(["localhost:11211"]) -print(proxy2.get('foobar')) +print(proxy2.get("foobar")) diff --git a/tests/ftests/test_proxy_functional.py b/tests/ftests/test_proxy_functional.py new file mode 100644 index 0000000..39e91ed --- /dev/null +++ b/tests/ftests/test_proxy_functional.py @@ -0,0 +1,70 @@ +############################################################################## +# +# Copyright (c) 2008-2023 Tres Seaver and Contributors. All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################# +"""Functional tests for Products.mcdutils.proxy""" + + +class TestMemCacheSDCFunc: + def _makeOne(self): + from Products.mcdutils.proxy import MemCacheProxy + from Products.mcdutils.sessiondata import MemCacheSessionDataContainer + + sdc = MemCacheSessionDataContainer("mcsdc") + sdc.mcproxy = MemCacheProxy("mcproxy") + sdc.proxy_path = "mcproxy" + + return sdc + + def test_writing_to_mapping_no_memcache(self): + from Products.mcdutils.mapping import MemCacheMapping + + sdc = self._makeOne() + mapping = sdc.new_or_existing("foobar") + assert isinstance(mapping, MemCacheMapping) + assert not mapping._p_changed + assert not mapping._p_joined + mapping["abc"] = 1345 + assert mapping._p_changed + assert mapping._p_joined + import transaction + + transaction.commit() + + def test_writing_to_mapping_with_memcache(self): + from Products.mcdutils.mapping import MemCacheMapping + + sdc = self._makeOne() + sdc._get_proxy().servers = ("localhost:11211",) + mapping = sdc.new_or_existing("foobar") + assert isinstance(mapping, MemCacheMapping) + assert not mapping._p_changed + assert not mapping._p_joined + mapping["abc"] = 1345 + assert mapping._p_changed + assert mapping._p_joined + import transaction + + transaction.commit() + + def test_writing_to_mapping_with_invalid_memcache_raises(self): + from Products.mcdutils import MemCacheError + + sdc = self._makeOne() + sdc._get_proxy().servers = ("nonesuch:999999",) + mapping = sdc.new_or_existing("foobar") + mapping["abc"] = 1345 + import pytest + import transaction + + with pytest.raises(MemCacheError): + transaction.commit() + transaction.abort() diff --git a/src/Products/mcdutils/tests/helpers.py b/tests/helpers.py similarity index 94% rename from src/Products/mcdutils/tests/helpers.py rename to tests/helpers.py index 0468ec1..bc34c4f 100644 --- a/src/Products/mcdutils/tests/helpers.py +++ b/tests/helpers.py @@ -10,14 +10,12 @@ # FOR A PARTICULAR PURPOSE. # ############################################################################# -""" Unit test helper modules """ class DummyMemcache(dict): - def _assertKeyBinary(self, key): if not isinstance(key, bytes): - raise ValueError('Key must be binary string.') + raise ValueError("Key must be binary string.") return key def set(self, key, value): diff --git a/src/Products/mcdutils/tests/test_mapping.py b/tests/test_mapping.py similarity index 54% rename from src/Products/mcdutils/tests/test_mapping.py rename to tests/test_mapping.py index 920376a..d9362d1 100644 --- a/src/Products/mcdutils/tests/test_mapping.py +++ b/tests/test_mapping.py @@ -10,14 +10,14 @@ # FOR A PARTICULAR PURPOSE. # ############################################################################# -""" Unit tests for Products.mcdutils.mapping """ -import unittest +import contextlib -class MemCacheMappingSavepointTests(unittest.TestCase): +class TestMemCacheMappingSavepoint: def _getTargetClass(self): - from ..mapping import MemCacheMappingSavepoint + from Products.mcdutils.mapping import MemCacheMappingSavepoint + return MemCacheMappingSavepoint def _makeOne(self, *args, **kw): @@ -26,19 +26,20 @@ def _makeOne(self, *args, **kw): def test_conforms_to_IDataManagerSavepoint(self): from transaction.interfaces import IDataManagerSavepoint from zope.interface.verify import verifyClass + verifyClass(IDataManagerSavepoint, self._getTargetClass()) def test_rollback(self): # This doesn't really do anything. 
Just verifying the # method is there and doesn't blow up when called. sp = self._makeOne() - self.assertFalse(sp.rollback()) - + assert not sp.rollback() -class MemCacheMappingTests(unittest.TestCase): +class TestMemCacheMapping: def _getTargetClass(self): from Products.mcdutils.mapping import MemCacheMapping + return MemCacheMapping def _makeOne(self, *args, **kw): @@ -47,97 +48,99 @@ def _makeOne(self, *args, **kw): def test_conforms_to_ISavepointDataManager(self): from transaction.interfaces import ISavepointDataManager from zope.interface.verify import verifyClass + verifyClass(ISavepointDataManager, self._getTargetClass()) def test___setitem___triggers_register(self): - mapping = self._makeOne('key', DummyProxy()) - self.assertFalse(mapping._p_changed) - self.assertFalse(mapping._p_joined) - mapping['abc'] = 123 - self.assertTrue(mapping._p_changed) - self.assertTrue(mapping._p_joined) + mapping = self._makeOne("key", DummyProxy()) + assert not mapping._p_changed + assert not mapping._p_joined + mapping["abc"] = 123 + assert mapping._p_changed + assert mapping._p_joined def test_has_key(self): # Added in for backwards-compatibility under Python 3 - mapping = self._makeOne('key', DummyProxy()) + mapping = self._makeOne("key", DummyProxy()) - self.assertFalse(mapping.has_key('foo')) # NOQA: W601 - mapping['foo'] = 'bar' - self.assertTrue(mapping.has_key('foo')) # NOQA: W601 + assert not mapping.has_key("foo") + mapping["foo"] = "bar" + assert mapping.has_key("foo") def test__getstate__and__setstate__(self): - mapping = self._makeOne('key', DummyProxy()) + mapping = self._makeOne("key", DummyProxy()) - self.assertEqual(mapping.__getstate__(), {}) - mapping.__setstate__({'foo': 'bar'}) - self.assertEqual(mapping.__getstate__(), {'foo': 'bar'}) + assert mapping.__getstate__() == {} + mapping.__setstate__({"foo": "bar"}) + assert mapping.__getstate__() == {"foo": "bar"} def test_getContainerKey(self): - mapping = self._makeOne('key', DummyProxy()) + mapping = self._makeOne("key", DummyProxy()) - self.assertEqual(mapping.getContainerKey(), 'key') + assert mapping.getContainerKey() == "key" def test_clean(self): proxy = DummyProxy() - proxy._set('key', 'myvalue') - mapping = self._makeOne('key', proxy) + proxy._set("key", "myvalue") + mapping = self._makeOne("key", proxy) - self.assertIn('key', proxy._cached) + assert "key" in proxy._cached mapping._clean() - self.assertNotIn('key', proxy._cached) + assert "key" not in proxy._cached # Cleaning again won't throw errors - self.assertIsNone(mapping._clean()) + assert mapping._clean() is None def test_abort(self): proxy = DummyProxy() - proxy._set('key', 'myvalue') - mapping = self._makeOne('key', proxy) + proxy._set("key", "myvalue") + mapping = self._makeOne("key", proxy) - self.assertIn('key', proxy._cached) + assert "key" in proxy._cached mapping.abort(None) - self.assertNotIn('key', proxy._cached) + assert "key" not in proxy._cached def test_savepoint(self): from Products.mcdutils.mapping import MemCacheMappingSavepoint - mapping = self._makeOne('key', DummyProxy()) + + mapping = self._makeOne("key", DummyProxy()) sp = mapping.savepoint() - self.assertIsInstance(sp, MemCacheMappingSavepoint) + assert isinstance(sp, MemCacheMappingSavepoint) def test_sortKey(self): - mapping = self._makeOne('key', DummyProxy()) + mapping = self._makeOne("key", DummyProxy()) - self.assertEqual(mapping.sortKey(), 'MemCacheMapping: key') + assert mapping.sortKey() == "MemCacheMapping: key" def test_repr(self): - KEYS = ('__ac_password', 'passwd', 'password') + 
KEYS = ("__ac_password", "passwd", "password") proxy = DummyProxy() - proxy._set('key', 'myvalue') - mapping = self._makeOne('key', proxy) + proxy._set("key", "myvalue") + mapping = self._makeOne("key", proxy) for pw_key in KEYS: - mapping[pw_key] = 'thisisapw' - mapping['normal'] = 'normalvalue' + mapping[pw_key] = "thisisapw" + mapping["normal"] = "normalvalue" mapping_repr = repr(mapping) - self.assertNotIn('thisisapw', mapping_repr) + assert "thisisapw" not in mapping_repr for pw_key in KEYS: - self.assertIn("'%s': ''" % pw_key, mapping_repr) - self.assertIn("'normal': 'normalvalue'", mapping_repr) + assert f"'{pw_key}': ''" in mapping_repr + assert "'normal': 'normalvalue'" in mapping_repr def test_invalidate(self): """Tests invalidate method""" proxy = DummyProxy() - proxy._set('key', 'myvalue') - mapping = self._makeOne('key', proxy) + proxy._set("key", "myvalue") + mapping = self._makeOne("key", proxy) - self.assertIn('key', proxy._cached) + assert "key" in proxy._cached mapping.invalidate() - self.assertNotIn('key', proxy._cached) + assert "key" not in proxy._cached # Cleaning again won't throw errors - self.assertIsNone(mapping.invalidate()) + assert mapping.invalidate() is None class DummyClient: @@ -146,7 +149,6 @@ def _get_server(self, key): class DummyProxy: - def __init__(self): self._cached = {} @@ -154,15 +156,11 @@ def _set(self, key, value): self._cached[key] = value def _clean(self, key): - try: + with contextlib.suppress(KeyError): del self._cached[key] - except KeyError: - pass def delete(self, key): - try: + with contextlib.suppress(KeyError): del self._cached[key] - except KeyError: - pass client = DummyClient() diff --git a/tests/test_proxy.py b/tests/test_proxy.py new file mode 100644 index 0000000..c043ae0 --- /dev/null +++ b/tests/test_proxy.py @@ -0,0 +1,154 @@ +############################################################################## +# +# Copyright (c) 2008-2023 Tres Seaver and Contributors. All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################# + +KEY = b"key1" + + +class TestFauxClient: + def _getTargetClass(self): + from Products.mcdutils.proxy import FauxClient + + return FauxClient + + def _makeOne(self): + return self._getTargetClass()() + + def test_faux_client(self): + # Faux client only fakes out a few methods + fc = self._makeOne() + + assert fc._get_server(KEY) == (fc, KEY) + fc.set(KEY, "value1") + assert fc._get_server(KEY) == (fc, KEY) + + +class TestMemCacheProxy: + def _getTargetClass(self): + from Products.mcdutils.proxy import MemCacheProxy + + return MemCacheProxy + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def _makeOneWithMemcache(self, *args, **kw): + from .helpers import DummyMemcache + + proxy = self._getTargetClass()(*args, **kw) + proxy._v_client = DummyMemcache() + return proxy + + def test_conforms_to_IMemCacheProxy(self): + from Products.mcdutils.interfaces import IMemCacheProxy + from zope.interface.verify import verifyClass + + verifyClass(IMemCacheProxy, self._getTargetClass()) + + def test__init__(self): + proxy = self._makeOne("proxy", title="Proxy") + + assert proxy.getId() == "proxy" + assert proxy.servers == () + assert proxy.getProperty("servers") == () + assert proxy.title == "Proxy" + assert proxy.getProperty("title") == "Proxy" + + def test__cached(self): + proxy = self._makeOne("proxy") + + assert proxy._cached == {} + + proxy._v_cached = {"foo": "bar"} + assert proxy._cached == {"foo": "bar"} + + def test_client(self): + from memcache import Client + + proxy = self._makeOne("proxy") + + assert proxy.client is not None + + proxy._v_client = "x" + assert proxy.client == "x" + + # Set a server, which should create a real client instance + proxy.servers = ("127.0.0.1:9999",) + assert isinstance(proxy.client, Client) + + def test__servers(self): + proxy = self._makeOne("proxy") + + assert proxy.servers == () + proxy.servers = ("srv",) + assert proxy.servers == ("srv",) + + # make sure all caches are cleared + proxy._v_client = "client" + proxy._v_cache = "cache" + assert proxy._v_client is not None + assert proxy._v_cache is not None + proxy.servers = ("srv",) + assert getattr(proxy, "_v_client", None) is None + assert getattr(proxy, "_v_cache", None) is None + + def test_create(self): + from Products.mcdutils.mapping import MemCacheMapping + + proxy = self._makeOne("proxy") + + created = proxy.create(KEY) + assert isinstance(created, MemCacheMapping) + + def test_get_set(self): + proxy = self._makeOneWithMemcache("proxy") + + assert proxy.get(KEY) is None + assert proxy.set(KEY, proxy.create(KEY)) + assert proxy.get(KEY) == {} + + # This should also work when setting values that are + # not MemCacheMapping instances + KEY2 = b"key2" + assert proxy.get(KEY2) is None + assert proxy.set(KEY2, {"foo": "bar"}) + assert proxy.get(KEY2) == {"foo": "bar"} + + def test_get_multi(self): + proxy = self._makeOneWithMemcache("proxy") + + assert proxy.get_multi([KEY, b"key2"]) == {KEY: None, b"key2": None} + + def test_add(self): + proxy = self._makeOneWithMemcache("proxy") + + assert proxy.add(KEY, proxy.create(KEY)) + assert proxy.get(KEY) == {} + + def test_replace(self): + proxy = self._makeOneWithMemcache("proxy") + + assert proxy.replace(KEY, proxy.create(KEY)) is None + assert proxy.get(KEY) is None + + assert proxy.set(KEY, proxy.create(KEY)) + assert proxy.get(KEY) == {} + + def test_delete(self): + proxy = self._makeOneWithMemcache("proxy") + + assert proxy.delete(KEY) is 
None + assert proxy.create(KEY) == {} + + assert proxy.set(KEY, proxy.create(KEY)) + assert proxy.delete(KEY) + assert proxy.get(KEY) is None diff --git a/tests/test_py_typed_marker.py b/tests/test_py_typed_marker.py new file mode 100644 index 0000000..46deb24 --- /dev/null +++ b/tests/test_py_typed_marker.py @@ -0,0 +1,6 @@ +def test_py_typed_marker_exists(): + import pathlib + import Products.mcdutils as pkg + + root = pathlib.Path(pkg.__file__).parent + assert (root / "py.typed").exists() diff --git a/tests/test_reconnecting_export.py b/tests/test_reconnecting_export.py new file mode 100644 index 0000000..3cd856d --- /dev/null +++ b/tests/test_reconnecting_export.py @@ -0,0 +1,24 @@ +############################################################################## +# Complementary tests for reconnecting metrics export. +############################################################################### +import contextlib +import os +import tempfile + + +def test_export_prometheus_textfile_creates_file(): + from Products.mcdutils import reconnecting as r + + # ensure counters change a bit + r._metrics["reconnect_attempts_total"] += 1 + fd, path = tempfile.mkstemp(prefix="mcdutils_metrics_", suffix=".prom") + os.close(fd) + try: + r.export_prometheus_textfile(path) + with open(path, encoding="utf-8") as f: + content = f.read() + assert "mcdutils_reconnect_attempts_total" in content + assert "mcdutils_retry_calls_total" in content + finally: + with contextlib.suppress(OSError): + os.remove(path) diff --git a/src/Products/mcdutils/tests/test_sessiondata.py b/tests/test_sessiondata.py similarity index 56% rename from src/Products/mcdutils/tests/test_sessiondata.py rename to tests/test_sessiondata.py index 88cf9b8..9ddf57b 100644 --- a/src/Products/mcdutils/tests/test_sessiondata.py +++ b/tests/test_sessiondata.py @@ -10,8 +10,6 @@ # FOR A PARTICULAR PURPOSE. 
# ############################################################################# -""" Unit tests for Products.mcdutils.sessiondata """ -import unittest class DummyClient: @@ -32,60 +30,70 @@ def _get(self, key, default=None): get = _get -class MemCacheSessionDataTests(unittest.TestCase): - +class TestMemCacheSessionData: def _getTargetClass(self): - from ..sessiondata import MemCacheSessionDataContainer + from Products.mcdutils.sessiondata import MemCacheSessionDataContainer + return MemCacheSessionDataContainer - def _makeOne(self, id, title='', with_proxy=True): - sdc = self._getTargetClass()(id, title=title) + def _makeOne(self, id_, title="", with_proxy=True): + sdc = self._getTargetClass()(id_, title=title) if with_proxy: sdc.dummy_proxy = DummyProxy() - sdc.proxy_path = 'dummy_proxy' + sdc.proxy_path = "dummy_proxy" return sdc def test_conforms_to_ISessionDataContainer(self): + from Products.mcdutils.interfaces import ISessionDataContainer from zope.interface.verify import verifyClass - from ..interfaces import ISessionDataContainer verifyClass(ISessionDataContainer, self._getTargetClass()) def test_conforms_to_IMemCacheSessionDataContainer(self): + from Products.mcdutils.interfaces import IMemCacheSessionDataContainer from zope.interface.verify import verifyClass - from ..interfaces import IMemCacheSessionDataContainer verifyClass(IMemCacheSessionDataContainer, self._getTargetClass()) def test_empty(self): - sdc = self._makeOne('mcsdc') - self.assertFalse(sdc.has_key('foobar')) # NOQA: W601 - self.assertIsNone(sdc.get('foobar')) + sdc = self._makeOne("mcsdc") + assert not sdc.has_key("foobar") + assert sdc.get("foobar") is None def test_invalid_proxy_raises_MemCacheError(self): - from .. import MemCacheError - sdc = self._makeOne('mcsdc', with_proxy=False) - self.assertRaises(MemCacheError, - sdc.has_key, 'foobar') # NOQA: W601 - self.assertRaises(MemCacheError, sdc.get, 'foobar') - self.assertRaises(MemCacheError, sdc.new_or_existing, 'foobar') + import pytest + + from Products.mcdutils import MemCacheError + + sdc = self._makeOne("mcsdc", with_proxy=False) + + with pytest.raises(MemCacheError): + sdc.has_key("foobar") + + with pytest.raises(MemCacheError): + sdc.get("foobar") + + with pytest.raises(MemCacheError): + sdc.new_or_existing("foobar") def test_new_or_existing_returns_txn_aware_mapping(self): from persistent.mapping import PersistentMapping from transaction.interfaces import IDataManager - sdc = self._makeOne('mcsdc') - created = sdc.new_or_existing('foobar') - self.assertTrue(isinstance(created, PersistentMapping)) + + sdc = self._makeOne("mcsdc") + created = sdc.new_or_existing("foobar") + assert isinstance(created, PersistentMapping) jar = created._p_jar - self.assertFalse(jar is None) - self.assertTrue(IDataManager.providedBy(jar)) + assert jar is not None + assert IDataManager.providedBy(jar) def test_has_key_after_new_or_existing_returns_True(self): - sdc = self._makeOne('mcsdc') - sdc.new_or_existing('foobar') - self.assertTrue(sdc.has_key('foobar')) # NOQA: W601 + sdc = self._makeOne("mcsdc") + sdc.new_or_existing("foobar") + assert sdc.has_key("foobar") def test_get_after_new_or_existing_returns_same(self): - sdc = self._makeOne('mcsdc') - created = sdc.new_or_existing('foobar') - self.assertTrue(sdc.get('foobar') is created) diff --git a/tests/test_zcache.py b/tests/test_zcache.py new file mode 100644 index 0000000..6c8c64a
--- /dev/null +++ b/tests/test_zcache.py @@ -0,0 +1,231 @@ +############################################################################## +# +# Copyright (c) 2008-2023 Tres Seaver and Contributors. All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################# + + +class TestOf_aggregateKey: + def test_defaults(self): + from Products.mcdutils.zcache import aggregateKey + + key = aggregateKey(DummyOb()) + assert key == f"{_DUMMY_PATH_STR}|||" + + def test_explicit_view_name(self): + from Products.mcdutils.zcache import aggregateKey + + key = aggregateKey(DummyOb(), view_name="VIEW_NAME") + assert key == f"{_DUMMY_PATH_STR}|VIEW_NAME||" + + def test_explicit_request_names(self): + from Products.mcdutils.zcache import aggregateKey + + key = aggregateKey( + DummyOb(), + request={"aaa": "AAA", "bbb": "BBB", "ccc": "CCC"}, + request_names=["aaa", "ccc"], + ) + assert key == f"{_DUMMY_PATH_STR}||aaa:AAA,ccc:CCC|" + + def test_explicit_local_keys(self): + from Products.mcdutils.zcache import aggregateKey + + key = aggregateKey(DummyOb(), local_keys={"foo": "bar", "baz": "bam"}) + assert key == f"{_DUMMY_PATH_STR}|||baz:bam,foo:bar" + + +class TestMemCacheZCache: + def _getTargetClass(self): + from Products.mcdutils.zcache import MemCacheZCache + + return MemCacheZCache + + def _makeOne(self, proxy, request_names=(), *args, **kw): + mczc = self._getTargetClass()(proxy, request_names, *args, **kw) + return mczc + + def test_conforms_to_IZCache(self): + from Products.mcdutils.interfaces import IZCache + from zope.interface.verify import verifyClass + + verifyClass(IZCache, self._getTargetClass()) + + def test_ZCache_get_cache_miss(self): + proxy = DummyProxy() + cache = self._makeOne(proxy) + + assert cache.ZCache_get(DummyOb()) is None + + def test_ZCache_get_cache_hit_default_args(self): + proxy = DummyProxy() + cache = self._makeOne(proxy) + + proxy._cached[f"{_DUMMY_PATH_STR}|||"] = "XYZZY" + + assert cache.ZCache_get(DummyOb()) == "XYZZY" + + def test_ZCache_get_cache_hit_view_name(self): + proxy = DummyProxy() + cache = self._makeOne(proxy) + + proxy._cached[f"{_DUMMY_PATH_STR}|||"] = "XYZZY" + proxy._cached[f"{_DUMMY_PATH_STR}|foo||"] = "ABCDEF" + + assert cache.ZCache_get(DummyOb(), view_name="foo") == "ABCDEF" + + def test_ZCache_get_cache_miss_view_name(self): + proxy = DummyProxy() + cache = self._makeOne(proxy) + + proxy._cached[f"{_DUMMY_PATH_STR}|||"] = "XYZZY" + proxy._cached[f"{_DUMMY_PATH_STR}|foo||"] = "ABCDEF" + + assert cache.ZCache_get(DummyOb(), view_name="bar") is None + + def test_ZCache_get_cache_hit_request_names(self): + proxy = DummyProxy() + cache = self._makeOne(proxy, request_names=("bar", "qux")) + + proxy._cached[f"{_DUMMY_PATH_STR}|||"] = "XYZZY" + proxy._cached[f"{_DUMMY_PATH_STR}||bar:baz,qux:|"] = "ABCDEF" + + ob = DummyOb() + ob.REQUEST = {"bar": "baz", "bam": "bif"} + + assert cache.ZCache_get(ob) == "ABCDEF" + + def test_ZCache_invalidate(self): + proxy = DummyProxy() + cache = self._makeOne(proxy) + + _cached = proxy._cached + proxy._cached[f"{_DUMMY_PATH_STR}|||"] = "XYZZY" + proxy._cached[f"{_DUMMY_PATH_STR}|foo||"] = 
"ABCDEF" + proxy._cached[f"{_DUMMY_PATH_STR}|bar||"] = "LMNOP" + + keys = _cached.keys() + _cached[_DUMMY_PATH_STR] = dict.fromkeys(keys, 1) + + cache.ZCache_invalidate(DummyOb()) + + assert len(_cached) == 0 + + def test_ZCache_set_simple(self): + proxy = DummyProxy() + cache = self._makeOne(proxy) + + _cached = proxy._cached + + cache.ZCache_set(DummyOb(), "XYZZY") + + assert len(_cached) == 2 + key = f"{_DUMMY_PATH_STR}|||" + assert key in _cached[_DUMMY_PATH_STR] + assert _cached[key] == "XYZZY" + + def test_ZCache_set_with_view_name(self): + proxy = DummyProxy() + cache = self._makeOne(proxy) + + _cached = proxy._cached + + cache.ZCache_set(DummyOb(), "XYZZY", view_name="v") + + assert len(_cached) == 2 + key = f"{_DUMMY_PATH_STR}|v||" + assert key in _cached[_DUMMY_PATH_STR] + assert _cached[key] == "XYZZY" + + def test_ZCache_set_replacing(self): + proxy = DummyProxy() + cache = self._makeOne(proxy) + + _cached = proxy._cached + key1 = f"{_DUMMY_PATH_STR}|||" + key2 = f"{_DUMMY_PATH_STR}|v||" + _cached[_DUMMY_PATH_STR] = {key1: 1, key2: 1} + _cached[key1] = "GHIJKL" + _cached[key2] = "ABCDE" + + cache.ZCache_set(DummyOb(), "XYZZY", view_name="v") + + assert len(_cached) == 3 + + assert key1 in _cached[_DUMMY_PATH_STR] + assert _cached[key1] == "GHIJKL" + + assert key2 in _cached[_DUMMY_PATH_STR] + assert _cached[key2] == "XYZZY" + + +class TestMemCacheZCacheManager: + def _getTargetClass(self): + from Products.mcdutils.zcache import MemCacheZCacheManager + + return MemCacheZCacheManager + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_conforms_to_IZCacheManager(self): + from Products.mcdutils.interfaces import IZCacheManager + from zope.interface.verify import verifyClass + + verifyClass(IZCacheManager, self._getTargetClass()) + + def test__init__(self): + mgr = self._makeOne("zcache", title="ZCache Manager") + + assert mgr.getId() == "zcache" + assert mgr.title == "ZCache Manager" + assert mgr.getProperty("title") == "ZCache Manager" + assert mgr.getProperty("proxy_path") == "" + assert mgr.getProperty("request_names") == () + + def test_ZCacheManager_getCache_with_proxy(self): + mgr = self._makeOne("zcache") + mgr.dummy_proxy = DummyProxy() + mgr.proxy_path = "dummy_proxy" + mgr.request_names = ("foo", "bar") + + cache = mgr.ZCacheManager_getCache() + + assert cache.proxy == mgr.dummy_proxy + assert cache.request_names == ("bar", "foo") + + +_DUMMY_PATH = ("path", "to", "dummy") +_DUMMY_PATH_STR = "/".join(_DUMMY_PATH) + + +class DummyOb: + def getPhysicalPath(self): + return _DUMMY_PATH + + +class DummyProxy: + def __init__(self): + self._cached = {} + + def set(self, key, value): + self._cached[key] = value + + def _get(self, key, default=None): + return self._cached.get(key, default) + + get = _get + + def delete(self, key, time=0): + try: + del self._cached[key] + return True + except KeyError: + return False diff --git a/tox.ini b/tox.ini deleted file mode 100644 index ea1963e..0000000 --- a/tox.ini +++ /dev/null @@ -1,111 +0,0 @@ -# Generated from: -# https://github.com/zopefoundation/meta/tree/master/config/zope-product -[tox] -minversion = 3.18 -envlist = - release-check - lint - py37 - py38 - py39 - py310 - py311 - py312 - docs - coverage - -[testenv] -skip_install = true -deps = - zc.buildout >= 3.0.1 - wheel > 0.37 -setenv = - py312: VIRTUALENV_PIP=23.1.2 - py312: PIP_REQUIRE_VIRTUALENV=0 -commands_pre = - {envbindir}/buildout -nc {toxinidir}/buildout.cfg buildout:directory={envdir} buildout:develop={toxinidir} install test 
-commands = - {envbindir}/test {posargs:-cv} -[testenv:release-check] -description = ensure that the distribution is ready to release -basepython = python3 -skip_install = true -deps = - twine - build - check-manifest - check-python-versions >= 0.20.0 - wheel -commands_pre = -commands = - check-manifest - check-python-versions - python -m build --sdist --no-isolation - twine check dist/* - -[testenv:lint] -basepython = python3 -commands_pre = - mkdir -p {toxinidir}/parts/flake8 -allowlist_externals = - mkdir -commands = - isort --check-only --diff {toxinidir}/src {toxinidir}/setup.py - flake8 {toxinidir}/src {toxinidir}/setup.py -deps = - flake8 - isort - # Useful flake8 plugins that are Python and Plone specific: - flake8-coding - flake8-debugger - mccabe - -[testenv:isort-apply] -basepython = python3 -commands_pre = -deps = - isort -commands = - isort {toxinidir}/src {toxinidir}/setup.py [] - -[testenv:docs] -basepython = python3 -skip_install = false -extras = - docs -commands_pre = -commands = - sphinx-build -b html -d docs/_build/doctrees docs docs/_build/html - -[testenv:coverage] -basepython = python3 -skip_install = true -allowlist_externals = - {[testenv]allowlist_externals} - mkdir -deps = - {[testenv]deps} - coverage -commands = - mkdir -p {toxinidir}/parts/htmlcov - coverage run {envbindir}/test {posargs:-cv} - coverage html - coverage report -m --fail-under=84 - -[coverage:run] -branch = True -source = Products.mcdutils - -[coverage:report] -precision = 2 -exclude_lines = - pragma: no cover - pragma: nocover - except ImportError: - raise NotImplementedError - if __name__ == '__main__': - self.fail - raise AssertionError - -[coverage:html] -directory = parts/htmlcov