diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 3d0fa1036b..4cefd00cac 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -2,12 +2,10 @@ exclude: |
(?x)
# NOT INSTALLABLE ADDONS
^base_import_async/|
- ^queue_job/|
^queue_job_batch/|
^queue_job_cron/|
^queue_job_cron_jobrunner/|
^queue_job_subscribe/|
- ^test_queue_job/|
^test_queue_job_batch/|
# END NOT INSTALLABLE ADDONS
# Files and folders generated by bots, to avoid loops
diff --git a/base_import_async/models/base_import_import.py b/base_import_async/models/base_import_import.py
index f77a4bbc59..86c70e3e30 100644
--- a/base_import_async/models/base_import_import.py
+++ b/base_import_async/models/base_import_import.py
@@ -55,10 +55,9 @@ def execute_import(self, fields, columns, options, dryrun=False):
translated_model_name = search_result[0][1]
else:
translated_model_name = self._description
- description = _("Import %(model)s from file %(from_file)s") % {
- "model": translated_model_name,
- "from_file": self.file_name,
- }
+ description = _(
+ "Import %s from file %s", translated_model_name, self.file_name
+ )
attachment = self._create_csv_attachment(
import_fields, data, options, self.file_name
)
@@ -156,16 +155,13 @@ def _split_file(
):
chunk = str(priority - INIT_PRIORITY).zfill(padding)
description = _(
- "Import %(model)s from file %(file_name)s - "
- "#%(chunk)s - lines %(from)s to %(to)s"
+ "Import %s from file %s - #%s - lines %s to %s",
+ translated_model_name,
+ file_name,
+ chunk,
+ row_from + 1 + header_offset,
+ row_to + 1 + header_offset,
)
- description = description % {
- "model": translated_model_name,
- "file_name": file_name,
- "chunk": chunk,
- "from": row_from + 1 + header_offset,
- "to": row_to + 1 + header_offset,
- }
# create a CSV attachment and enqueue the job
root, ext = splitext(file_name)
attachment = self._create_csv_attachment(
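
Note on the pattern above: the hunks switch from applying `%` to the result of `_()` to passing the interpolation values directly to `_()`. A minimal sketch of why that matters, outside any Odoo model (the function name here is illustrative):

```python
# With _(fmt, *args) the *untranslated* format string is looked up in the
# translation catalog first, and %-substitution happens on the translated
# result. Doing `_(fmt) % args` by hand works too, but splits lookup and
# formatting into two steps that can drift apart.
from odoo import _  # assumes an Odoo environment is available

def describe_import(model_name, file_name):
    return _("Import %s from file %s", model_name, file_name)
```
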
diff --git a/queue_job/README.rst b/queue_job/README.rst
index 88b5a4d00b..54304cf76f 100644
--- a/queue_job/README.rst
+++ b/queue_job/README.rst
@@ -21,13 +21,13 @@ Job Queue
:target: http://www.gnu.org/licenses/lgpl-3.0-standalone.html
:alt: License: LGPL-3
.. |badge3| image:: https://img.shields.io/badge/github-OCA%2Fqueue-lightgray.png?logo=github
- :target: https://github.com/OCA/queue/tree/18.0/queue_job
+ :target: https://github.com/OCA/queue/tree/19.0/queue_job
:alt: OCA/queue
.. |badge4| image:: https://img.shields.io/badge/weblate-Translate%20me-F47D42.png
- :target: https://translation.odoo-community.org/projects/queue-18-0/queue-18-0-queue_job
+ :target: https://translation.odoo-community.org/projects/queue-19-0/queue-19-0-queue_job
:alt: Translate me on Weblate
.. |badge5| image:: https://img.shields.io/badge/runboat-Try%20me-875A7B.png
- :target: https://runboat.odoo-community.org/builds?repo=OCA/queue&target_branch=18.0
+ :target: https://runboat.odoo-community.org/builds?repo=OCA/queue&target_branch=19.0
:alt: Try me on Runboat
|badge1| |badge2| |badge3| |badge4| |badge5|
@@ -661,7 +661,7 @@ Bug Tracker
Bugs are tracked on `GitHub Issues <https://github.com/OCA/queue/issues>`_.
In case of trouble, please check there if your issue has already been reported.
If you spotted it first, help us to smash it by providing a detailed and welcomed
-`feedback <https://github.com/OCA/queue/issues/new?body=module:%20queue_job%0Aversion:%2018.0%0A%0A**Steps%20to%20reproduce**%0A-%20...%0A%0A**Current%20behavior**%0A%0A**Expected%20behavior**>`_.
+`feedback <https://github.com/OCA/queue/issues/new?body=module:%20queue_job%0Aversion:%2019.0%0A%0A**Steps%20to%20reproduce**%0A-%20...%0A%0A**Current%20behavior**%0A%0A**Expected%20behavior**>`_.
Do not contact contributors directly about support or help with technical issues.
@@ -720,6 +720,6 @@ Current `maintainer <https://github.com/guewen>`__:
|maintainer-guewen|
-This module is part of the `OCA/queue <https://github.com/OCA/queue/tree/18.0/queue_job>`_ project on GitHub.
+This module is part of the `OCA/queue <https://github.com/OCA/queue/tree/19.0/queue_job>`_ project on GitHub.
You are welcome to contribute. To learn how please visit https://odoo-community.org/page/Contribute.
diff --git a/queue_job/__manifest__.py b/queue_job/__manifest__.py
index 70b9774518..01e6a89015 100644
--- a/queue_job/__manifest__.py
+++ b/queue_job/__manifest__.py
@@ -2,7 +2,7 @@
{
"name": "Job Queue",
- "version": "18.0.2.0.2",
+ "version": "19.0.1.0.0",
"author": "Camptocamp,ACSONE SA/NV,Odoo Community Association (OCA)",
"website": "https://github.com/OCA/queue",
"license": "LGPL-3",
@@ -24,10 +24,10 @@
],
"assets": {
"web.assets_backend": [
- "/queue_job/static/src/views/**/*",
+ "queue_job/static/src/views/**/*",
],
},
- "installable": False,
+ "installable": True,
"development_status": "Mature",
"maintainers": ["guewen"],
"post_init_hook": "post_init_hook",
diff --git a/queue_job/controllers/main.py b/queue_job/controllers/main.py
index 6365e6efbc..da0a21c701 100644
--- a/queue_job/controllers/main.py
+++ b/queue_job/controllers/main.py
@@ -11,7 +11,7 @@
from psycopg2 import OperationalError, errorcodes
from werkzeug.exceptions import BadRequest, Forbidden
-from odoo import SUPERUSER_ID, _, api, http
+from odoo import SUPERUSER_ID, api, http
from odoo.modules.registry import Registry
from odoo.service.model import PG_CONCURRENCY_ERRORS_TO_RETRY
@@ -179,7 +179,7 @@ def create_test_job(
failure_rate=0,
):
if not http.request.env.user.has_group("base.group_erp_manager"):
- raise Forbidden(_("Access Denied"))
+ raise Forbidden(http.request.env._("Access Denied"))
if failure_rate is not None:
try:
@@ -280,7 +280,7 @@ def _create_graph_test_jobs(
priority=priority,
max_retries=max_retries,
channel=channel,
- description="%s #%d" % (description, current_count),
+ description=f"{description} #{current_count}",
)._test_job(failure_rate=failure_rate)
)
diff --git a/queue_job/delay.py b/queue_job/delay.py
index 0ba54e48a9..60e98dc9d6 100644
--- a/queue_job/delay.py
+++ b/queue_job/delay.py
@@ -496,7 +496,8 @@ def __del__(self):
def _set_from_dict(self, properties):
for key, value in properties.items():
if key not in self._properties:
- raise ValueError(f"No property {key}")
+ msg = f"No property {key}"
+ raise ValueError(msg)
setattr(self, key, value)
def set(self, *args, **kwargs):
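
Several files in this diff apply the same linter-driven idiom seen here: the exception message is bound to a variable before raising (ruff's EM101/EM102 rule), which keeps long literals out of the `raise` statement. A tiny stand-alone sketch:

```python
def get_property(properties, key):
    # Binding the message first keeps the raise statement short and makes
    # the text reusable for logging or assertions in tests.
    if key not in properties:
        msg = f"No property {key}"
        raise ValueError(msg)
    return properties[key]
```
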
diff --git a/queue_job/fields.py b/queue_job/fields.py
index 8cb45af765..d9f371605f 100644
--- a/queue_job/fields.py
+++ b/queue_job/fields.py
@@ -10,6 +10,7 @@
from odoo import fields, models
from odoo.tools.func import lazy
+from odoo.tools.misc import SENTINEL
class JobSerialized(fields.Json):
@@ -38,13 +39,14 @@ class JobSerialized(fields.Json):
),
}
- def __init__(self, string=fields.SENTINEL, base_type=fields.SENTINEL, **kwargs):
+ def __init__(self, string=SENTINEL, base_type=SENTINEL, **kwargs):
super().__init__(string=string, _base_type=base_type, **kwargs)
def _setup_attrs(self, model, name): # pylint: disable=missing-return
super()._setup_attrs(model, name)
if self._base_type not in self._default_json_mapping:
- raise ValueError(f"{self._base_type} is not a supported base type")
+ msg = f"{self._base_type} is not a supported base type"
+ raise ValueError(msg)
def _base_type_default_json(self, env):
default_json = self._default_json_mapping.get(self._base_type)
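
`fields.SENTINEL` is replaced by `SENTINEL` from `odoo.tools.misc`, which appears to be its new home in 19.0. The sentinel-default idiom itself is plain Python and easy to demonstrate stand-alone:

```python
# A unique module-level object distinguishes "argument omitted" from
# "argument explicitly passed as None" (None may be a legitimate value).
SENTINEL = object()

def make_field(string=SENTINEL):
    if string is SENTINEL:
        return "label will be derived from the field name"
    return f"explicit label: {string!r}"

assert make_field() != make_field(None)
```
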
diff --git a/queue_job/job.py b/queue_job/job.py
index 6cfe12f232..48a7561553 100644
--- a/queue_job/job.py
+++ b/queue_job/job.py
@@ -209,7 +209,8 @@ def load(cls, env, job_uuid):
"""
stored = cls.db_records_from_uuids(env, [job_uuid])
if not stored:
- raise NoSuchJobError(f"Job {job_uuid} does no longer exist in the storage.")
+ msg = f"Job {job_uuid} does no longer exist in the storage."
+ raise NoSuchJobError(msg)
return cls._load_from_db_record(stored)
@classmethod
@@ -505,7 +506,7 @@ def perform(self):
# traceback and message:
# http://blog.ianbicking.org/2007/09/12/re-raising-exceptions/
new_exc = FailedJobError(
- "Max. retries (%d) reached: %s" % (self.max_retries, value or type_)
+ f"Max. retries ({self.max_retries}) reached: {value or type_}"
)
raise new_exc from err
raise
@@ -813,7 +814,7 @@ def set_failed(self, **kw):
setattr(self, k, v)
def __repr__(self):
- return "" % (self.uuid, self.priority)
+ return f""
def _get_retry_seconds(self, seconds=None):
retry_pattern = self.job_config.retry_pattern
@@ -828,7 +829,7 @@ def _get_retry_seconds(self, seconds=None):
break
elif not seconds:
seconds = RETRY_INTERVAL
- if isinstance(seconds, (list | tuple)):
+ if isinstance(seconds, list | tuple):
seconds = randint(seconds[0], seconds[1])
return seconds
@@ -856,8 +857,7 @@ def related_action(self):
funcname = record._default_related_action
if not isinstance(funcname, str):
raise ValueError(
- "related_action must be the name of the "
- "method on queue.job as string"
+ "related_action must be the name of the method on queue.job as string"
)
action = getattr(record, funcname)
action_kwargs = self.job_config.related_action_kwargs
diff --git a/queue_job/jobrunner/__init__.py b/queue_job/jobrunner/__init__.py
index e2561b0e74..50dd45e39d 100644
--- a/queue_job/jobrunner/__init__.py
+++ b/queue_job/jobrunner/__init__.py
@@ -5,11 +5,14 @@
import logging
from threading import Thread
import time
+from configparser import ConfigParser
from odoo.service import server
from odoo.tools import config
try:
+ # Preferred source when available: structured [queue_job] section provided
+ # by OCA's server_environment addon.
from odoo.addons.server_environment import serv_config
if serv_config.has_section("queue_job"):
@@ -17,10 +20,17 @@
else:
queue_job_config = {}
except ImportError:
- queue_job_config = config.misc.get("queue_job", {})
+ # No server_environment: try to read a [queue_job] section from odoo.conf
+ queue_job_config = {}
+ cfg_path = config.get("config")
+ if cfg_path:
+ cp = ConfigParser(interpolation=None)
+ cp.read(cfg_path)
+ if cp.has_section("queue_job"):
+ queue_job_config = dict(cp["queue_job"])
-from .runner import QueueJobRunner, _channels
+from .runner import QueueJobRunner, _channels # noqa: E402
_logger = logging.getLogger(__name__)
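
When the `server_environment` addon is absent, the runner configuration now comes from a `[queue_job]` section of the Odoo rc file itself, as the hunk above shows. A stand-alone sketch of that fallback (the config path is an assumption for illustration):

```python
from configparser import ConfigParser

def read_queue_job_section(cfg_path):
    # interpolation=None: treat %-signs in values (e.g. URI-encoded
    # passwords) literally instead of as ConfigParser interpolation.
    cp = ConfigParser(interpolation=None)
    cp.read(cfg_path)  # a missing file just yields an empty parser
    if cp.has_section("queue_job"):
        return dict(cp["queue_job"])
    return {}

print(read_queue_job_section("/etc/odoo/odoo.conf"))  # hypothetical path
```
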
diff --git a/queue_job/jobrunner/channels.py b/queue_job/jobrunner/channels.py
index c895d9caf3..bfc8984862 100644
--- a/queue_job/jobrunner/channels.py
+++ b/queue_job/jobrunner/channels.py
@@ -455,12 +455,9 @@ def get_subchannel_by_name(self, subchannel_name):
def __str__(self):
capacity = "∞" if self.capacity is None else str(self.capacity)
- return "%s(C:%s,Q:%d,R:%d,F:%d)" % (
- self.fullname,
- capacity,
- len(self._queue),
- len(self._running),
- len(self._failed),
+ return (
+ f"{self.fullname}(C:{capacity},Q:{len(self._queue)},"
+ f"R:{len(self._running)},F:{len(self._failed)})"
)
def remove(self, job):
@@ -894,8 +891,7 @@ def parse_simple_config(cls, config_string):
)
if k in config:
raise ValueError(
- f"Invalid channel config {config_string}: "
- f"duplicate key {k}"
+ f"Invalid channel config {config_string}: duplicate key {k}"
)
config[k] = v
else:
@@ -996,7 +992,8 @@ def get_channel_by_name(
if channel_name in self._channels_by_name:
return self._channels_by_name[channel_name]
if not autocreate and not parent_fallback:
- raise ChannelNotFound(f"Channel {channel_name} not found")
+ msg = f"Channel {channel_name} not found"
+ raise ChannelNotFound(msg)
parent = self._root_channel
if parent_fallback:
# Look for first direct parent w/ config.
diff --git a/queue_job/jobrunner/runner.py b/queue_job/jobrunner/runner.py
index a1aa70a4d4..59f1f87c54 100644
--- a/queue_job/jobrunner/runner.py
+++ b/queue_job/jobrunner/runner.py
@@ -472,11 +472,10 @@ def from_environ_or_config(cls):
return runner
def get_db_names(self):
- if config["db_name"]:
- db_names = config["db_name"].split(",")
- else:
- db_names = odoo.service.db.list_dbs(True)
- return db_names
+ db_names = config["db_name"] or []
+ if db_names:
+ return list(db_names)
+ return odoo.service.db.list_dbs(True)
def close_databases(self, remove_jobs=True):
for db_name, db in self.db_by_name.items():
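
`get_db_names()` no longer splits a comma-separated string: in recent Odoo versions `config["db_name"]` is already a list. A hedged sketch of the new control flow with the Odoo dependencies stubbed out:

```python
# `config_db_name` stands in for config["db_name"] (now a list) and
# `list_all_dbs` for odoo.service.db.list_dbs; both are stand-ins here.
def get_db_names(config_db_name, list_all_dbs):
    db_names = config_db_name or []
    if db_names:
        return list(db_names)  # copy: callers must not mutate the config
    return list_all_dbs(True)  # fall back to every visible database

assert get_db_names(["a", "b"], lambda force: []) == ["a", "b"]
assert get_db_names([], lambda force: ["x"]) == ["x"]
```
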
diff --git a/queue_job/models/base.py b/queue_job/models/base.py
index 3a68ffd11b..2c71cafc17 100644
--- a/queue_job/models/base.py
+++ b/queue_job/models/base.py
@@ -193,9 +193,17 @@ def foo_job_options(self, arg1):
}
def _register_hook(self):
- self._patch_method(
+ # patch the method at registry time
+ patched = self._patch_job_auto_delay(
+ "foo", context_key="auto_delay_foo"
+ )
+ setattr(
+ type(self),
"foo",
- self._patch_job_auto_delay("foo", context_key="auto_delay_foo")
+ functools.update_wrapper(
+ patched,
+ getattr(type(self), "foo"),
+ ),
)
return super()._register_hook()
@@ -224,8 +232,9 @@ def auto_delay_wrapper(self, *args, **kwargs):
delayed = self.with_delay(**job_options)
return getattr(delayed, method_name)(*args, **kwargs)
- origin = getattr(self, method_name)
- return functools.update_wrapper(auto_delay_wrapper, origin)
+ origin_func = getattr(type(self), method_name)
+ auto_delay_wrapper.origin = origin_func
+ return functools.update_wrapper(auto_delay_wrapper, origin_func)
@api.model
def _job_store_values(self, job):
@@ -259,12 +268,3 @@ def _job_prepare_context_before_enqueue(self):
for key, value in self.env.context.items()
if key in self._job_prepare_context_before_enqueue_keys()
}
-
- @classmethod
- def _patch_method(cls, name, method):
- origin = getattr(cls, name)
- method.origin = origin
- # propagate decorators from origin to method, and apply api decorator
- wrapped = api.propagate(origin, method)
- wrapped.origin = origin
- setattr(cls, name, wrapped)
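
The deprecated `_patch_method()` helper is removed; methods are now swapped on the class directly, with `functools.update_wrapper` preserving the original's metadata and an `origin` attribute kept for call-through. The mechanics in plain Python (class and method names are illustrative):

```python
import functools

class Record:
    def work(self, x):
        """Do some work."""
        return x * 2

def patch_auto_delay(cls, method_name):
    def auto_delay_wrapper(self, *args, **kwargs):
        # The real wrapper decides between enqueueing a job and calling
        # through; here we always call the original.
        return auto_delay_wrapper.origin(self, *args, **kwargs)

    origin = getattr(cls, method_name)
    auto_delay_wrapper.origin = origin
    setattr(cls, method_name, functools.update_wrapper(auto_delay_wrapper, origin))

patch_auto_delay(Record, "work")
assert Record().work(3) == 6
assert Record.work.__doc__ == "Do some work."  # metadata survived
```
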
diff --git a/queue_job/models/queue_job.py b/queue_job/models/queue_job.py
index 411ae43af5..b1a5dcaf7b 100644
--- a/queue_job/models/queue_job.py
+++ b/queue_job/models/queue_job.py
@@ -5,8 +5,9 @@
import random
from datetime import datetime, timedelta
-from odoo import _, api, exceptions, fields, models
-from odoo.tools import config, html_escape, index_exists
+from odoo import api, exceptions, fields, models
+from odoo.tools import config, html_escape
+from odoo.tools.sql import create_index
from odoo.addons.base_sparse_field.models.fields import Serialized
@@ -127,38 +128,41 @@ class QueueJob(models.Model):
worker_pid = fields.Integer(readonly=True)
def init(self):
- index_1 = "queue_job_identity_key_state_partial_index"
- index_2 = "queue_job_channel_date_done_date_created_index"
- if not index_exists(self._cr, index_1):
- # Used by Job.job_record_with_same_identity_key
- self._cr.execute(
- "CREATE INDEX queue_job_identity_key_state_partial_index "
- "ON queue_job (identity_key) WHERE state in ('pending', "
- "'enqueued', 'wait_dependencies') AND identity_key IS NOT NULL;"
- )
- if not index_exists(self._cr, index_2):
- # Used by .autovacuum
- self._cr.execute(
- "CREATE INDEX queue_job_channel_date_done_date_created_index "
- "ON queue_job (channel, date_done, date_created);"
- )
+ cr = self.env.cr
+ # Used by Job.job_record_with_same_identity_key
+ create_index(
+ cr,
+ "queue_job_identity_key_state_partial_index",
+ "queue_job",
+ ["identity_key"],
+ where=(
+ "state in ('pending','enqueued','wait_dependencies') "
+ "AND identity_key IS NOT NULL"
+ ),
+ comment=("Queue Job: partial index for identity_key on active states"),
+ )
+ # Used by .autovacuum
+ create_index(
+ cr,
+ "queue_job_channel_date_done_date_created_index",
+ "queue_job",
+ ["channel", "date_done", "date_created"],
+ comment="Queue Job: index to accelerate autovacuum",
+ )
@api.depends("dependencies")
def _compute_dependency_graph(self):
- jobs_groups = self.env["queue.job"].read_group(
- [
- (
- "graph_uuid",
- "in",
- [uuid for uuid in self.mapped("graph_uuid") if uuid],
+ graph_uuids = [uuid for uuid in self.mapped("graph_uuid") if uuid]
+ if graph_uuids:
+ ids_per_graph_uuid = dict(
+ self.env["queue.job"]._read_group(
+ [("graph_uuid", "in", graph_uuids)],
+ groupby=["graph_uuid"],
+ aggregates=["id:array_agg"],
)
- ],
- ["graph_uuid", "ids:array_agg(id)"],
- ["graph_uuid"],
- )
- ids_per_graph_uuid = {
- group["graph_uuid"]: group["ids"] for group in jobs_groups
- }
+ )
+ else:
+ ids_per_graph_uuid = {}
for record in self:
if not record.graph_uuid:
record.dependency_graph = {}
@@ -216,20 +220,17 @@ def _dependency_graph_vis_node(self):
}
def _compute_graph_jobs_count(self):
- jobs_groups = self.env["queue.job"].read_group(
- [
- (
- "graph_uuid",
- "in",
- [uuid for uuid in self.mapped("graph_uuid") if uuid],
+ graph_uuids = [uuid for uuid in self.mapped("graph_uuid") if uuid]
+ if graph_uuids:
+ count_per_graph_uuid = dict(
+ self.env["queue.job"]._read_group(
+ [("graph_uuid", "in", graph_uuids)],
+ groupby=["graph_uuid"],
+ aggregates=["__count"],
)
- ],
- ["graph_uuid"],
- ["graph_uuid"],
- )
- count_per_graph_uuid = {
- group["graph_uuid"]: group["graph_uuid_count"] for group in jobs_groups
- }
+ )
+ else:
+ count_per_graph_uuid = {}
for record in self:
record.graph_jobs_count = count_per_graph_uuid.get(record.graph_uuid) or 0
@@ -247,11 +248,12 @@ def write(self, vals):
fieldname for fieldname in vals if fieldname in self._protected_fields
]
if write_on_protected_fields:
- raise exceptions.AccessError(
- _("Not allowed to change field(s): {}").format(
- write_on_protected_fields
- )
+ # use env translation and lazy formatting (args to _)
+ msg = self.env._(
+ "Not allowed to change field(s): %s",
+ ", ".join(write_on_protected_fields),
)
+ raise exceptions.AccessError(msg)
different_user_jobs = self.browse()
if vals.get("user_id"):
@@ -279,7 +281,8 @@ def open_related_action(self):
job = Job.load(self.env, self.uuid)
action = job.related_action()
if action is None:
- raise exceptions.UserError(_("No action available for this job"))
+ msg = self.env._("No action available for this job")
+ raise exceptions.UserError(msg)
return action
def open_graph_jobs(self):
@@ -292,7 +295,7 @@ def open_graph_jobs(self):
)
action.update(
{
- "name": _("Jobs for graph %s") % (self.graph_uuid),
+ "name": self.env._("Jobs for graph %s", self.graph_uuid),
"context": {},
"domain": [("id", "in", jobs.ids)],
}
@@ -321,15 +324,16 @@ def _change_job_state(self, state, result=None):
record.env["queue.job"].flush_model()
job_.cancel_dependent_jobs()
else:
- raise ValueError(f"State not supported: {state}")
+ msg = f"State not supported: {state}"
+ raise ValueError(msg)
def button_done(self):
- result = _("Manually set to done by {}").format(self.env.user.name)
+ result = self.env._("Manually set to done by %s", self.env.user.name)
self._change_job_state(DONE, result=result)
return True
def button_cancelled(self):
- result = _("Cancelled by {}").format(self.env.user.name)
+ result = self.env._("Cancelled by %s", self.env.user.name)
self._change_job_state(CANCELLED, result=result)
return True
@@ -356,7 +360,7 @@ def _subscribe_users_domain(self):
if not group:
return None
companies = self.mapped("company_id")
- domain = [("groups_id", "=", group.id)]
+ domain = [("group_ids", "=", group.id)]
if companies:
domain.append(("company_id", "in", companies.ids))
return domain
@@ -370,7 +374,7 @@ def _message_failed_job(self):
If nothing is returned, no message will be posted.
"""
self.ensure_one()
- return _(
+ return self.env._(
"Something bad happened during the execution of the job. "
"More details in the 'Exception Information' section."
)
@@ -388,8 +392,9 @@ def autovacuum(self):
Called from a cron.
"""
- for channel in self.env["queue.job.channel"].search([]):
+ for channel in self.env["queue.job.channel"].search([]): # pylint: disable=no-search-all
deadline = datetime.now() - timedelta(days=int(channel.removal_interval))
+ # Delete in chunks using a stable order (matches composite index)
while True:
jobs = self.search(
[
@@ -425,7 +430,7 @@ def related_action_open_record(self):
if not records:
return None
action = {
- "name": _("Related Record"),
+ "name": self.env._("Related Record"),
"type": "ir.actions.act_window",
"view_mode": "form",
"res_model": records._name,
@@ -435,7 +440,7 @@ def related_action_open_record(self):
else:
action.update(
{
- "name": _("Related Records"),
+ "name": self.env._("Related Records"),
"view_mode": "list,form",
"domain": [("id", "in", records.ids)],
}
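
Both compute methods above move from the public `read_group()` (dict rows) to the private `_read_group()`, which takes `domain`, `groupby`, and `aggregates` and yields plain tuples; that is why a bare `dict()` is enough to build the mapping. Illustrated with made-up values:

```python
# Shape of what _read_group returns for a single groupby key: a list of
# (group value, aggregate value) tuples. Values below are illustrative.
rows = [
    ("graph-uuid-a", [101, 102, 103]),  # aggregates=["id:array_agg"]
    ("graph-uuid-b", [104]),
]
ids_per_graph_uuid = dict(rows)
assert ids_per_graph_uuid["graph-uuid-a"] == [101, 102, 103]

counts = [("graph-uuid-a", 3), ("graph-uuid-b", 1)]  # aggregates=["__count"]
assert dict(counts)["graph-uuid-b"] == 1
```
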
diff --git a/queue_job/models/queue_job_channel.py b/queue_job/models/queue_job_channel.py
index 4aabb0188c..0a5c1c917b 100644
--- a/queue_job/models/queue_job_channel.py
+++ b/queue_job/models/queue_job_channel.py
@@ -2,7 +2,7 @@
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html)
-from odoo import _, api, exceptions, fields, models
+from odoo import api, exceptions, fields, models
class QueueJobChannel(models.Model):
@@ -26,9 +26,10 @@ class QueueJobChannel(models.Model):
default=lambda self: self.env["queue.job"]._removal_interval, required=True
)
- _sql_constraints = [
- ("name_uniq", "unique(complete_name)", "Channel complete name must be unique")
- ]
+ _name_uniq = models.Constraint(
+ "UNIQUE(complete_name)",
+ "Channel complete name must be unique",
+ )
@api.depends("name", "parent_id.complete_name")
def _compute_complete_name(self):
@@ -45,7 +46,8 @@ def _compute_complete_name(self):
def parent_required(self):
for record in self:
if record.name != "root" and not record.parent_id:
- raise exceptions.ValidationError(_("Parent channel required."))
+ msg = self.env._("Parent channel required.")
+ raise exceptions.ValidationError(msg)
@api.model_create_multi
def create(self, vals_list):
@@ -79,11 +81,13 @@ def write(self, values):
and channel.name == "root"
and ("name" in values or "parent_id" in values)
):
- raise exceptions.UserError(_("Cannot change the root channel"))
+ msg = self.env._("Cannot change the root channel")
+ raise exceptions.UserError(msg)
return super().write(values)
- def unlink(self):
+ @api.ondelete(at_uninstall=False)
+ def _check_not_root_ondelete(self):
for channel in self:
if channel.name == "root":
- raise exceptions.UserError(_("Cannot remove the root channel"))
- return super().unlink()
+ msg = self.env._("Cannot remove the root channel")
+ raise exceptions.UserError(msg)
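
Two idioms change in this file: the `_sql_constraints` list becomes a class-level `models.Constraint` attribute, and the `unlink()` override becomes an `@api.ondelete` hook, which Odoo skips when the module itself is uninstalled. A condensed Odoo-style sketch (model name hypothetical, runnable only inside a registry):

```python
from odoo import api, exceptions, fields, models

class Channel(models.Model):
    _name = "x.channel"  # hypothetical model for illustration

    name = fields.Char()
    complete_name = fields.Char()

    # Declared as a class attribute; the ORM collects Constraint objects
    # the way it used to collect _sql_constraints entries.
    _name_uniq = models.Constraint(
        "UNIQUE(complete_name)",
        "Channel complete name must be unique",
    )

    @api.ondelete(at_uninstall=False)  # not enforced during uninstall
    def _check_not_root_ondelete(self):
        for channel in self:
            if channel.name == "root":
                msg = self.env._("Cannot remove the root channel")
                raise exceptions.UserError(msg)
```
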
diff --git a/queue_job/models/queue_job_function.py b/queue_job/models/queue_job_function.py
index 7cf73ea370..5f86f7a214 100644
--- a/queue_job/models/queue_job_function.py
+++ b/queue_job/models/queue_job_function.py
@@ -6,7 +6,7 @@
import re
from collections import namedtuple
-from odoo import _, api, exceptions, fields, models, tools
+from odoo import api, exceptions, fields, models, tools
from ..fields import JobSerialized
@@ -91,14 +91,16 @@ def _compute_name(self):
def _inverse_name(self):
groups = regex_job_function_name.match(self.name)
if not groups:
- raise exceptions.UserError(_("Invalid job function: {}").format(self.name))
+ msg = self.env._("Invalid job function: %s", self.name)
+ raise exceptions.UserError(msg)
model_name = groups[1]
method = groups[2]
model = (
self.env["ir.model"].sudo().search([("model", "=", model_name)], limit=1)
)
if not model:
- raise exceptions.UserError(_("Model {} not found").format(model_name))
+ msg = self.env._("Model %s not found", model_name)
+ raise exceptions.UserError(msg)
self.model_id = model.id
self.method = method
@@ -187,12 +189,13 @@ def job_config(self, name):
)
def _retry_pattern_format_error_message(self):
- return _(
- "Unexpected format of Retry Pattern for {}.\n"
+ return self.env._(
+ "Unexpected format of Retry Pattern for %s.\n"
"Example of valid formats:\n"
"{{1: 300, 5: 600, 10: 1200, 15: 3000}}\n"
- "{{1: (1, 10), 5: (11, 20), 10: (21, 30), 15: (100, 300)}}"
- ).format(self.name)
+ "{{1: (1, 10), 5: (11, 20), 10: (21, 30), 15: (100, 300)}}",
+ self.name,
+ )
@api.constrains("retry_pattern")
def _check_retry_pattern(self):
@@ -211,7 +214,7 @@ def _check_retry_pattern(self):
) from ex
def _retry_value_type_check(self, value):
- if isinstance(value, (tuple | list)):
+ if isinstance(value, tuple | list):
if len(value) != 2:
raise ValueError
[self._retry_value_type_check(element) for element in value]
@@ -219,12 +222,13 @@ def _retry_value_type_check(self, value):
int(value)
def _related_action_format_error_message(self):
- return _(
- "Unexpected format of Related Action for {}.\n"
+ return self.env._(
+ "Unexpected format of Related Action for %s.\n"
"Example of valid format:\n"
'{{"enable": True, "func_name": "related_action_foo",'
- ' "kwargs" {{"limit": 10}}}}'
- ).format(self.name)
+ ' "kwargs" {{"limit": 10}}}}',
+ self.name,
+ )
@api.constrains("related_action")
def _check_related_action(self):
diff --git a/queue_job/security/security.xml b/queue_job/security/security.xml
index 947644e95c..740226988e 100644
--- a/queue_job/security/security.xml
+++ b/queue_job/security/security.xml
@@ -1,16 +1,20 @@
+ <record id="privilege_queue_job" model="res.groups.privilege">
+ <field name="name">Job Queue</field>
+ <field name="sequence">50</field>
+ </record>
<record model="ir.module.category" id="module_category_queue_job">
<field name="name">Job Queue</field>
<field name="sequence">20</field>
</record>
<record id="group_queue_job_manager" model="res.groups">
<field name="name">Job Queue Manager</field>
- <field name="category_id" ref="module_category_queue_job" />
+ <field name="privilege_id" ref="privilege_queue_job" />
</record>
diff --git a/queue_job/static/description/index.html b/queue_job/static/description/index.html
index 6cc2121a4d..0e385a33cb 100644
--- a/queue_job/static/description/index.html
+++ b/queue_job/static/description/index.html
@@ -374,7 +374,7 @@ Job Queue
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
!! source digest: sha256:58f9182440bb316576671959b69148ea5454958f9ae8db75bccd30c89012676d
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -->
-<p><a class="reference external image-reference" href="https://odoo-community.org/page/development-status"><img alt="Mature" src="https://img.shields.io/badge/maturity-Mature-brightgreen.png" /></a> <a class="reference external image-reference" href="http://www.gnu.org/licenses/lgpl-3.0-standalone.html"><img alt="License: LGPL-3" src="https://img.shields.io/badge/licence-LGPL--3-blue.png" /></a> <a class="reference external image-reference" href="https://github.com/OCA/queue/tree/18.0/queue_job"><img alt="OCA/queue" src="https://img.shields.io/badge/github-OCA%2Fqueue-lightgray.png?logo=github" /></a> <a class="reference external image-reference" href="https://translation.odoo-community.org/projects/queue-18-0/queue-18-0-queue_job"><img alt="Translate me on Weblate" src="https://img.shields.io/badge/weblate-Translate%20me-F47D42.png" /></a> <a class="reference external image-reference" href="https://runboat.odoo-community.org/builds?repo=OCA/queue&amp;target_branch=18.0"><img alt="Try me on Runboat" src="https://img.shields.io/badge/runboat-Try%20me-875A7B.png" /></a></p>
+<p><a class="reference external image-reference" href="https://odoo-community.org/page/development-status"><img alt="Mature" src="https://img.shields.io/badge/maturity-Mature-brightgreen.png" /></a> <a class="reference external image-reference" href="http://www.gnu.org/licenses/lgpl-3.0-standalone.html"><img alt="License: LGPL-3" src="https://img.shields.io/badge/licence-LGPL--3-blue.png" /></a> <a class="reference external image-reference" href="https://github.com/OCA/queue/tree/19.0/queue_job"><img alt="OCA/queue" src="https://img.shields.io/badge/github-OCA%2Fqueue-lightgray.png?logo=github" /></a> <a class="reference external image-reference" href="https://translation.odoo-community.org/projects/queue-19-0/queue-19-0-queue_job"><img alt="Translate me on Weblate" src="https://img.shields.io/badge/weblate-Translate%20me-F47D42.png" /></a> <a class="reference external image-reference" href="https://runboat.odoo-community.org/builds?repo=OCA/queue&amp;target_branch=19.0"><img alt="Try me on Runboat" src="https://img.shields.io/badge/runboat-Try%20me-875A7B.png" /></a></p>
This addon adds an integrated Job Queue to Odoo.
It allows to postpone method calls executed asynchronously.
Jobs are executed in the background by a Jobrunner, in their own
@@ -962,7 +962,7 @@
Bugs are tracked on <a class="reference external" href="https://github.com/OCA/queue/issues">GitHub Issues</a>.
In case of trouble, please check there if your issue has already been reported.
If you spotted it first, help us to smash it by providing a detailed and welcomed
-<a class="reference external" href="https://github.com/OCA/queue/issues/new?body=module:%20queue_job%0Aversion:%2018.0%0A%0A**Steps%20to%20reproduce**%0A-%20...%0A%0A**Current%20behavior**%0A%0A**Expected%20behavior**">feedback</a>.
+<a class="reference external" href="https://github.com/OCA/queue/issues/new?body=module:%20queue_job%0Aversion:%2019.0%0A%0A**Steps%20to%20reproduce**%0A-%20...%0A%0A**Current%20behavior**%0A%0A**Expected%20behavior**">feedback</a>.
Do not contact contributors directly about support or help with technical issues.
diff --git a/queue_job/tests/common.py b/queue_job/tests/common.py
index ec036bd639..c7569fa8f8 100644
--- a/queue_job/tests/common.py
+++ b/queue_job/tests/common.py
@@ -4,12 +4,11 @@
import logging
import typing
from contextlib import contextmanager
-from itertools import groupby
from operator import attrgetter
from unittest import TestCase, mock
from odoo.tests.case import TestCase as _TestCase
-from odoo.tests.common import MetaCase
+from odoo.tools import groupby
from odoo.addons.queue_job.delay import Graph
@@ -211,14 +210,14 @@ def assert_enqueued_job(self, method, args=None, kwargs=None, properties=None):
)
if expected_call not in actual_calls:
- raise AssertionError(
- "Job {} was not enqueued.\n" "Actual enqueued jobs:\n{}".format(
- self._format_job_call(expected_call),
- "\n".join(
- f" * {self._format_job_call(call)}" for call in actual_calls
- ),
- )
+ actual_lines = "\n".join(
+ f" * {self._format_job_call(call)}" for call in actual_calls
+ )
+ msg = (
+ f"Job {self._format_job_call(expected_call)} was not enqueued.\n"
+ f"Actual enqueued jobs:\n{actual_lines}"
)
+ raise AssertionError(msg)
def perform_enqueued_jobs(self):
"""Perform the enqueued jobs synchronously"""
@@ -298,18 +297,25 @@ def _filtered_enqueued_jobs(self, job_method):
return enqueued_jobs
def _format_job_call(self, call):
- method_all_args = []
+ # Build method argument string (positional and keyword) separately
+ method_args_parts = []
if call.args:
- method_all_args.append(", ".join(f"{arg}" for arg in call.args))
+ method_args_parts.append(", ".join(f"{arg}" for arg in call.args))
if call.kwargs:
- method_all_args.append(
+ method_args_parts.append(
", ".join(f"{key}={value}" for key, value in call.kwargs.items())
)
- return "<{}>.{}({}) with properties ({})".format(
- call.method.__self__,
- call.method.__name__,
- ", ".join(method_all_args),
- ", ".join(f"{key}={value}" for key, value in call.properties.items()),
+ method_args = ", ".join(method_args_parts)
+
+ # Build properties string
+ props_str = ", ".join(
+ f"{key}={value}" for key, value in call.properties.items()
+ )
+
+ return (
+ f"<{call.method.__self__}>."
+ f"{call.method.__name__}({method_args}) "
+ f"with properties ({props_str})"
)
def __repr__(self):
@@ -335,7 +341,7 @@ def search_created(self):
return self.search_all() - self.existing
def search_all(self):
- return self.env["queue.job"].search([])
+ return self.env["queue.job"].search([]) # pylint: disable=no-search-all
class JobMixin:
@@ -353,7 +359,7 @@ def trap_jobs(self):
@contextmanager
-def mock_with_delay(): # pylint: disable=E501
+def mock_with_delay(): # pylint: disable=line-too-long
"""Context Manager mocking ``with_delay()``
DEPRECATED: use ``trap_jobs()``.
@@ -414,7 +420,7 @@ def test_export(self):
yield delayable_cls, delayable
-class OdooDocTestCase(doctest.DocTestCase, _TestCase, MetaCase("DummyCase", (), {})):
+class OdooDocTestCase(doctest.DocTestCase, _TestCase):
"""
We need a custom DocTestCase class in order to:
- define test_tags to run as part of standard tests
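
The import swap from `itertools.groupby` to `odoo.tools.groupby` in this file is behavioral, not cosmetic: the itertools version only merges *adjacent* equal keys, so unsorted input silently splits groups. A stand-alone illustration of the difference (using a defaultdict to mimic what an order-insensitive groupby does):

```python
from collections import defaultdict
from itertools import groupby as itergroupby

data = ["ant", "bee", "axolotl"]  # first-letter keys: a, b, a (unsorted)

adjacent = [(k, list(g)) for k, g in itergroupby(data, key=lambda s: s[0])]
# 'a' appears twice because only *adjacent* runs are merged:
assert adjacent == [("a", ["ant"]), ("b", ["bee"]), ("a", ["axolotl"])]

buckets = defaultdict(list)
for item in data:
    buckets[item[0]].append(item)  # order-insensitive grouping
assert buckets["a"] == ["ant", "axolotl"]
```
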
diff --git a/queue_job/tests/test_json_field.py b/queue_job/tests/test_json_field.py
index f5bf760ffe..76bb59c977 100644
--- a/queue_job/tests/test_json_field.py
+++ b/queue_job/tests/test_json_field.py
@@ -6,6 +6,7 @@
from lxml import etree
+from odoo import Command
from odoo.tests import common
# pylint: disable=odoo-addons-relative-import
@@ -14,8 +15,24 @@
class TestJson(common.TransactionCase):
+ @classmethod
+ def setUpClass(cls):
+ super().setUpClass()
+ User = cls.env["res.users"]
+ main_company = cls.env.ref("base.main_company")
+ group_user = cls.env.ref("base.group_user")
+ cls.demo_user = User.create(
+ {
+ "name": "Demo User (Queue)",
+ "login": "queue_demo_user",
+ "company_id": main_company.id,
+ "company_ids": [Command.set([main_company.id])],
+ "group_ids": [Command.set([group_user.id])],
+ }
+ )
+
def test_encoder_recordset(self):
- demo_user = self.env.ref("base.user_demo")
+ demo_user = self.demo_user
context = demo_user.context_get()
partner = self.env(user=demo_user, context=context).ref("base.main_partner")
value = partner
@@ -33,7 +50,7 @@ def test_encoder_recordset(self):
self.assertEqual(json.loads(value_json), expected)
def test_encoder_recordset_list(self):
- demo_user = self.env.ref("base.user_demo")
+ demo_user = self.demo_user
context = demo_user.context_get()
partner = self.env(user=demo_user, context=context).ref("base.main_partner")
value = ["a", 1, partner]
@@ -55,7 +72,7 @@ def test_encoder_recordset_list(self):
self.assertEqual(json.loads(value_json), expected)
def test_decoder_recordset(self):
- demo_user = self.env.ref("base.user_demo")
+ demo_user = self.demo_user
context = demo_user.context_get()
partner = self.env(user=demo_user).ref("base.main_partner")
@@ -76,7 +93,7 @@ def test_decoder_recordset(self):
self.assertEqual(demo_user, expected.env.user)
def test_decoder_recordset_list(self):
- demo_user = self.env.ref("base.user_demo")
+ demo_user = self.demo_user
context = demo_user.context_get()
partner = self.env(user=demo_user).ref("base.main_partner")
value_json = json.dumps(
@@ -100,7 +117,7 @@ def test_decoder_recordset_list(self):
def test_decoder_recordset_list_without_user(self):
value_json = (
- '["a", 1, {"_type": "odoo_recordset",' '"model": "res.users", "ids": [1]}]'
+ '["a", 1, {"_type": "odoo_recordset", "model": "res.users", "ids": [1]}]'
)
expected = ["a", 1, self.env.ref("base.user_root")]
value = json.loads(value_json, cls=JobDecoder, env=self.env)
@@ -132,7 +149,7 @@ def test_encoder_date(self):
self.assertEqual(json.loads(value_json), expected)
def test_decoder_date(self):
- value_json = '["a", 1, {"_type": "date_isoformat",' '"value": "2017-04-19"}]'
+ value_json = '["a", 1, {"_type": "date_isoformat", "value": "2017-04-19"}]'
expected = ["a", 1, date(2017, 4, 19)]
value = json.loads(value_json, cls=JobDecoder, env=self.env)
self.assertEqual(value, expected)
diff --git a/queue_job/tests/test_queue_job_protected_write.py b/queue_job/tests/test_queue_job_protected_write.py
index eadb16ab9c..9a3a8eb14c 100644
--- a/queue_job/tests/test_queue_job_protected_write.py
+++ b/queue_job/tests/test_queue_job_protected_write.py
@@ -22,9 +22,6 @@ def test_create_error(self):
"uuid": "test",
},
},
- headers={
- "Cookie": f"session_id={self.session.sid};",
- },
)
self.assertEqual("odoo.exceptions.AccessError", str(cm.exception))
diff --git a/queue_job/views/queue_job_function_views.xml b/queue_job/views/queue_job_function_views.xml
index e725920b2c..96f33bb09e 100644
--- a/queue_job/views/queue_job_function_views.xml
+++ b/queue_job/views/queue_job_function_views.xml
@@ -35,13 +35,12 @@
-
-
-
+
+
diff --git a/queue_job/views/queue_job_views.xml b/queue_job/views/queue_job_views.xml
index fba121b21a..e0e35816b7 100644
--- a/queue_job/views/queue_job_views.xml
+++ b/queue_job/views/queue_job_views.xml
@@ -213,11 +213,7 @@
-
+
+
+
+
+
+
+
+
+
+
+
+
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test_queue_job/__manifest__.py b/test_queue_job/__manifest__.py
index fe6c339e98..3cf7243aa7 100644
--- a/test_queue_job/__manifest__.py
+++ b/test_queue_job/__manifest__.py
@@ -3,7 +3,7 @@
{
"name": "Queue Job Tests",
- "version": "18.0.2.0.0",
+ "version": "19.0.1.0.0",
"author": "Camptocamp,Odoo Community Association (OCA)",
"license": "LGPL-3",
"category": "Generic Modules",
@@ -15,5 +15,5 @@
"security/ir.model.access.csv",
"data/queue_job_test_job.xml",
],
- "installable": False,
+ "installable": True,
}
diff --git a/test_queue_job/models/test_models.py b/test_queue_job/models/test_models.py
index 03e8e8a8f9..34d7183c87 100644
--- a/test_queue_job/models/test_models.py
+++ b/test_queue_job/models/test_models.py
@@ -1,6 +1,7 @@
# Copyright 2016 Camptocamp SA
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html)
+import functools
from datetime import datetime, timedelta
from odoo import api, fields, models
@@ -127,15 +128,20 @@ def delay_me_context_key(self):
return "ok"
def _register_hook(self):
- self._patch_method("delay_me", self._patch_job_auto_delay("delay_me"))
- self._patch_method(
- "delay_me_options", self._patch_job_auto_delay("delay_me_options")
+ patched = self._patch_job_auto_delay("delay_me")
+ type(self).delay_me = functools.update_wrapper(patched, type(self).delay_me)
+
+ patched = self._patch_job_auto_delay("delay_me_options")
+ type(self).delay_me_options = functools.update_wrapper(
+ patched, type(self).delay_me_options
)
- self._patch_method(
+
+ patched = self._patch_job_auto_delay(
"delay_me_context_key",
- self._patch_job_auto_delay(
- "delay_me_context_key", context_key="auto_delay_delay_me_context_key"
- ),
+ context_key="auto_delay_delay_me_context_key",
+ )
+ type(self).delay_me_context_key = functools.update_wrapper(
+ patched, type(self).delay_me_context_key
)
return super()._register_hook()
diff --git a/test_queue_job/tests/test_autovacuum.py b/test_queue_job/tests/test_autovacuum.py
index 97aebcba1e..32378a14ac 100644
--- a/test_queue_job/tests/test_autovacuum.py
+++ b/test_queue_job/tests/test_autovacuum.py
@@ -19,7 +19,7 @@ def test_old_jobs_are_deleted_by_cron_job(self):
)
stored = self._create_job()
stored.write({"date_done": date_done})
- self.cron_job.method_direct_trigger()
+ self.env["queue.job"].autovacuum()
self.assertFalse(stored.exists())
def test_autovacuum(self):
diff --git a/test_queue_job/tests/test_job.py b/test_queue_job/tests/test_job.py
index 4d771f5516..52c9290f9b 100644
--- a/test_queue_job/tests/test_job.py
+++ b/test_queue_job/tests/test_job.py
@@ -6,6 +6,7 @@
from unittest import mock
import odoo.tests.common as common
+from odoo import Command
from odoo.addons.queue_job import identity_exact
from odoo.addons.queue_job.delay import DelayableGraph
@@ -32,6 +33,21 @@
class TestJobsOnTestingMethod(JobCommonCase):
"""Test Job"""
+ @classmethod
+ def setUpClass(cls):
+ super().setUpClass()
+ main_company = cls.env.ref("base.main_company")
+ group_user = cls.env.ref("base.group_user")
+ cls.demo_user = cls.env["res.users"].create(
+ {
+ "name": "Demo User (Queue)",
+ "login": "queue_demo_user_3",
+ "company_id": main_company.id,
+ "company_ids": [Command.set([main_company.id])],
+ "group_ids": [Command.set([group_user.id])],
+ }
+ )
+
def test_new_job(self):
"""
Create a job
@@ -387,6 +403,21 @@ def test_job_identity_key_func_exact(self):
class TestJobs(JobCommonCase):
"""Test jobs on other methods or with different job configuration"""
+ @classmethod
+ def setUpClass(cls):
+ super().setUpClass()
+ main_company = cls.env.ref("base.main_company")
+ group_user = cls.env.ref("base.group_user")
+ cls.demo_user = cls.env["res.users"].create(
+ {
+ "name": "Demo User (Queue)",
+ "login": "queue_demo_user_4",
+ "company_id": main_company.id,
+ "company_ids": [Command.set([main_company.id])],
+ "group_ids": [Command.set([group_user.id])],
+ }
+ )
+
def test_description(self):
"""If no description is given to the job, it
should be computed from the function
@@ -490,7 +521,7 @@ def test_job_with_mutable_arguments(self):
self.assertEqual({"mutable_kwarg": {"a": 1}}, job_instance.kwargs)
def test_store_env_su_no_sudo(self):
- demo_user = self.env.ref("base.user_demo")
+ demo_user = self.demo_user
self.env = self.env(user=demo_user)
delayable = self.env["test.queue.job"].with_delay()
test_job = delayable.testing_method()
@@ -500,7 +531,7 @@ def test_store_env_su_no_sudo(self):
self.assertTrue(job_instance.user_id, demo_user)
def test_store_env_su_sudo(self):
- demo_user = self.env.ref("base.user_demo")
+ demo_user = self.demo_user
self.env = self.env(user=demo_user)
delayable = self.env["test.queue.job"].sudo().with_delay()
test_job = delayable.testing_method()
@@ -511,6 +542,22 @@ def test_store_env_su_sudo(self):
class TestJobModel(JobCommonCase):
+ @classmethod
+ def setUpClass(cls):
+ super().setUpClass()
+ User = cls.env["res.users"]
+ main_company = cls.env.ref("base.main_company")
+ group_user = cls.env.ref("base.group_user")
+ cls.demo_user = User.create(
+ {
+ "name": "Demo User (Queue)",
+ "login": "queue_demo_user_5",
+ "company_id": main_company.id,
+ "company_ids": [Command.set([main_company.id])],
+ "group_ids": [Command.set([group_user.id])],
+ }
+ )
+
def test_job_change_state(self):
stored = self._create_job()
stored._change_job_state(DONE, result="test")
@@ -604,17 +651,17 @@ def test_follower_when_write_fail(self):
vals = {
"name": "xx",
"login": "xx",
- "groups_id": [(6, 0, [group.id])],
+ "group_ids": [Command.set([group.id])],
"active": False,
}
inactiveusr = self.user.create(vals)
inactiveusr.partner_id.active = True
- self.assertFalse(inactiveusr in group.users)
+ self.assertFalse(inactiveusr in group.user_ids)
stored = self._create_job()
stored.write({"state": "failed"})
followers = stored.message_follower_ids.mapped("partner_id")
self.assertFalse(inactiveusr.partner_id in followers)
- self.assertFalse({u.partner_id for u in group.users} - set(followers))
+ self.assertFalse({u.partner_id for u in group.user_ids} - set(followers))
def test_wizard_requeue(self):
stored = self._create_job()
@@ -638,7 +685,7 @@ def test_override_channel(self):
self.assertEqual("root.sub.sub", test_job.channel)
def test_job_change_user_id(self):
- demo_user = self.env.ref("base.user_demo")
+ demo_user = self.demo_user
stored = self._create_job()
stored.user_id = demo_user
self.assertEqual(stored.records.env.uid, demo_user.id)
@@ -663,10 +710,10 @@ def setUp(self):
self.simple_user = User.create(
{
"partner_id": self.partner_user.id,
- "company_ids": [(4, main_company.id)],
+ "company_ids": [Command.link(main_company.id)],
"login": "simple_user",
"name": "simple user",
- "groups_id": [],
+ "group_ids": [],
}
)
@@ -684,10 +731,10 @@ def setUp(self):
{
"partner_id": self.other_partner_a.id,
"company_id": self.other_company_a.id,
- "company_ids": [(4, self.other_company_a.id)],
+ "company_ids": [Command.link(self.other_company_a.id)],
"login": "my_login a",
"name": "my user A",
- "groups_id": [(4, grp_queue_job_manager)],
+ "group_ids": [Command.link(grp_queue_job_manager)],
}
)
self.other_partner_b = Partner.create(
@@ -704,10 +751,10 @@ def setUp(self):
{
"partner_id": self.other_partner_b.id,
"company_id": self.other_company_b.id,
- "company_ids": [(4, self.other_company_b.id)],
+ "company_ids": [Command.link(self.other_company_b.id)],
"login": "my_login_b",
"name": "my user B",
- "groups_id": [(4, grp_queue_job_manager)],
+ "group_ids": [Command.link(grp_queue_job_manager)],
}
)
@@ -761,7 +808,7 @@ def test_job_subscription(self):
stored._message_post_on_failure()
users = (
User.search(
- [("groups_id", "=", self.ref("queue_job.group_queue_job_manager"))]
+ [("group_ids", "=", self.ref("queue_job.group_queue_job_manager"))]
)
+ stored.user_id
)
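
Alongside the `groups_id` → `group_ids` field rename, the `(4, id)` / `(6, 0, ids)` magic tuples for x2many writes are replaced by `odoo.fields.Command` helpers throughout these tests. The helpers are thin tuple constructors; as far as I recall they expand like this (pure data, shown stand-alone):

```python
LINK, SET = 4, 6  # Command.LINK and Command.SET enum values

def command_link(rec_id):   # mirrors Command.link(rec_id)
    return (LINK, rec_id, 0)

def command_set(rec_ids):   # mirrors Command.set(rec_ids)
    return (SET, 0, rec_ids)

assert command_link(7) == (4, 7, 0)
assert command_set([1, 2]) == (6, 0, [1, 2])
```
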
diff --git a/test_queue_job/tests/test_json_field.py b/test_queue_job/tests/test_json_field.py
index bfe227bb10..33b6bcdb97 100644
--- a/test_queue_job/tests/test_json_field.py
+++ b/test_queue_job/tests/test_json_field.py
@@ -3,6 +3,7 @@
import json
+from odoo import Command
from odoo.tests import common
# pylint: disable=odoo-addons-relative-import
@@ -11,17 +12,33 @@
class TestJsonField(common.TransactionCase):
+ @classmethod
+ def setUpClass(cls):
+ super().setUpClass()
+ User = cls.env["res.users"]
+ main_company = cls.env.ref("base.main_company")
+ group_user = cls.env.ref("base.group_user")
+ cls.demo_user = User.create(
+ {
+ "name": "Demo User (Queue)",
+ "login": "queue_demo_user_2",
+ "company_id": main_company.id,
+ "company_ids": [Command.set([main_company.id])],
+ "group_ids": [Command.set([group_user.id])],
+ }
+ )
+
# TODO: when migrating to 16.0, adapt checks in queue_job/tests/test_json_field.py
# to verify the context keys are encoded and remove these
def test_encoder_recordset_store_context(self):
- demo_user = self.env.ref("base.user_demo")
+ demo_user = self.demo_user
user_context = {"lang": "en_US", "tz": "Europe/Brussels"}
test_model = self.env(user=demo_user, context=user_context)["test.queue.job"]
value_json = json.dumps(test_model, cls=JobEncoder)
self.assertEqual(json.loads(value_json)["context"], user_context)
def test_encoder_recordset_context_filter_keys(self):
- demo_user = self.env.ref("base.user_demo")
+ demo_user = self.demo_user
user_context = {"lang": "en_US", "tz": "Europe/Brussels"}
tampered_context = dict(user_context, foo=object())
test_model = self.env(user=demo_user, context=tampered_context)[