From 58fb14a9a60d2620dfb3f4bf980bff4cc2c94e22 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?S=C3=A9bastien=20Alix?=
Date: Fri, 29 Jul 2022 10:54:50 +0200
Subject: [PATCH 01/12] base_import_async: add a dedicated channel
---
base_import_async/data/queue_job_function_data.xml | 7 +++++++
1 file changed, 7 insertions(+)
diff --git a/base_import_async/data/queue_job_function_data.xml b/base_import_async/data/queue_job_function_data.xml
index 22cc8dbab0..fb04a63613 100644
--- a/base_import_async/data/queue_job_function_data.xml
+++ b/base_import_async/data/queue_job_function_data.xml
@@ -1,7 +1,13 @@
+
+ base_import
+
+
+
_split_file
+
_import_one_chunk
+
Date: Tue, 18 Oct 2022 13:04:31 +0200
Subject: [PATCH 02/12] Update test with proper way to pass company to job
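For context, the proper way is to switch the company on the recordset before delaying, so that allowed_company_ids travels with the job context (it is now listed in _job_prepare_context_before_enqueue_keys). A minimal sketch, e.g. from an Odoo shell, using the test model from this patch:

    company = env.ref("base.main_company")
    # with_company() puts allowed_company_ids in the context; since that key is
    # part of _job_prepare_context_before_enqueue_keys(), it is stored with the
    # job and restored when the job is performed.
    env["test.queue.job"].with_company(company).with_delay().testing_method("o", "k")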
---
test_queue_job/models/test_models.py | 2 +-
test_queue_job/tests/test_job.py | 41 ++++++++++++++++++++++++++++
2 files changed, 42 insertions(+), 1 deletion(-)
diff --git a/test_queue_job/models/test_models.py b/test_queue_job/models/test_models.py
index f810dba862..573e2380a9 100644
--- a/test_queue_job/models/test_models.py
+++ b/test_queue_job/models/test_models.py
@@ -40,7 +40,7 @@ class ModelTestQueueJob(models.Model):
# to test the context is serialized/deserialized properly
@api.model
def _job_prepare_context_before_enqueue_keys(self):
- return ("tz", "lang")
+ return ("tz", "lang", "allowed_company_ids")
def testing_method(self, *args, **kwargs):
"""Method used for tests
diff --git a/test_queue_job/tests/test_job.py b/test_queue_job/tests/test_job.py
index 1585f992f0..d542d4b911 100644
--- a/test_queue_job/tests/test_job.py
+++ b/test_queue_job/tests/test_job.py
@@ -185,6 +185,47 @@ def test_postpone(self):
self.assertEqual(job_a.result, "test")
self.assertFalse(job_a.exc_info)
+ def test_company_simple(self):
+ company = self.env.ref("base.main_company")
+ eta = datetime.now() + timedelta(hours=5)
+ test_job = Job(
+ self.env["test.queue.job"].with_company(company).testing_method,
+ args=("o", "k"),
+ kwargs={"return_context": 1},
+ priority=15,
+ eta=eta,
+ description="My description",
+ )
+ test_job.worker_pid = 99999 # normally set on "set_start"
+ test_job.store()
+ job_read = Job.load(self.env, test_job.uuid)
+ self.assertEqual(test_job.func.__func__, job_read.func.__func__)
+ result_ctx = job_read.func(*tuple(test_job.args), **test_job.kwargs)
+ self.assertEqual(result_ctx.get("allowed_company_ids"), company.ids)
+
+ def test_company_complex(self):
+ company1 = self.env.ref("base.main_company")
+ company2 = company1.create({"name": "Queue job company"})
+ companies = company1 | company2
+ self.env.user.write({"company_ids": [(6, False, companies.ids)]})
+ # Ensure the main company is still the first one
+ self.assertEqual(self.env.user.company_id, company1)
+ eta = datetime.now() + timedelta(hours=5)
+ test_job = Job(
+ self.env["test.queue.job"].with_company(company2).testing_method,
+ args=("o", "k"),
+ kwargs={"return_context": 1},
+ priority=15,
+ eta=eta,
+ description="My description",
+ )
+ test_job.worker_pid = 99999 # normally set on "set_start"
+ test_job.store()
+ job_read = Job.load(self.env, test_job.uuid)
+ self.assertEqual(test_job.func.__func__, job_read.func.__func__)
+ result_ctx = job_read.func(*tuple(test_job.args), **test_job.kwargs)
+ self.assertEqual(result_ctx.get("allowed_company_ids"), company2.ids)
+
def test_store(self):
test_job = Job(self.method)
test_job.store()
From 56fdd3eec3e3fb6607071358e13829b3f19e36c0 Mon Sep 17 00:00:00 2001
From: Hans Henrik Gabelgaard
Date: Tue, 31 Oct 2023 15:58:52 +0100
Subject: [PATCH 03/12] [FIX] Access error on ir.attachment create
As a normal user only has read access to delay.export, we need sudo to create an attachment on this record.
Use user 6 (Marc Demo) in the tests instead of admin (user 2).
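Roughly, the failure this avoids (a sketch; demo_user and export_record are placeholders):

    # ir.attachment checks access on the record it is attached to, and a
    # regular user only has read access on delay.export, so without sudo()
    # this raises AccessError:
    env["ir.attachment"].with_user(demo_user).create({
        "name": "res.partner.csv",
        "type": "binary",
        "res_model": "delay.export",
        "res_id": export_record.id,
    })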
---
base_export_async/models/delay_export.py | 20 +++++++++++--------
.../tests/test_base_export_async.py | 4 ++--
2 files changed, 14 insertions(+), 10 deletions(-)
diff --git a/base_export_async/models/delay_export.py b/base_export_async/models/delay_export.py
index a7dde9ba81..549d44a1e1 100644
--- a/base_export_async/models/delay_export.py
+++ b/base_export_async/models/delay_export.py
@@ -89,14 +89,18 @@ def export(self, params):
export_record = self.sudo().create({"user_ids": [(6, 0, users.ids)]})
name = "{}.{}".format(model_name, export_format)
- attachment = self.env["ir.attachment"].create(
- {
- "name": name,
- "datas": base64.b64encode(content),
- "type": "binary",
- "res_model": self._name,
- "res_id": export_record.id,
- }
+ attachment = (
+ self.env["ir.attachment"]
+ .sudo()
+ .create(
+ {
+ "name": name,
+ "datas": base64.b64encode(content),
+ "type": "binary",
+ "res_model": self._name,
+ "res_id": export_record.id,
+ }
+ )
)
url = "{}/web/content/ir.attachment/{}/datas/{}?download=true".format(
diff --git a/base_export_async/tests/test_base_export_async.py b/base_export_async/tests/test_base_export_async.py
index 482fabaee0..d10ca04281 100644
--- a/base_export_async/tests/test_base_export_async.py
+++ b/base_export_async/tests/test_base_export_async.py
@@ -23,7 +23,7 @@
"domain": [],
"context": {"lang": "en_US", "tz": "Europe/Brussels", "uid": 2},
"import_compat": false,
- "user_ids": [2]
+ "user_ids": [6]
}"""
}
@@ -37,7 +37,7 @@
"domain": [],
"context": {"lang": "en_US", "tz": "Europe/Brussels", "uid": 2},
"import_compat": false,
- "user_ids": [2]
+ "user_ids": [6]
}"""
}
From 4ccc68e2f2eea3266dbd45e35db08d32d455346d Mon Sep 17 00:00:00 2001
From: inigogr
Date: Thu, 1 Feb 2024 10:07:01 +0100
Subject: [PATCH 04/12] queue_job: fix retry format with tuple values
Configuring randomized retry intervals (tuple values such as (min, max)) was not possible because the format checks had not been updated to accept them.
This fixes that.
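A short sketch of both accepted forms, mirroring the new tests (same job function xmlid as used there):

    function = env.ref("queue_job.job_function_queue_job__test_job")
    # fixed delays: first retry postponed 10s, second retry 20s
    function.edit_retry_pattern = "{1: 10, 2: 20}"
    # randomized delays: postponement drawn from the (min, max) range
    function.edit_retry_pattern = "{1: (10, 20), 2: (20, 40)}"

Anything that is neither an int nor a 2-element tuple/list of ints is rejected by the constraint.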
---
queue_job/models/queue_job_function.py | 25 +++++++++++-----
test_queue_job/tests/__init__.py | 1 +
test_queue_job/tests/test_job_function.py | 35 +++++++++++++++++++++++
3 files changed, 54 insertions(+), 7 deletions(-)
create mode 100644 test_queue_job/tests/test_job_function.py
diff --git a/queue_job/models/queue_job_function.py b/queue_job/models/queue_job_function.py
index 4f351659bd..ad034b46bc 100644
--- a/queue_job/models/queue_job_function.py
+++ b/queue_job/models/queue_job_function.py
@@ -155,10 +155,12 @@ def _parse_retry_pattern(self):
try:
# as json can't have integers as keys and the field is stored
# as json, convert back to int
- retry_pattern = {
- int(try_count): postpone_seconds
- for try_count, postpone_seconds in self.retry_pattern.items()
- }
+ retry_pattern = {}
+ for try_count, postpone_value in self.retry_pattern.items():
+ if isinstance(postpone_value, int):
+ retry_pattern[int(try_count)] = postpone_value
+ else:
+ retry_pattern[int(try_count)] = tuple(postpone_value)
except ValueError:
_logger.error(
"Invalid retry pattern for job function %s,"
@@ -187,8 +189,9 @@ def job_config(self, name):
def _retry_pattern_format_error_message(self):
return _(
"Unexpected format of Retry Pattern for {}.\n"
- "Example of valid format:\n"
- "{{1: 300, 5: 600, 10: 1200, 15: 3000}}"
+ "Example of valid formats:\n"
+ "{{1: 300, 5: 600, 10: 1200, 15: 3000}}\n"
+ "{{1: (1, 10), 5: (11, 20), 10: (21, 30), 15: (100, 300)}}"
).format(self.name)
@api.constrains("retry_pattern")
@@ -201,12 +204,20 @@ def _check_retry_pattern(self):
all_values = list(retry_pattern) + list(retry_pattern.values())
for value in all_values:
try:
- int(value)
+ self._retry_value_type_check(value)
except ValueError as ex:
raise exceptions.UserError(
record._retry_pattern_format_error_message()
) from ex
+ def _retry_value_type_check(self, value):
+ if isinstance(value, (tuple, list)):
+ if len(value) != 2:
+ raise ValueError
+ [self._retry_value_type_check(element) for element in value]
+ return
+ int(value)
+
def _related_action_format_error_message(self):
return _(
"Unexpected format of Related Action for {}.\n"
diff --git a/test_queue_job/tests/__init__.py b/test_queue_job/tests/__init__.py
index dc59429e71..0405022ce0 100644
--- a/test_queue_job/tests/__init__.py
+++ b/test_queue_job/tests/__init__.py
@@ -4,5 +4,6 @@
from . import test_job
from . import test_job_auto_delay
from . import test_job_channels
+from . import test_job_function
from . import test_related_actions
from . import test_delay_mocks
diff --git a/test_queue_job/tests/test_job_function.py b/test_queue_job/tests/test_job_function.py
new file mode 100644
index 0000000000..17781ac475
--- /dev/null
+++ b/test_queue_job/tests/test_job_function.py
@@ -0,0 +1,35 @@
+import odoo.tests.common as common
+from odoo import exceptions
+
+
+class TestJobFunction(common.TransactionCase):
+ def setUp(self):
+ super(TestJobFunction, self).setUp()
+ self.test_function_model = self.env.ref(
+ "queue_job.job_function_queue_job__test_job"
+ )
+
+ def test_check_retry_pattern_randomized_case(self):
+ randomized_pattern = "{1: (10, 20), 2: (20, 40)}"
+ self.test_function_model.edit_retry_pattern = randomized_pattern
+ self.assertEqual(
+ self.test_function_model.edit_retry_pattern, randomized_pattern
+ )
+
+ def test_check_retry_pattern_fixed_case(self):
+ fixed_pattern = "{1: 10, 2: 20}"
+ self.test_function_model.edit_retry_pattern = fixed_pattern
+ self.assertEqual(self.test_function_model.edit_retry_pattern, fixed_pattern)
+
+ def test_check_retry_pattern_invalid_cases(self):
+ invalid_time_value_pattern = "{1: a, 2: 20}"
+ with self.assertRaises(exceptions.UserError):
+ self.test_function_model.edit_retry_pattern = invalid_time_value_pattern
+
+ invalid_retry_count_pattern = "{a: 10, 2: 20}"
+ with self.assertRaises(exceptions.UserError):
+ self.test_function_model.edit_retry_pattern = invalid_retry_count_pattern
+
+ invalid_randomized_pattern = "{1: (1, 2, 3), 2: 20}"
+ with self.assertRaises(exceptions.UserError):
+ self.test_function_model.edit_retry_pattern = invalid_randomized_pattern
From ca2a8c7d2c8a21279972e14a75c0873cf925976c Mon Sep 17 00:00:00 2001
From: Pierre Verkest
Date: Wed, 10 Apr 2024 18:29:50 +0200
Subject: [PATCH 05/12] [FIX] queue_job_cron_jobrunner: use priority to select
job
* use FIFO: the first created job is treated first
* if priorities differ, the highest-priority (lowest value) job takes precedence
Channel priority is not yet taken into account.
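In other words, pending jobs are now picked by ascending priority first, then by creation date, instead of newest-first. A small sketch of the resulting order, using the same values as the new tests:

    pending = [
        ("job-a", 3, "2024-01-01 10:01:01"),
        ("job-b", 1, "2024-01-01 10:02:01"),
        ("job-c", 2, "2024-01-01 10:03:01"),
    ]
    # ORDER BY priority, date_created: job-b (priority 1) is acquired first;
    # with equal priorities, the earliest date_created wins (FIFO).
    first = sorted(pending, key=lambda job: (job[1], job[2]))[0]
    assert first[0] == "job-b"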
---
queue_job_cron_jobrunner/models/queue_job.py | 2 +-
.../tests/test_queue_job.py | 30 ++++++++++++++++++-
2 files changed, 30 insertions(+), 2 deletions(-)
diff --git a/queue_job_cron_jobrunner/models/queue_job.py b/queue_job_cron_jobrunner/models/queue_job.py
index 2e19556b95..a3c3d721e4 100644
--- a/queue_job_cron_jobrunner/models/queue_job.py
+++ b/queue_job_cron_jobrunner/models/queue_job.py
@@ -40,7 +40,7 @@ def _acquire_one_job(self):
FROM queue_job
WHERE state = 'pending'
AND (eta IS NULL OR eta <= (now() AT TIME ZONE 'UTC'))
- ORDER BY date_created DESC
+ ORDER BY priority, date_created
LIMIT 1 FOR NO KEY UPDATE SKIP LOCKED
"""
)
diff --git a/queue_job_cron_jobrunner/tests/test_queue_job.py b/queue_job_cron_jobrunner/tests/test_queue_job.py
index 3f2e0ef637..54800b792c 100644
--- a/queue_job_cron_jobrunner/tests/test_queue_job.py
+++ b/queue_job_cron_jobrunner/tests/test_queue_job.py
@@ -67,5 +67,33 @@ def test_queue_job_cron_trigger_enqueue_dependencies(self):
self.assertEqual(job_record.state, "done", "Processed OK")
# if the state is "waiting_dependencies", it means the "enqueue_waiting()"
- # step has not been doen when the parent job has been done
+ # step has not been done when the parent job has been done
self.assertEqual(job_record_depends.state, "done", "Processed OK")
+
+ def test_acquire_one_job_use_priority(self):
+ with freeze_time("2024-01-01 10:01:01"):
+ self.env["res.partner"].with_delay(priority=3).create({"name": "test"})
+
+ with freeze_time("2024-01-01 10:02:01"):
+ job = (
+ self.env["res.partner"].with_delay(priority=1).create({"name": "test"})
+ )
+
+ with freeze_time("2024-01-01 10:03:01"):
+ self.env["res.partner"].with_delay(priority=2).create({"name": "test"})
+
+ self.assertEqual(self.env["queue.job"]._acquire_one_job(), job.db_record())
+
+ def test_acquire_one_job_consume_the_oldest_first(self):
+ with freeze_time("2024-01-01 10:01:01"):
+ job = (
+ self.env["res.partner"].with_delay(priority=30).create({"name": "test"})
+ )
+
+ with freeze_time("2024-01-01 10:02:01"):
+ self.env["res.partner"].with_delay(priority=30).create({"name": "test"})
+
+ with freeze_time("2024-01-01 10:03:01"):
+ self.env["res.partner"].with_delay(priority=30).create({"name": "test"})
+
+ self.assertEqual(self.env["queue.job"]._acquire_one_job(), job.db_record())
From dbe64b93201b350c1f0ee6cfa972a9fa2c8ecf71 Mon Sep 17 00:00:00 2001
From: Quoc Duong
Date: Mon, 29 Jul 2024 15:51:24 +0700
Subject: [PATCH 06/12] [FIX] queue_job: missing flush
- an explicit flush is needed or child jobs won't be updated
- no need to forward port, this was fixed already in 16.0+
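The pattern being exercised, as a rough sketch of what happens inside _change_job_state(): dependency resolution runs raw SQL, so ORM changes on queue.job must reach the database before it can see them.

    job_.set_done(result=result)
    job_.store()
    # the new state must be flushed before the raw SQL in enqueue_waiting()
    # reads it, otherwise dependent jobs stay in wait_dependencies:
    record.env["queue.job"].flush_model()
    job_.enqueue_waiting()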
---
test_queue_job/tests/test_job.py | 18 ++++++++++++++++++
1 file changed, 18 insertions(+)
diff --git a/test_queue_job/tests/test_job.py b/test_queue_job/tests/test_job.py
index d542d4b911..72d36f598b 100644
--- a/test_queue_job/tests/test_job.py
+++ b/test_queue_job/tests/test_job.py
@@ -530,6 +530,24 @@ def test_button_done(self):
stored.result, "Manually set to done by %s" % self.env.user.name
)
+ def test_button_done_enqueue_waiting_dependencies(self):
+ job_root = Job(self.env["test.queue.job"].testing_method)
+ job_child = Job(self.env["test.queue.job"].testing_method)
+ job_child.add_depends({job_root})
+
+ DelayableGraph._ensure_same_graph_uuid([job_root, job_child])
+ job_root.store()
+ job_child.store()
+
+ self.assertEqual(job_child.state, WAIT_DEPENDENCIES)
+ record_root = job_root.db_record()
+ record_child = job_child.db_record()
+ # Trigger button done
+ record_root.button_done()
+ # Check the state
+ self.assertEqual(record_root.state, DONE)
+ self.assertEqual(record_child.state, PENDING)
+
def test_requeue(self):
stored = self._create_job()
stored.write({"state": "failed"})
From ff52b57ff51b9cf743c740bf5008e7aee530fd57 Mon Sep 17 00:00:00 2001
From: Quoc Duong
Date: Thu, 1 Aug 2024 11:45:38 +0700
Subject: [PATCH 07/12] [IMP] queue_job: Cancel child jobs when the parent is
cancelled
---
queue_job/job.py | 12 ++++++++++--
queue_job/models/queue_job.py | 2 ++
test_queue_job/tests/test_job.py | 19 +++++++++++++++++++
3 files changed, 31 insertions(+), 2 deletions(-)
diff --git a/queue_job/job.py b/queue_job/job.py
index 920a8a0781..465193963b 100644
--- a/queue_job/job.py
+++ b/queue_job/job.py
@@ -539,8 +539,8 @@ def perform(self):
return self.result
- def enqueue_waiting(self):
- sql = """
+ def _get_common_dependent_jobs_query(self):
+ return """
UPDATE queue_job
SET state = %s
FROM (
@@ -568,9 +568,17 @@ def enqueue_waiting(self):
AND %s = ALL(jobs.parent_states)
AND state = %s;
"""
+
+ def enqueue_waiting(self):
+ sql = self._get_common_dependent_jobs_query()
self.env.cr.execute(sql, (PENDING, self.uuid, DONE, WAIT_DEPENDENCIES))
self.env["queue.job"].invalidate_model(["state"])
+ def cancel_dependent_jobs(self):
+ sql = self._get_common_dependent_jobs_query()
+ self.env.cr.execute(sql, (CANCELLED, self.uuid, CANCELLED, WAIT_DEPENDENCIES))
+ self.env["queue.job"].invalidate_cache(["state"])
+
def store(self):
"""Store the Job"""
job_model = self.env["queue.job"]
diff --git a/queue_job/models/queue_job.py b/queue_job/models/queue_job.py
index 8af7468b7c..ff3723478b 100644
--- a/queue_job/models/queue_job.py
+++ b/queue_job/models/queue_job.py
@@ -328,6 +328,8 @@ def _change_job_state(self, state, result=None):
elif state == CANCELLED:
job_.set_cancelled(result=result)
job_.store()
+ record.env["queue.job"].flush_model()
+ job_.cancel_dependent_jobs()
else:
raise ValueError("State not supported: %s" % state)
diff --git a/test_queue_job/tests/test_job.py b/test_queue_job/tests/test_job.py
index 72d36f598b..48687450f8 100644
--- a/test_queue_job/tests/test_job.py
+++ b/test_queue_job/tests/test_job.py
@@ -15,6 +15,7 @@
RetryableJobError,
)
from odoo.addons.queue_job.job import (
+ CANCELLED,
DONE,
ENQUEUED,
FAILED,
@@ -548,6 +549,24 @@ def test_button_done_enqueue_waiting_dependencies(self):
self.assertEqual(record_root.state, DONE)
self.assertEqual(record_child.state, PENDING)
+ def test_button_cancel_dependencies(self):
+ job_root = Job(self.env["test.queue.job"].testing_method)
+ job_child = Job(self.env["test.queue.job"].testing_method)
+ job_child.add_depends({job_root})
+
+ DelayableGraph._ensure_same_graph_uuid([job_root, job_child])
+ job_root.store()
+ job_child.store()
+
+ self.assertEqual(job_child.state, WAIT_DEPENDENCIES)
+ record_root = job_root.db_record()
+ record_child = job_child.db_record()
+ # Trigger button cancelled
+ record_root.button_cancelled()
+ # Check the state
+ self.assertEqual(record_root.state, CANCELLED)
+ self.assertEqual(record_child.state, CANCELLED)
+
def test_requeue(self):
stored = self._create_job()
stored.write({"state": "failed"})
From 6d1b1cb1a0c345b5c08f195800c44c21df435c89 Mon Sep 17 00:00:00 2001
From: Florent Xicluna
Date: Mon, 16 Sep 2024 13:43:47 +0200
Subject: [PATCH 08/12] [FIX] queue_job: typo
---
queue_job/README.rst | 2 +-
queue_job/readme/USAGE.rst | 2 +-
queue_job/static/description/index.html | 2 +-
3 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/queue_job/README.rst b/queue_job/README.rst
index 343c0813db..4a6fa5ede8 100644
--- a/queue_job/README.rst
+++ b/queue_job/README.rst
@@ -441,7 +441,7 @@ be customized in ``Base._job_prepare_context_before_enqueue_keys``.
When you are developing (ie: connector modules) you might want
to bypass the queue job and run your code immediately.
-To do so you can set `QUEUE_JOB__NO_DELAY=1` in your enviroment.
+To do so you can set `QUEUE_JOB__NO_DELAY=1` in your environment.
**Bypass jobs in tests**
diff --git a/queue_job/readme/USAGE.rst b/queue_job/readme/USAGE.rst
index 84eb38b315..b1a0e6a4cf 100644
--- a/queue_job/readme/USAGE.rst
+++ b/queue_job/readme/USAGE.rst
@@ -286,7 +286,7 @@ be customized in ``Base._job_prepare_context_before_enqueue_keys``.
When you are developing (ie: connector modules) you might want
to bypass the queue job and run your code immediately.
-To do so you can set `QUEUE_JOB__NO_DELAY=1` in your enviroment.
+To do so you can set `QUEUE_JOB__NO_DELAY=1` in your environment.
**Bypass jobs in tests**
diff --git a/queue_job/static/description/index.html b/queue_job/static/description/index.html
index 515bc8dcc7..d38a617fa7 100644
--- a/queue_job/static/description/index.html
+++ b/queue_job/static/description/index.html
@@ -752,7 +752,7 @@
When you are developing (ie: connector modules) you might want
to bypass the queue job and run your code immediately.
-To do so you can set QUEUE_JOB__NO_DELAY=1 in your enviroment.
+To do so you can set QUEUE_JOB__NO_DELAY=1 in your environment.
Bypass jobs in tests
When writing tests on job-related methods is always tricky to deal with
delayed recordsets. To make your testing life easier
From d5b6903ae3b1f42aa245cf961717e5a5b0b7e916 Mon Sep 17 00:00:00 2001
From: Florent Xicluna
Date: Mon, 16 Sep 2024 13:44:12 +0200
Subject: [PATCH 09/12] [IMP] queue_job: add filter on Date Created
---
queue_job/views/queue_job_views.xml | 21 +++++++++++++++++++++
1 file changed, 21 insertions(+)
diff --git a/queue_job/views/queue_job_views.xml b/queue_job/views/queue_job_views.xml
index a7099254d0..40d060931a 100644
--- a/queue_job/views/queue_job_views.xml
+++ b/queue_job/views/queue_job_views.xml
@@ -250,6 +250,22 @@
string="Cancelled"
domain="[('state', '=', 'cancelled')]"
/>
+
+
+
+
+
From a7431d540f252e676f1b78364af73190ccc202c0 Mon Sep 17 00:00:00 2001
From: Florent Xicluna
Date: Thu, 27 Jan 2022 11:15:55 +0100
Subject: [PATCH 10/12] [REF] remove explicit 'object' inheritance
---
queue_job/delay.py | 2 +-
queue_job/job.py | 2 +-
queue_job/jobrunner/channels.py | 10 +++++-----
queue_job/jobrunner/runner.py | 4 ++--
test_queue_job/tests/test_job.py | 2 +-
5 files changed, 10 insertions(+), 10 deletions(-)
diff --git a/queue_job/delay.py b/queue_job/delay.py
index e46e95aed9..726e850494 100644
--- a/queue_job/delay.py
+++ b/queue_job/delay.py
@@ -609,7 +609,7 @@ def _execute_direct(self):
self._generated_job.perform()
-class DelayableRecordset(object):
+class DelayableRecordset:
"""Allow to delay a method for a recordset (shortcut way)
Usage::
diff --git a/queue_job/job.py b/queue_job/job.py
index 465193963b..288b3e0421 100644
--- a/queue_job/job.py
+++ b/queue_job/job.py
@@ -105,7 +105,7 @@ def identity_exact_hasher(job_):
@total_ordering
-class Job(object):
+class Job:
"""A Job is a task to execute. It is the in-memory representation of a job.
Jobs are stored in the ``queue.job`` Odoo Model, but they are handled
diff --git a/queue_job/jobrunner/channels.py b/queue_job/jobrunner/channels.py
index 6812aa4960..468fb5760d 100644
--- a/queue_job/jobrunner/channels.py
+++ b/queue_job/jobrunner/channels.py
@@ -14,7 +14,7 @@
_logger = logging.getLogger(__name__)
-class PriorityQueue(object):
+class PriorityQueue:
"""A priority queue that supports removing arbitrary objects.
Adding an object already in the queue is a no op.
@@ -103,7 +103,7 @@ def pop(self):
@total_ordering
-class ChannelJob(object):
+class ChannelJob:
"""A channel job is attached to a channel and holds the properties of a
job that are necessary to prioritise them.
@@ -205,7 +205,7 @@ def __lt__(self, other):
return self.sorting_key() < other.sorting_key()
-class ChannelQueue(object):
+class ChannelQueue:
"""A channel queue is a priority queue for jobs.
Jobs with an eta are set aside until their eta is past due, at
@@ -334,7 +334,7 @@ def get_wakeup_time(self, wakeup_time=0):
return wakeup_time
-class Channel(object):
+class Channel:
"""A channel for jobs, with a maximum capacity.
When jobs are created by queue_job modules, they may be associated
@@ -581,7 +581,7 @@ def split_strip(s, sep, maxsplit=-1):
return [x.strip() for x in s.split(sep, maxsplit)]
-class ChannelManager(object):
+class ChannelManager:
"""High level interface for channels
This class handles:
diff --git a/queue_job/jobrunner/runner.py b/queue_job/jobrunner/runner.py
index 25823a9973..025c228c62 100644
--- a/queue_job/jobrunner/runner.py
+++ b/queue_job/jobrunner/runner.py
@@ -259,7 +259,7 @@ def urlopen():
thread.start()
-class Database(object):
+class Database:
def __init__(self, db_name):
self.db_name = db_name
connection_info = _connection_info_for(db_name)
@@ -344,7 +344,7 @@ def set_job_enqueued(self, uuid):
)
-class QueueJobRunner(object):
+class QueueJobRunner:
def __init__(
self,
scheme="http",
diff --git a/test_queue_job/tests/test_job.py b/test_queue_job/tests/test_job.py
index 48687450f8..4ff3c9233e 100644
--- a/test_queue_job/tests/test_job.py
+++ b/test_queue_job/tests/test_job.py
@@ -89,7 +89,7 @@ def test_infinite_retryable_error(self):
self.assertEqual(test_job.retry, 1)
def test_on_instance_method(self):
- class A(object):
+ class A:
def method(self):
pass
From 4ae757cf402ca25e0edeeb7f4f9ed9e11bdc0a22 Mon Sep 17 00:00:00 2001
From: Florent Xicluna
Date: Thu, 26 Dec 2024 17:28:17 +0100
Subject: [PATCH 11/12] [REF] remove explicit super() arguments
---
queue_job/tests/common.py | 2 +-
queue_job_subscribe/tests/test_job_subscribe.py | 2 +-
test_queue_job/models/test_models.py | 2 +-
test_queue_job/tests/test_job.py | 2 +-
test_queue_job/tests/test_job_function.py | 2 +-
5 files changed, 5 insertions(+), 5 deletions(-)
diff --git a/queue_job/tests/common.py b/queue_job/tests/common.py
index c463d3456d..13f1f5f832 100644
--- a/queue_job/tests/common.py
+++ b/queue_job/tests/common.py
@@ -428,7 +428,7 @@ def __init__(
def setUp(self):
"""Log an extra statement which test is started."""
- super(OdooDocTestCase, self).setUp()
+ super().setUp()
logging.getLogger(__name__).info("Running tests for %s", self._dt_test.name)
diff --git a/queue_job_subscribe/tests/test_job_subscribe.py b/queue_job_subscribe/tests/test_job_subscribe.py
index 0f1fcddf48..935f15f74a 100644
--- a/queue_job_subscribe/tests/test_job_subscribe.py
+++ b/queue_job_subscribe/tests/test_job_subscribe.py
@@ -8,7 +8,7 @@
class TestJobSubscribe(common.TransactionCase):
def setUp(self):
- super(TestJobSubscribe, self).setUp()
+ super().setUp()
grp_queue_job_manager = self.ref("queue_job.group_queue_job_manager")
self.other_partner_a = self.env["res.partner"].create(
{"name": "My Company a", "is_company": True, "email": "test@tes.ttest"}
diff --git a/test_queue_job/models/test_models.py b/test_queue_job/models/test_models.py
index 573e2380a9..03fa792137 100644
--- a/test_queue_job/models/test_models.py
+++ b/test_queue_job/models/test_models.py
@@ -76,7 +76,7 @@ def job_with_retry_pattern__no_zero(self):
return
def mapped(self, func):
- return super(ModelTestQueueJob, self).mapped(func)
+ return super().mapped(func)
def job_alter_mutable(self, mutable_arg, mutable_kwarg=None):
mutable_arg.append(2)
diff --git a/test_queue_job/tests/test_job.py b/test_queue_job/tests/test_job.py
index 4ff3c9233e..d7414ef7aa 100644
--- a/test_queue_job/tests/test_job.py
+++ b/test_queue_job/tests/test_job.py
@@ -650,7 +650,7 @@ class TestJobStorageMultiCompany(common.TransactionCase):
"""Test storage of jobs"""
def setUp(self):
- super(TestJobStorageMultiCompany, self).setUp()
+ super().setUp()
self.queue_job = self.env["queue.job"]
grp_queue_job_manager = self.ref("queue_job.group_queue_job_manager")
User = self.env["res.users"]
diff --git a/test_queue_job/tests/test_job_function.py b/test_queue_job/tests/test_job_function.py
index 17781ac475..320b4973c5 100644
--- a/test_queue_job/tests/test_job_function.py
+++ b/test_queue_job/tests/test_job_function.py
@@ -4,7 +4,7 @@
class TestJobFunction(common.TransactionCase):
def setUp(self):
- super(TestJobFunction, self).setUp()
+ super().setUp()
self.test_function_model = self.env.ref(
"queue_job.job_function_queue_job__test_job"
)
From 10f7161f97f06411816d1f63b8607ef9c397356b Mon Sep 17 00:00:00 2001
From: Lois Rilo
Date: Fri, 11 Jun 2021 16:11:14 +0200
Subject: [PATCH 12/12] [13.0][FIX] queue_job_cron: channel_id must be
storable.
Otherwise, you cannot use any channel other than the default (root.ir_cron).
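With the field stored, a cron can keep a dedicated channel; a minimal sketch (the channel and cron values are illustrative only):

    channel = env["queue.job.channel"].create(
        {"name": "my_cron_channel", "parent_id": env.ref("queue_job.channel_root").id}
    )
    env["ir.cron"].create(
        {
            "name": "Nightly cleanup (queued)",
            "model_id": env.ref("base.model_res_partner").id,
            "state": "code",
            "code": "model.search([('active', '=', False)])",
            "run_as_queue_job": True,
            "channel_id": channel.id,  # no longer reset, now that the field is stored
        }
    )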
---
queue_job_cron/models/ir_cron.py | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/queue_job_cron/models/ir_cron.py b/queue_job_cron/models/ir_cron.py
index 7e4f5b848d..bb09ed075e 100644
--- a/queue_job_cron/models/ir_cron.py
+++ b/queue_job_cron/models/ir_cron.py
@@ -28,13 +28,16 @@ class IrCron(models.Model):
comodel_name="queue.job.channel",
compute="_compute_run_as_queue_job",
readonly=False,
+ store=True,
string="Channel",
)
@api.depends("run_as_queue_job")
def _compute_run_as_queue_job(self):
for cron in self:
- if cron.run_as_queue_job and not cron.channel_id:
+ if cron.channel_id:
+ continue
+ if cron.run_as_queue_job:
cron.channel_id = self.env.ref("queue_job_cron.channel_root_ir_cron").id
else:
cron.channel_id = False