-
-
Notifications
You must be signed in to change notification settings - Fork 194
Open
Description
core/arq_config.py
from pydantic import Field
from arq.connections import RedisSettings
from core.base_config import MixinSettings
from core.redis_config import redis_config
from redis.backoff import ExponentialBackoff
from redis.retry import Retry
from redis import (
ConnectionError,
TimeoutError
)
class ArqConfig(MixinSettings):
    """Configuration for the arq worker, overridable from the environment.

    NOTE(review): relies on pydantic v1-style ``Field(env=...)`` resolution
    via ``MixinSettings`` — confirm against the project's pydantic version.
    """

    # Redis connection settings are built once, at import time, from the
    # shared DSN exposed by core.redis_config.
    redis_settings: RedisSettings = RedisSettings.from_dsn(redis_config.connection_string)
    # Upper bound on concurrently running jobs (env: MAX_JOBS).
    max_jobs: int = Field(default=100, env='MAX_JOBS')
    # How long job results are retained, in seconds (env: KEEP_RESULT).
    keep_result: int = Field(default=300, env='KEEP_RESULT')
# Shared, module-level settings instance used by the worker definition.
arq_config = ArqConfig()

# Attach a client-side retry policy to the Redis settings: up to 3 attempts
# with exponential backoff, retrying on connection loss and timeouts.
_retry_policy = Retry(ExponentialBackoff(), 3)
arq_config.redis_settings.retry = _retry_policy
arq_config.redis_settings.retry_on_error = [ConnectionError, TimeoutError]
arq_config.redis_settings.retry_on_timeout = True
worker.py
import os
import sys
# Make the project root importable so the `core` and `worker` packages
# resolve when this module is loaded directly by the `arq` CLI rather
# than installed as a package. Assumes this file lives one level below
# the project root — TODO confirm against the repository layout.
sys.path.append(os.path.dirname(os.path.dirname(__file__)))
from arq.connections import RedisSettings
from core.arq_config import arq_config
from worker.calculate_anomaly import calc_anomaly
from worker.calculate_kip import calc_kip
class WorkerSettings:
    """Settings class consumed by the arq CLI.

    Declares the task functions the worker can execute plus the Redis
    connection and concurrency parameters taken from the shared arq_config.
    """

    functions = [calc_anomaly, calc_kip]
    redis_settings = arq_config.redis_settings
    keep_result = arq_config.keep_result
    max_jobs = arq_config.max_jobs
The worker is started with the arq command: `arq src.worker.arq_worker.WorkerSettings`
This works fine, but after Redis is restarted the worker crashes immediately, even though I have configured the reconnection settings.
raised Exception:
redis.exceptions.ConnectionError: Connection closed by server.
Questions for the Arq Team
Can you tell me what to do so that the worker does not crash, but instead tries to reconnect to Redis?
Metadata
Metadata
Assignees
Labels
No labels