I am able to run celery shared tasks in the flask (called web) container’s flask shell without issue, however they time out when running in a route. Suspecting that the redis container could not be reached, I ran the following in both the container’s flask shell and inside a route. I found the redis container’s ip with docker network inspect <network name>
.
print(f'{redis.Redis(host="172.22.0.2", port=6379).ping()=}')
print(f'{redis.Redis(host="redis", port=6379).ping()=}')
<code>import redis
print(f'{redis.Redis(host="172.22.0.2", port=6379).ping()=}')
print(f'{redis.Redis(host="redis", port=6379).ping()=}')
</code>
import redis
print(f'{redis.Redis(host="172.22.0.2", port=6379).ping()=}')
print(f'{redis.Redis(host="redis", port=6379).ping()=}')
The ip reference runs properly in both flask shell and route. However, the container name reference fails only in the route. I do not understand how the name can properly resolve in the flask shell, but not the route.
docker-compose.yml
dockerfile: ./docker/web/Dockerfile
dockerfile: ./docker/web/Dockerfile
command: /start-celeryworker
dockerfile: ./docker/web/Dockerfile
command: /start-celerybeat
dockerfile: ./docker/web/Dockerfile
dockerfile: ./docker/nginx/Dockerfile
<code>services:
web:
build:
context: .
args:
- redacted
dockerfile: ./docker/web/Dockerfile
command: /start
volumes:
- .:/app
expose:
- 5000
env_file:
- .env
environment:
- FLASK_APP=app
depends_on:
- redis
redis:
image: redis:7-alpine
ports:
- '6379:6379'
celery_worker:
build:
context: .
args:
- redacted
dockerfile: ./docker/web/Dockerfile
command: /start-celeryworker
volumes:
- .:/app
env_file:
- .env
environment:
- FLASK_APP=celery_app
depends_on:
- redis
celery_beat:
build:
context: .
args:
- redacted
dockerfile: ./docker/web/Dockerfile
command: /start-celerybeat
volumes:
- .:/app
env_file:
- .env
environment:
- FLASK_APP=celery_app
depends_on:
- redis
flower:
build:
context: .
args:
- redacted
dockerfile: ./docker/web/Dockerfile
command: /start-flower
volumes:
- .:/app
env_file:
- .env
environment:
- FLASK_APP=celery_app
ports:
- '5555:5555'
depends_on:
- redis
nginx:
build:
context: .
dockerfile: ./docker/nginx/Dockerfile
ports:
- '443:443'
- '80:80'
depends_on:
- web
</code>
services:
web:
build:
context: .
args:
- redacted
dockerfile: ./docker/web/Dockerfile
command: /start
volumes:
- .:/app
expose:
- 5000
env_file:
- .env
environment:
- FLASK_APP=app
depends_on:
- redis
redis:
image: redis:7-alpine
ports:
- '6379:6379'
celery_worker:
build:
context: .
args:
- redacted
dockerfile: ./docker/web/Dockerfile
command: /start-celeryworker
volumes:
- .:/app
env_file:
- .env
environment:
- FLASK_APP=celery_app
depends_on:
- redis
celery_beat:
build:
context: .
args:
- redacted
dockerfile: ./docker/web/Dockerfile
command: /start-celerybeat
volumes:
- .:/app
env_file:
- .env
environment:
- FLASK_APP=celery_app
depends_on:
- redis
flower:
build:
context: .
args:
- redacted
dockerfile: ./docker/web/Dockerfile
command: /start-flower
volumes:
- .:/app
env_file:
- .env
environment:
- FLASK_APP=celery_app
ports:
- '5555:5555'
depends_on:
- redis
nginx:
build:
context: .
dockerfile: ./docker/nginx/Dockerfile
ports:
- '443:443'
- '80:80'
depends_on:
- web
web > init.py
from flask_celeryext import FlaskCeleryExt
from web.celery_utils import make_celery
ext_celery = FlaskCeleryExt(create_celery_app=make_celery)
def create_app(config_name=None):
config_name = os.environ.get('FLASK_CONFIG', 'development')
app.config.from_object(config[config_name])
from .main import main as main_blueprint
app.register_blueprint(main_blueprint)
# shell context for flask cli
@app.shell_context_processor
return {'app':app, 'db':db}
<code>...
from flask_celeryext import FlaskCeleryExt
from web.celery_utils import make_celery
ext_celery = FlaskCeleryExt(create_celery_app=make_celery)
...
def create_app(config_name=None):
if config_name is None:
config_name = os.environ.get('FLASK_CONFIG', 'development')
app = Flask(__name__)
# set config
app.config.from_object(config[config_name])
# set up extensions
ext_celery.init_app(app)
...
# register blueprints
from .main import main as main_blueprint
app.register_blueprint(main_blueprint)
...
# shell context for flask cli
@app.shell_context_processor
def ctx():
return {'app':app, 'db':db}
return app
</code>
...
from flask_celeryext import FlaskCeleryExt
from web.celery_utils import make_celery
ext_celery = FlaskCeleryExt(create_celery_app=make_celery)
...
def create_app(config_name=None):
if config_name is None:
config_name = os.environ.get('FLASK_CONFIG', 'development')
app = Flask(__name__)
# set config
app.config.from_object(config[config_name])
# set up extensions
ext_celery.init_app(app)
...
# register blueprints
from .main import main as main_blueprint
app.register_blueprint(main_blueprint)
...
# shell context for flask cli
@app.shell_context_processor
def ctx():
return {'app':app, 'db':db}
return app
celery_app.py
<code>from web import create_app, ext_celery
app = create_app(config_name='development')
celery = ext_celery.celery
<code>from web import create_app, ext_celery
app = create_app(config_name='development')
celery = ext_celery.celery
</code>
from web import create_app, ext_celery
app = create_app(config_name='development')
celery = ext_celery.celery
app.py
<code>from web import create_app, ext_celery
app = create_app(config_name='development')
celery = ext_celery.celery
<code>from web import create_app, ext_celery
app = create_app(config_name='development')
celery = ext_celery.celery
</code>
from web import create_app, ext_celery
app = create_app(config_name='development')
celery = ext_celery.celery
web > config.py
from datetime import timedelta
from celery.schedules import crontab
BASE_DIR = Path(__file__).parent.parent
FLASKY_ADMIN = os.environ.get('FLASKY_ADMIN')
SECRET_KEY = os.environ.get('SECRET_KEY')
BROKER_URL = 'redis://redis:6379/0'
CELERY_RESULT_BACKEND = 'redis://redis:6379/0'
# Force all queues to be explicitly listed in 'CELERY_TASK_QUEUES' to help prevent typos
CELERY_TASK_CREATE_MISSING_QUEUES = False
CELERY_TASK_DEFAULT_QUEUE = 'default'
'task': 'default:email_reminder',
'schedule': crontab(minute=0, hour=12, day_of_week='mon,wed,fri') # Time is UTC
class DevelopmentConfig(BaseConfig):
TESTING = False # When True, prevents Celery tasks from working properly (flask-mail doesn't work,
# and shared tasks like add() perform synchronously, not asynchronously).
CELERY_BEAT_SCHEDULE = {}
'development': DevelopmentConfig
<code>import os
from pathlib import Path
from kombu import Queue
import json
from datetime import timedelta
from celery.schedules import crontab
class BaseConfig:
BASE_DIR = Path(__file__).parent.parent
TESTING = False
FLASKY_ADMIN = os.environ.get('FLASKY_ADMIN')
SECRET_KEY = os.environ.get('SECRET_KEY')
BROKER_URL = 'redis://redis:6379/0'
CELERY_RESULT_BACKEND = 'redis://redis:6379/0'
# Force all queues to be explicitly listed in 'CELERY_TASK_QUEUES' to help prevent typos
CELERY_TASK_CREATE_MISSING_QUEUES = False
CELERY_TASK_DEFAULT_QUEUE = 'default'
CELERY_TASK_QUEUES = (
Queue('default'),
Queue('high_priority'),
Queue('low_priority'),
)
CELERY_TASK_ROUTES = {
'web.main.tasks.add': {
'queue': 'default',
},
}
CELERY_BEAT_SCHEDULE = {
'send-email-reminder': {
'task': 'default:email_reminder',
'schedule': crontab(minute=0, hour=12, day_of_week='mon,wed,fri') # Time is UTC
}
}
class DevelopmentConfig(BaseConfig):
DEBUG = True
TESTING = False # When True, prevents Celery tasks from working properly (flask-mail doesn't work,
# and shared tasks like add() perform synchronously, not asynchronously).
CELERY_BEAT_SCHEDULE = {}
config = {
'development': DevelopmentConfig
}
</code>
import os
from pathlib import Path
from kombu import Queue
import json
from datetime import timedelta
from celery.schedules import crontab
class BaseConfig:
BASE_DIR = Path(__file__).parent.parent
TESTING = False
FLASKY_ADMIN = os.environ.get('FLASKY_ADMIN')
SECRET_KEY = os.environ.get('SECRET_KEY')
BROKER_URL = 'redis://redis:6379/0'
CELERY_RESULT_BACKEND = 'redis://redis:6379/0'
# Force all queues to be explicitly listed in 'CELERY_TASK_QUEUES' to help prevent typos
CELERY_TASK_CREATE_MISSING_QUEUES = False
CELERY_TASK_DEFAULT_QUEUE = 'default'
CELERY_TASK_QUEUES = (
Queue('default'),
Queue('high_priority'),
Queue('low_priority'),
)
CELERY_TASK_ROUTES = {
'web.main.tasks.add': {
'queue': 'default',
},
}
CELERY_BEAT_SCHEDULE = {
'send-email-reminder': {
'task': 'default:email_reminder',
'schedule': crontab(minute=0, hour=12, day_of_week='mon,wed,fri') # Time is UTC
}
}
class DevelopmentConfig(BaseConfig):
DEBUG = True
TESTING = False # When True, prevents Celery tasks from working properly (flask-mail doesn't work,
# and shared tasks like add() perform synchronously, not asynchronously).
CELERY_BEAT_SCHEDULE = {}
config = {
'development': DevelopmentConfig
}
web > main > tasks.py
<code>from celery import shared_task
logger = get_task_logger(__name__)
@shared_task(name='default:task_test')
logger.info('about to sleep...')
logger.info('executing...')
<code>from celery import shared_task
logger = get_task_logger(__name__)
@shared_task(name='default:task_test')
def task_test():
logger.info('about to sleep...')
time.sleep(5)
logger.info('executing...')
print('abc 123')
time.sleep(5)
logger.info('done.')
</code>
from celery import shared_task
logger = get_task_logger(__name__)
@shared_task(name='default:task_test')
def task_test():
logger.info('about to sleep...')
time.sleep(5)
logger.info('executing...')
print('abc 123')
time.sleep(5)
logger.info('done.')
web > main > views.py
from .tasks import task_test
@main.route('/test_task', methods=['GET'])
print(f'{os.system("ping redis")=}') # returns 0 (success) (after I apt install iputils-ping)
print(f'{redis.Redis(host="172.23.0.2", port=6379).ping()=}')
print(f'{redis.Redis(host="redis", port=6379).ping()=}') # fails here unless commented out, but this function executes properly in flask shell
task_test.delay() # fails here, but this function executes properly in flask shell
<code>from . import main
from .tasks import task_test
@main.route('/test_task', methods=['GET'])
def test_task():
print(f'{os.system("ping redis")=}') # returns 0 (success) (after I apt install iputils-ping)
print(f'{redis.Redis(host="172.23.0.2", port=6379).ping()=}')
print(f'{redis.Redis(host="redis", port=6379).ping()=}') # fails here unless commented out, but this function executes properly in flask shell
task_test.delay() # fails here, but this function executes properly in flask shell
return ''
</code>
from . import main
from .tasks import task_test
@main.route('/test_task', methods=['GET'])
def test_task():
print(f'{os.system("ping redis")=}') # returns 0 (success) (after I apt install iputils-ping)
print(f'{redis.Redis(host="172.23.0.2", port=6379).ping()=}')
print(f'{redis.Redis(host="redis", port=6379).ping()=}') # fails here unless commented out, but this function executes properly in flask shell
task_test.delay() # fails here, but this function executes properly in flask shell
return ''