1. Introduction

Celery is a simple, flexible, and reliable distributed system for processing large volumes of messages, while providing operators with the tools needed to maintain such a system. It is a task queue focused on real-time processing, and it also supports task scheduling.
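A minimal sketch of the pattern (assuming a local Redis broker and that the celery package is installed; the module name minimal_demo is illustrative):

# minimal_demo.py -- a minimal sketch, assuming Redis on localhost
from celery import Celery

app = Celery('minimal_demo', broker='redis://127.0.0.1:6379/0')

@app.task
def hello(name):
    return 'hello, %s' % name

# a producer enqueues work with:  hello.delay('world')
# a worker is started with:       celery -A minimal_demo worker --loglevel=info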

2. More Examples (Celery)

2.1 Example 1

  • config.py
# config.py
# Celery settings
BROKER_URL = 'amqp://username:password@localhost:5672/yourvhost'
CELERY_RESULT_BACKEND = 'redis://localhost:6379/0'
CELERY_TASK_SERIALIZER = 'msgpack'
CELERY_RESULT_SERIALIZER = 'msgpack'
CELERY_TASK_RESULT_EXPIRES = 60 * 60 * 24
CELERY_ACCEPT_CONTENT = ["msgpack"]
CELERY_DEFAULT_QUEUE = "default"   
CELERY_QUEUES = {
    "default": { # 这是上面指定的默认队列
        "exchange": "default",
        "exchange_type": "direct",
        "routing_key": "default"
    }
}
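A side note: these are the old uppercase setting names from Celery 3.x, which newer versions still accept but deprecate in favor of lowercase names (the msgpack serializer also requires the msgpack package to be installed). A rough equivalent in the newer style:

# config.py -- the same settings in the lowercase style (Celery >= 4.0)
broker_url = 'amqp://username:password@localhost:5672/yourvhost'
result_backend = 'redis://localhost:6379/0'
task_serializer = 'msgpack'
result_serializer = 'msgpack'
result_expires = 60 * 60 * 24
accept_content = ['msgpack']
task_default_queue = 'default'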
  • tasks.py
# tasks.py
from app import celery

@celery.task
def add(x, y):
    return x + y

@celery.task(name="sub")
def sub(x, y):
    return x - y
  • app.py
# app.py --- initialize the Celery application
from celery import Celery
import config

celery = Celery(__name__, include=["tasks"])  # modules the worker should import
# load settings from the config module
celery.config_from_object(config)

if __name__ == '__main__':
    # import here, not at module level, to avoid a circular import
    # (tasks.py imports `celery` from this module)
    from tasks import add, sub
    add.apply_async((2, 2),
        routing_key='default',
        priority=0,
        exchange='default')
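With these three files in one directory, the worker for this example would be started along these lines, and the producer run as a script:

celery -A app worker --loglevel=info
python app.py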

2.2 Example 2

  • config.py
# coding: utf-8
from celery import Celery

celery_broker = 'amqp://guest@127.0.0.1//'
celery_backend = 'amqp://guest@127.0.0.1//'

# Add tasks here
CELERY_IMPORTS = (
    'tasks',
)

app = Celery('celery', broker=celery_broker,
             backend=celery_backend, include=CELERY_IMPORTS)

app.conf.update(
    CELERY_ACKS_LATE=True,  # ack after the task finishes, so tasks are redelivered if a worker dies mid-task
    CELERY_ACCEPT_CONTENT=['pickle', 'json'],
    CELERY_TASK_SERIALIZER='json',
    CELERY_RESULT_SERIALIZER='json',
    # number of concurrent worker processes
    CELERYD_CONCURRENCY=4,
    # recycle each worker process after 500 tasks, to guard against memory leaks
    CELERYD_MAX_TASKS_PER_CHILD=500,
    BROKER_HEARTBEAT=0,  # disable broker heartbeats
    CELERYD_TASK_TIME_LIMIT=12 * 30,  # hard per-task time limit, in seconds
)

# timezone
app.conf.timezone = 'Asia/Shanghai'
# whether Celery stores datetimes internally as UTC
app.conf.enable_utc = True
# periodic task (beat) schedule
from datetime import timedelta
from celery.schedules import crontab
app.conf.beat_schedule = {
    'sub': {
        'task': 'tasks.sub',
        'schedule': timedelta(seconds=3),
        # or, every Monday at 8:00 AM:
        # 'schedule': crontab(hour=8, day_of_week=1),
        'args': (300, 150),
    }
}
  • tasks.py
#coding=utf-8
from config import app
from celery.signals import worker_process_init, worker_process_shutdown

@worker_process_init.connect
def init_worker(*args, **kwargs):
    # initialize per-process resources here
    pass

@worker_process_shutdown.connect
def release_worker(*args, **kwargs):
    # release per-process resources here
    pass


# decorate plain functions as Celery tasks
@app.task
def add(x, y):
    return x + y

@app.task
def sub(x, y):
    return x - y
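The two signals above fire once per worker process rather than once per task, which makes them a natural home for per-process resources. A hypothetical sketch (the Redis client and the `conn` variable are illustrative, not part of the original example):

# hypothetical: one Redis connection per worker process, managed via signals
import redis
from celery.signals import worker_process_init, worker_process_shutdown

conn = None

@worker_process_init.connect
def init_worker(*args, **kwargs):
    global conn
    conn = redis.StrictRedis(host='127.0.0.1', port=6379, db=0)

@worker_process_shutdown.connect
def release_worker(*args, **kwargs):
    if conn is not None:
        conn.connection_pool.disconnect()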
  • main.py
# coding=utf-8

from tasks import add

if __name__ == '__main__':
    # delay() and apply_async() both return an AsyncResult object
    # ("async" is a reserved word since Python 3.7, so don't use it as a name)
    async_result = add.apply_async((1, 100))
    if async_result.successful():
        result = async_result.get()
        print(result)
    elif async_result.failed():
        print('task failed')
    elif async_result.status == 'PENDING':
        print('task is waiting to be executed')
    elif async_result.status == 'RETRY':
        print('task is being retried after an error')
    elif async_result.status == 'STARTED':
        print('task has started executing')
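Since the Celery app in this example is created in config.py, the worker and the beat scheduler (which drives the beat_schedule entries above) would both be pointed at that module, roughly:

celery -A config worker --loglevel=info
celery -A config beat --loglevel=info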

2.3 Example 3

  • task1.py
# -*- coding: utf-8 -*-

# using Celery
import time
from celery import Celery

# create a Celery application instance
app = Celery('celery_demo', broker='redis://127.0.0.1:6379/15')

@app.task
def add(a, b):
    count = a + b
    print('task function is running....')
    time.sleep(1)
    return count
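Note that this app configures only a broker, so the AsyncResult returned to the caller carries a task id but no retrievable return value. If results are needed, a result backend would be added as well, e.g. (a sketch; db 14 is an arbitrary choice):

app = Celery('celery_demo',
             broker='redis://127.0.0.1:6379/15',
             backend='redis://127.0.0.1:6379/14')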
  • app1.py
import time
from task1 import add


def notify(a, b):
    result = add.delay(a, b)
    return result


if __name__ == '__main__':
    for i in range(5):
        time.sleep(1)
        result = notify(i, 100)
        print(result)

First start the worker (the -P eventlet pool assumes the eventlet package is installed; without it, the default prefork pool can be used):

celery -A task1 worker -l info -P eventlet

Then run the producer script:

python app1.py

2.4 Example 4

  • celery_config.py
# -*- coding: utf-8 -*-
from __future__ import absolute_import

from celery.schedules import crontab
# broker
BROKER_URL = 'redis://localhost:6379/6'
# result backend
CELERY_RESULT_BACKEND = 'redis://127.0.0.1:6379/5'
# default worker queue
CELERY_DEFAULT_QUEUE = 'default'
# modules containing async tasks
CELERY_IMPORTS = (
    "tasks",
)

from datetime import timedelta
# celery beat
CELERYBEAT_SCHEDULE = {
    'add': {
        'task': 'tasks.add',
        'schedule': timedelta(seconds=10),
        'args': (1, 12)
    },
    # every 10 seconds
    'task1': {
        'task': 'tasks.add',
        'schedule': timedelta(seconds=10),
        'args': (2, 8)
    },
    # every day at 15:00
    'task2': {
        'task': 'tasks.add',
        'schedule': crontab(hour=15, minute=0),
        'args': (9, 9)
    }
}
  • celery_app.py
from __future__ import absolute_import
from celery import Celery

app = Celery('celery_app')
app.config_from_object('celery_config')
  • tasks.py
from celery_app import app

@app.task(queue='default')
def add(x, y):
    return x + y

@app.task(queue='default')
def sub(x, y):
    return x - y
  • app.py
import os
import sys

# sys.path.append(os.path.abspath('.'))
sys.path.append(os.path.abspath('..'))
from tasks import add

def add_loop():
    ret = add.apply_async((1, 2), queue='default')
    print(type(ret))
    return ret

if __name__ == '__main__':
    ret = add_loop()
    print(ret.get())
    print(ret.status)
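One caveat: ret.get() blocks until the task finishes, and indefinitely if no worker is running, so a timeout is commonly passed:

print(ret.get(timeout=10))  # raises celery.exceptions.TimeoutError after 10s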
  • To run: (1) in one terminal: celery -A celery_app worker -Q default --loglevel=info (2) in another terminal: celery -A celery_app beat (3) then execute: python app.py

  • Start a worker

celery -A celery_app worker --loglevel=info
#celery -A celery_app worker --loglevel=info --concurrency=10
  • Start the beat scheduler for periodic tasks:
celery -A celery_app beat --loglevel=info
  • Start the worker and beat together:
celery -A celery_app worker --beat --loglevel=info

Conclusion

If this method or code was even a little useful to you, feel free to give the author a like or buy them a coffee; ╮( ̄▽ ̄)╭ If the method or code isn't great //(ㄒoㄒ)//, leave a comment and the author will keep improving; o_O??? If you need custom development of related features, leave a comment or message the author; (✿◡‿◡) Thanks for all your support! ( ´ ▽´ )ノ ( ´ ▽´)っ!!!