github
1. tgw
https://github.com/tgw2023/tgw
2. AmazingQuant
https://github.com/zhanggao2013/AmazingQuant
redis主要是为了作为celery的backend使用;
rabbitmq主要是为了作为celery的消息中间件使用;
一、redis
1.安装
按照官网说明即可
https://redis.io/docs/getting-started/
2.环境配置
这里为了简单上手,单节点即可,实际生产可用集群哨兵模式
配置文件
cp /etc/redis/redis.conf /home/zhanggao/redis/redis.conf
修改配置文件
日志路径
logfile "/home/zhanggao/redis/redis-serve.log"
data路径
dir /home/zhanggao/redis
启动服务
redis-server /home/zhanggao/redis/redis.conf
启动客户端
redis-cli -h 127.0.0.1 -p 6378
关闭
redis-cli -p 6378 shutdown
3.Python测试代码
import redis  # redis client module

# Connect to the local Redis server (note: non-default port 6378, matching the
# redis.conf above). decode_responses=True makes get() return str, not bytes.
r = redis.Redis(host='localhost', port=6378, db=2, decode_responses=True)
r.set('name1:jghj', 'runoob1asd')  # set the value for key 'name1:jghj'
print(r['name1:jghj'])             # dict-style read of the same key
print(r.get('name1:jghj'))         # read the key back with get()
# BUG FIX: the original read the never-set key 'name' (always None); read the
# key that was actually written so the type check shows str.
print(type(r.get('name1:jghj')))   # inspect the returned type
二、rabbitmq
1.安装
官网下载并安装Erlang和rabbitmq,注意版本一致。
2.常用命令
#管理命令
#注意命令前面需要加一个rabbitmqctl 空格
#添加用户:
rabbitmqctl.bat add_user <username> <password>
#用户授权
rabbitmqctl.bat set_user_tags <username> administrator
# 删除一个用户
rabbitmqctl.bat delete_user <username>
# 改变用户密码 (也是改变web管理登陆密码)
rabbitmqctl.bat change_password <username> <newpassword>
#服务启动与停止
#安装,启动,停止
rabbitmq-service.bat install 或 rabbitmq-service install
rabbitmq-service.bat stop 或 rabbitmq-service stop
rabbitmq-service.bat start 或 rabbitmq-server start
#启用
rabbitmq-server enable
#禁用
rabbitmq-server disable
3.Python测试代码
(1)发送消息
# NOTE(review): this "(1) send message" section originally contained a pasted
# copy of the redis snippet from above instead of a producer. Replaced with the
# pika producer that matches the consumer in section (2): same direct exchange
# 'direct-logs' and routing key 'hello-1'.
import pika

hostname = 'localhost'
parameters = pika.ConnectionParameters(hostname)
connection = pika.BlockingConnection(parameters)
# Create the channel
channel = connection.channel()
# Declare the same direct exchange the consumer binds its queue to.
channel.exchange_declare(exchange='direct-logs', exchange_type='direct')
# Publish one message routed by the key the consumer subscribes with.
channel.basic_publish(exchange='direct-logs',
                      routing_key='hello-1',
                      body='Hello World!')
print(" [x] Sent 'Hello World!'")
connection.close()
02
rabbitmq
1.安装
官网下载并安装Erlang和rabbitmq,注意版本一致。
2.常用命令
#管理命令
#注意命令前面需要加一个rabbitmqctl 空格
#添加用户:
rabbitmqctl.bat add_user <username> <password>
#用户授权
rabbitmqctl.bat set_user_tags <username> administrator
# 删除一个用户
rabbitmqctl.bat delete_user <username>
# 改变用户密码 (也是改变web管理登陆密码)
rabbitmqctl.bat change_password <username> <newpassword>
#服务启动与停止
#安装,启动,停止
rabbitmq-service.bat install 或 rabbitmq-service install
rabbitmq-service.bat stop 或 rabbitmq-service stop
rabbitmq-service.bat start 或 rabbitmq-server start
#启用
rabbitmq-server enable
#禁用
rabbitmq-server disable
(2)接受消息
# RabbitMQ consumer: bind a private queue to a direct exchange and consume.
import pika

hostname = 'localhost'
parameters = pika.ConnectionParameters(hostname)
connection = pika.BlockingConnection(parameters)
# Create the channel
channel = connection.channel()
# channel.queue_declare(queue='hello-1')
# BUG FIX: the exchange was declared as 'direct1-logs' but the queue was bound
# to 'direct-logs', so the bind targeted a non-existent exchange and failed.
# Use one consistent name for declare and bind.
channel.exchange_declare(exchange='direct-logs',
                         exchange_type='direct')
# exclusive=True with an empty name: the broker generates a private queue that
# is deleted when this connection closes.
result = channel.queue_declare(exclusive=True, queue='')
queue_name = result.method.queue
print(queue_name)
channel.queue_bind(exchange='direct-logs',
                   queue=queue_name,
                   routing_key='hello-1')


def callback(ch, method, properties, body):
    """Handle one delivered message and acknowledge it."""
    print(" [x] Received %r" % (body,))
    print(" [x] %r:%r" % (method.routing_key, body))
    print(" [x] Done")
    ch.basic_ack(delivery_tag=method.delivery_tag)


# Tell rabbitmq to deliver messages from queue_name to callback.
channel.basic_consume(queue_name, callback, consumer_tag="hello-consumer")
# Block and consume; callback fires once per message. Exit with CTRL+C.
print(' [*] Waiting for messages. To exit press CTRL+C')
channel.start_consuming()
4.管理界面
celery
1.安装
pip install celery
pip install kombu
2.目录结构
3.init文件
# Package __init__ for celery_test: creates the Celery application and loads
# its configuration from celery_test/celeryconfig.py.
from celery import Celery
# 'include' pre-imports the task module so workers register its tasks at startup.
app = Celery('celery_test', include=['celery_test.celery_app_task'])
app.config_from_object('celery_test.celeryconfig')
# Start a worker:
# celery worker -A celery_test -l info -n 1 -P eventlet
# celeryconfig with a single-node redis broker:
# celery flower --broker=redis://10.237.102.212:6379/13
# celeryconfig with a redis cluster:
# celery flower -A celery_test -port=5555
4.add_task
# Fan out 30 add() calls as one celery group and poll each result until it
# reaches a terminal state.
import time

from celery import group

from celery_test.celery_app_task import add

data = [1, 2, 3] * 10
r1 = group([add.s(i, i + 2) for i in data]).apply_async()
for async1 in r1:
    while True:
        if async1.successful():
            result = async1.get()
            print(result)
            print('执行完成', time.time())
            # async1.forget() # 将结果删除
            break
        elif async1.failed():
            # BUG FIX: the original printed the failure message but never left
            # the loop, spinning forever on any failed task.
            print('执行失败')
            break
        elif async1.status == 'PENDING':
            print('任务等待中被执行')
        elif async1.status == 'RETRY':
            print('任务异常后正在重试')
        elif async1.status == 'STARTED':
            print('任务已经开始被执行')
        # Avoid a tight busy-wait while polling the result backend.
        time.sleep(0.1)
5.celery_app_task
import time
import numpy as np
from celery_test import app

# BUG FIX: the original executed `a = np.ones(1000000000)` here — an ~8 GB
# allocation at module import that was never used anywhere, bloating every
# worker process that imports this task module. Removed.


@app.task(name='celery_test.celery_app_task.taskA')
def add(x, y):
    """Add two values after a 4-second delay (simulates a slow task).

    Registered as 'celery_test.celery_app_task.taskA', so the CELERY_ROUTES
    entry for taskA routes it to queue 'for_task_A'.
    """
    time.sleep(4)
    return x + y
6.celeryconfig
from kombu import Exchange, Queue
BROKER_URL = 'amqp://guest:[email protected]:15672/' # 使用amqp作为消息代理
# BROKER_URL = 'redis://127.0.0.1:6378/2' # 使用redis作为消息代理
RESULT_BROKER_TRANSPORT_OPTIONS = {"master_name": "mymaster"}
CELERY_RESULT_BACKEND = 'redis://127.0.0.1:6379/1' # 把任务结果存在了Redis
# redis集群哨兵模式---------------
# CELERY_RESULT_BACKEND = 'sentinel://10.237.102.210:26379/4;' \
# 'sentinel://10.237.102.211:26379/4;' \
# 'sentinel://10.237.102.212:26379/4'
# BROKER_URL = 'sentinel://10.237.102.210:26379/3;' \
# 'sentinel://10.237.102.211:26379/3;' \
# 'sentinel://10.237.102.212:26379/3'
#
# BROKER_TRANSPORT_OPTIONS = {
# 'master_name': 'mymaster',
# 'service_name': 'mymaster',
# 'socket_timeout': 6000,
# 'visibility_timeout': 3600,
# }
# CELERY_RESULT_BACKEND_TRANSPORT_OPTIONS = BROKER_TRANSPORT_OPTIONS
# redis集群哨兵模式---------------
IMPORTS = ("celery_test.celery_app_task",)
task_name_list = ['task_A', 'task_B', 'task_C', 'task_D']
CELERY_QUEUES = (
Queue("for_task_A", Exchange("for_task_A"), routing_key="for_task_A"),
Queue("for_task_B", Exchange("for_task_B"), routing_key="for_task_B"),
Queue("for_task_C", Exchange("for_task_C"), routing_key="for_task_C"),
Queue("for_task_D", Exchange("for_task_D"), routing_key="for_task_D")
)
CELERY_ROUTES = (
{
"celery_test.celery_app_task.taskA":
{
'queue': "for_task_A",
"routing_key": "for_task_A"
},
},
{
"celery_test.celery_app_task.taskB":
{
'queue': "for_task_B",
"routing_key": "for_task_B"
},
},
{
"celery_test.celery_app_task.taskC":
{
'queue': "for_task_C",
"routing_key": "for_task_C"
},
},
{
"celery_test.celery_app_task.taskD":
{
'queue': "for_task_D",
"routing_key": "for_task_D"
},
},
)
CELERY_TASK_SERIALIZER = 'msgpack' # 任务序列化和反序列化使用msgpack方案
# CELERY_TASK_SERIALIZER = 'json' # 任务序列化和反序列化使用msgpack方案
CELERY_RESULT_SERIALIZER = 'json' # 读取任务结果一般性能要求不高,所以使用了可读性更好的JSON
CELERY_TASK_RESULT_EXPIRES = 60*24 # 任务过期时间(秒)
CELERY_ACCEPT_CONTENT = ['json', 'msgpack'] # 指定接受的内容类型
CELERY_REJECT_ON_WORKER_LOST = True # 当worker进程意外退出时,task会被放回到队列中
CELERY_ACKS_LATE = True # 只有当worker完成了这个task时,任务才被标记为ack状态
# 并发数
CELERYD_CONCURRENCY = 2