├── UTILS
├── __init__.py
├── config.py
├── config_port.py
├── time_utils.py
├── df_utils.py
├── upload_pic.py
├── db_sheets.py
├── rabbitmq_utils.py
└── utils.py
├── ta-lib-0.4.0-src.tar.gz
├── docker_cmd_save_tushare.sh
├── docker_cmd_user_send.sh
├── docker_cmd_save_policies.sh
├── docker_cmd.sh
├── docker_cmd_phddns.sh
├── docker_cmd_server.sh
├── policies.py
├── .gitignore
├── Dockerfile_phddns
├── Dockerfile_base
├── DATABASE
├── __init__.py
├── _DataBase.py
└── database_pymongo.py
├── Dockerfile_image
├── docker-compose-sys.yml
├── MODEL
├── magic_nine_turns.py
└── macd_5_minute.py
├── save_user_send.py
├── docker-compose.yml
├── save_policies.py
├── func.py
├── README.md
├── .github
└── workflows
│ ├── docker-image.yml
│ └── docker-image_base.yml.bak
├── save_tushare.py
└── server.py
/UTILS/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/UTILS/config.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
VERSION = "0.0.15"  # release version, bumped manually; logged at startup by each entry point
LOGGING_LEVEL = logging.INFO  # root-logger level applied by every service (see save_*.py, func.py)
5 |
--------------------------------------------------------------------------------
/ta-lib-0.4.0-src.tar.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zhangsheng377/stock/HEAD/ta-lib-0.4.0-src.tar.gz
--------------------------------------------------------------------------------
/docker_cmd_save_tushare.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # git pull && python3 save_tushare.py
3 | python3 save_tushare.py
4 |
--------------------------------------------------------------------------------
/docker_cmd_user_send.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # git pull && python3 save_user_send.py
3 | python3 save_user_send.py
4 |
--------------------------------------------------------------------------------
/docker_cmd_save_policies.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # git pull && python3 save_policies.py
3 | python3 save_policies.py
4 |
--------------------------------------------------------------------------------
/docker_cmd.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # git pull && gunicorn --bind 0.0.0.0:22222 func
3 | gunicorn --bind 0.0.0.0:22222 func
4 |
--------------------------------------------------------------------------------
/docker_cmd_phddns.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | phddns start && phddns status
3 |
4 | while true
5 | do
6 | sleep 99999
7 | done
--------------------------------------------------------------------------------
/docker_cmd_server.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # git pull && gunicorn --bind 0.0.0.0:5000 server
3 | gunicorn --bind 0.0.0.0:5000 server
4 |
--------------------------------------------------------------------------------
/policies.py:
--------------------------------------------------------------------------------
1 | from MODEL import macd_5_minute, magic_nine_turns
2 |
# Registry mapping a policy name to its handler function.
# NOTE(review): both MODEL modules spell the handler "handel" (sic) — the
# misspelling is part of their public interface, so it is kept here.
policies = {
    "macd_5_minute": macd_5_minute.handel,
    "magic_nine_turns": magic_nine_turns.handel,
}
7 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .idea
2 | .ipynb_checkpoints
3 | __pycache__
4 | *.xlsx
5 | config_aria2.py
6 | UTILS/config_qiniu.py
7 | UTILS/config_rabbitmq.py
8 | ftqq_tokens.py
9 | test*.py
10 | *.ipynb
11 | ta-lib/
12 | tmp/
--------------------------------------------------------------------------------
/UTILS/config_port.py:
--------------------------------------------------------------------------------
# Backing-service endpoints. All services live on the same LAN host;
# ports match the docker-compose-sys.yml mappings.
mongodb_host = '192.168.10.5'
mongodb_port = 27017

rabbitmq_host = '192.168.10.5'
rabbitmq_port = 5672

redis_host = '192.168.10.5'
redis_port = 6379

# The func.py Flask app (gunicorn on 22222, see docker_cmd.sh / docker-compose.yml).
user_send_host = '192.168.10.5'
user_send_port = 22222
12 |
--------------------------------------------------------------------------------
/UTILS/time_utils.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime, timedelta
2 |
3 |
def get_rest_seconds():
    """Return the number of seconds from now until 08:00 tomorrow.

    Used as a redis TTL so cached policy results expire before the next
    trading day begins (see save_policies.py and db_sheets.py).

    Returns:
        int: seconds remaining until tomorrow 08:00 (always > 0).
    """
    now = datetime.now()

    today_begin = datetime(now.year, now.month, now.day, 8, 0, 0)
    tomorrow_begin = today_begin + timedelta(days=1)

    # BUG FIX: the original used `.seconds`, which discards the day component
    # of the timedelta — for calls made before 08:00 the delta exceeds one
    # day and the TTL was under-reported by 24 hours.
    return int((tomorrow_begin - now).total_seconds())
12 |
--------------------------------------------------------------------------------
/Dockerfile_phddns:
--------------------------------------------------------------------------------
1 | FROM ubuntu:latest
2 | ENTRYPOINT []
3 |
4 | RUN sed -i s@/archive.ubuntu.com/@/mirrors.aliyun.com/@g /etc/apt/sources.list
5 | RUN apt clean && apt update && apt install git python3 python3-pip wget systemctl -y
6 |
7 | COPY phddns-5.0.0-amd64.deb phddns-5.0.0-amd64.deb
8 | RUN dpkg -i phddns-5.0.0-amd64.deb
9 |
10 | COPY docker_cmd_phddns.sh docker_cmd_phddns.sh
11 |
12 | CMD ["/bin/bash", "docker_cmd_phddns.sh"]
13 |
--------------------------------------------------------------------------------
/Dockerfile_base:
--------------------------------------------------------------------------------
1 | FROM zhangsheng377/ubuntu:latest
2 | # ENTRYPOINT []
3 |
4 | COPY ta-lib-0.4.0-src.tar.gz ta-lib-0.4.0-src.tar.gz
5 | RUN tar -zxvf ta-lib-0.4.0-src.tar.gz && cd /ta-lib && ./configure --prefix=/usr && make && make install
6 |
7 | # RUN pip3 config set global.index-url https://pypi.tuna.tsinghua.edu.cn/simple
8 | RUN pip3 install numpy pandas requests TA-Lib tabulate pymongo matplotlib qiniu redis tushare pika Flask gunicorn xmltodict
9 |
10 | # CMD ["/bin/bash"]
11 |
--------------------------------------------------------------------------------
/DATABASE/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from __future__ import unicode_literals
3 |
4 | from UTILS.config_port import mongodb_host, mongodb_port
5 |
6 |
def database_factory(database_name, sheet_name, model="pymongo"):
    """Create a sheet (collection) accessor for the requested backend.

    Args:
        database_name: mongo database name.
        sheet_name: mongo collection ("sheet") name.
        model: backend identifier; only "pymongo" is implemented.

    Returns:
        A DataBasePyMongo bound to the configured host/port.

    Raises:
        ValueError: if `model` names an unknown backend.
    """
    if model == "pymongo":
        # Imported lazily so the module can be loaded without pymongo installed.
        from .database_pymongo import DataBasePyMongo
        return DataBasePyMongo(database_name=database_name, sheet_name=sheet_name, host=mongodb_host, port=mongodb_port)
    # Removed dead `pass` that followed this raise.
    raise ValueError('model name: {} can not find.'.format(model))
14 |
--------------------------------------------------------------------------------
/Dockerfile_image:
--------------------------------------------------------------------------------
1 | FROM zhangsheng377/stock_base:latest
2 | # ENTRYPOINT []
3 |
4 | RUN git clone https://github.com/zhangsheng377/stock.git --depth=1
5 | ARG QINIU_ACCESS_KEY
6 | ARG QINIU_SECRET_KEY
7 | RUN echo "access_key = \"$QINIU_ACCESS_KEY\" \n\
8 | secret_key = \"$QINIU_SECRET_KEY\" "\
9 | > /stock/UTILS/config_qiniu.py
10 |
11 | ARG RABBITMQ_USER
12 | ARG RABBITMQ_PASSWORD
13 | RUN echo "rabbitmq_user = \"$RABBITMQ_USER\" \n\
14 | rabbitmq_password = \"$RABBITMQ_PASSWORD\" "\
15 | > /stock/UTILS/config_rabbitmq.py
16 |
17 | WORKDIR /stock
18 |
19 | # CMD ["/bin/bash"]
20 |
--------------------------------------------------------------------------------
/UTILS/df_utils.py:
--------------------------------------------------------------------------------
1 | import pandas as pd
2 |
def get_minute_df(df) -> pd.DataFrame:
    """Collapse per-tick rows into one row per minute.

    Keeps the last tick of each minute and attaches the minute's average
    price as a new 'minute_price' column.

    Args:
        df: tick DataFrame with a 'time' column (string, 'HH:MM:SS'-like,
            the first five characters identify the minute) and a 'price'
            column convertible to float.

    Returns:
        pd.DataFrame: one row per minute with an added 'minute' and
        'minute_price' column. Note: mutates ``df`` in place by adding
        'minute' and casting 'price' (original behavior, kept).
    """
    df['minute'] = df['time'].str.slice(stop=5)
    df['price'] = df['price'].astype(float)
    avg_minute_price = df.groupby('minute')['price'].mean().rename('minute_price')
    data_minute_df = df.drop_duplicates(subset=['minute'], keep='last')
    data_minute_df = data_minute_df.merge(avg_minute_price, left_on='minute', right_index=True, how='left')
    # FIX: fillna(method='ffill') is deprecated (removed in pandas 2.x);
    # Series.ffill() is the equivalent modern call.
    data_minute_df['minute_price'] = data_minute_df['minute_price'].ffill()
    return data_minute_df
11 |
--------------------------------------------------------------------------------
/DATABASE/_DataBase.py:
--------------------------------------------------------------------------------
from abc import ABC, abstractmethod
2 |
3 |
class DataBase(ABC):
    """Abstract interface for a document-store "sheet" (collection).

    FIX: the class previously had no ABC base, so @abstractmethod was
    silently unenforced and the interface could be instantiated directly.
    Concrete backends (see DataBasePyMongo) must implement every method.
    """

    @abstractmethod
    def __init__(self, database_name, sheet_name):
        pass

    @abstractmethod
    def insert(self, document):
        # Insert one document; implementations return True on success.
        pass

    @abstractmethod
    def find(self, filter=None, sort=None):
        # Return all documents matching `filter`, optionally sorted.
        pass

    @abstractmethod
    def find_one(self, filter=None, sort=None):
        # Return a single matching document, or None.
        pass

    @abstractmethod
    def update_one(self, filter, update):
        # Apply `update` to the first document matching `filter`.
        pass

    @abstractmethod
    def delete(self, filter):
        # Delete every document matching `filter`.
        pass
28 |
--------------------------------------------------------------------------------
/UTILS/upload_pic.py:
--------------------------------------------------------------------------------
1 | import os
2 | import logging
3 |
4 | from qiniu import Auth, put_file
5 |
6 | from UTILS import config_qiniu # 载入时会加载key
7 | from UTILS.config import LOGGING_LEVEL
8 |
9 | logging.getLogger().setLevel(LOGGING_LEVEL)
10 |
11 |
def upload(file_name):
    """Upload tmp/<file_name> to the qiniu 'public-bz' bucket.

    Side effects only (network upload + log line); returns None.
    Credentials come from UTILS.config_qiniu (git-ignored, baked into the
    image by Dockerfile_image).
    """
    q = Auth(config_qiniu.access_key, config_qiniu.secret_key)
    # Target bucket for the upload.
    bucket_name = 'public-bz'

    # The object key in the bucket is the bare file name; the local file is
    # read from the tmp/ directory.
    token = q.upload_token(bucket_name, file_name)
    ret, info = put_file(token, file_name, os.path.join('tmp', file_name))
    logging.info(f"{ret}, {info}")
    # if info.status_code != 200:
    #     t = config_qiniu.access_key
    return
23 |
--------------------------------------------------------------------------------
/docker-compose-sys.yml:
--------------------------------------------------------------------------------
1 | version: "2.4"
2 | services:
3 | redis:
4 | image: redis:latest
5 | container_name: sys_redis
6 | restart: always
7 | volumes:
8 | - /etc/timezone:/etc/timezone:ro
9 | - /etc/localtime:/etc/localtime:ro
10 | - /mnt/nfs/zsd_server/docker/data/redis:/data
11 | ports:
12 | - "6379:6379"
13 | logging:
14 | options:
15 | max-size: "10m"
16 |
17 | mongo:
18 | image: mongo:latest
19 | container_name: sys_mongo
20 | restart: always
21 | volumes:
22 | - /etc/timezone:/etc/timezone:ro
23 | - /etc/localtime:/etc/localtime:ro
24 | - /mnt/nfs/zsd_server/docker/data/mongo:/data
25 | ports:
26 | - "27017:27017"
27 | logging:
28 | options:
29 | max-size: "10m"
30 |
31 | rabbit:
32 | image: rabbitmq:3.9-management
33 | container_name: sys_rabbitmq
34 | restart: always
35 | volumes:
36 | - /etc/timezone:/etc/timezone:ro
37 | - /etc/localtime:/etc/localtime:ro
38 | ports:
39 | - "5672:5672"
40 | - "15672:15672"
41 | logging:
42 | options:
43 | max-size: "10m"
44 |
--------------------------------------------------------------------------------
/DATABASE/database_pymongo.py:
--------------------------------------------------------------------------------
1 | from pymongo import MongoClient, errors
2 |
3 | from ._DataBase import DataBase
4 |
5 |
class DataBasePyMongo(DataBase):
    """pymongo-backed implementation of the DataBase interface.

    Wraps one MongoClient collection ("sheet") and exposes thin CRUD
    helpers with the same keyword interface as the abstract base.
    """

    def __init__(self, database_name, sheet_name, host='localhost', port=27017):
        super().__init__(database_name, sheet_name)
        self._client = MongoClient(host=host, port=port)
        # FIX: renamed internal attribute `_databese` -> `_database` (typo);
        # private name, no callers outside this class.
        self._database = self._client[database_name]
        self._sheet = self._database[sheet_name]

    def insert(self, document):
        """Insert one document; pymongo uses `_id` as the unique key.

        Returns True on success, False when the `_id` already exists.
        """
        try:
            self._sheet.insert_one(document=document)
            return True
        except errors.DuplicateKeyError as e:
            # Duplicate keys are an expected, non-fatal condition here.
            print(e)
        return False

    def find(self, filter=None, sort=None):
        """Return all matching documents, materialized as a list."""
        return list(self._sheet.find(filter=filter, sort=sort))

    def find_one(self, filter=None, sort=None):
        """Return a single matching document, or None."""
        return self._sheet.find_one(filter=filter, sort=sort)

    def update_one(self, filter, update):
        """Apply `update` to the first document matching `filter`."""
        return self._sheet.update_one(filter=filter, update=update)

    def delete(self, filter):
        """Delete every document matching `filter`."""
        return self._sheet.delete_many(filter=filter)

    def get_database(self):
        """Expose the underlying pymongo Database object."""
        return self._database
39 |
--------------------------------------------------------------------------------
/MODEL/magic_nine_turns.py:
--------------------------------------------------------------------------------
1 | import pandas
2 |
3 | from UTILS.df_utils import get_minute_df
4 |
5 |
def handel(data):
    """Magic Nine Turns signal detector (TD-Sequential-style count).

    Compares each minute's average price against the price four minutes
    earlier; nine consecutive comparisons in the same direction produce a
    reversal signal.

    Args:
        data: iterable of tick records with 'time' and 'price' fields
              (whatever get_minute_df accepts — typically mongo documents).

    Returns:
        list[dict]: one entry per signal with 'time', 'price', a Chinese
        description under the key '指标', and a matplotlib marker in 'plt'
        ('go' = sell signal, 'ro' = buy signal).

    Raises:
        Exception: "data_df is empty" when no data was supplied (callers
        match on this exact message — see func.py).
    """
    data_df = pandas.DataFrame(data)
    if data_df.empty:
        raise Exception("data_df is empty")

    data_minute_df = get_minute_df(data_df)

    result_list = []

    # count tracks consecutive same-direction comparisons; incremented at
    # the bottom of every iteration, so resetting to -1 / 0 below yields
    # 0 / 1 after the increment.
    count = 0
    direction = 'none'
    for i in range(4, data_minute_df.shape[0]):
        # Compare against the minute price four rows back (the "nine turns"
        # lookback window).
        before = data_minute_df.iloc[i - 4]['minute_price']
        now = data_minute_df.iloc[i]['minute_price']

        if now > before:
            if direction == 'up':
                if count == 9:
                    # Nine consecutive up-comparisons: likely top, emit sell.
                    result_list.append({'time': data_minute_df.iloc[i]['time'],
                                        'price': data_minute_df.iloc[i]['price'],
                                        '指标': '神奇九转,连续上涨,将大概率下跌',
                                        'plt': 'go',
                                        })
                    # -1 so the trailing increment restarts the count at 0.
                    count = -1
            else:
                # Direction flipped to up: restart the streak.
                direction = 'up'
                count = 0
        elif now < before:
            if direction == 'down':
                if count == 9:
                    # Nine consecutive down-comparisons: likely bottom, emit buy.
                    result_list.append({'time': data_minute_df.iloc[i]['time'],
                                        'price': data_minute_df.iloc[i]['price'],
                                        '指标': '神奇九转,连续下跌,将大概率上涨',
                                        'plt': 'ro',
                                        })
                    count = -1
            else:
                # Direction flipped to down: restart the streak.
                direction = 'down'
                count = 0
        # NOTE(review): when now == before, count still increments without a
        # direction check — presumably intentional (ties extend the streak);
        # confirm against the strategy definition.
        count += 1

    return result_list
48 |
--------------------------------------------------------------------------------
/save_user_send.py:
--------------------------------------------------------------------------------
1 | import json
2 | import logging
3 |
4 | import requests
5 |
6 | from UTILS.config_port import user_send_host, user_send_port
7 | from UTILS.db_sheets import get_users
8 | from UTILS.rabbitmq_utils import RabbitMqAgent, user_send_channel
9 | from UTILS.config import VERSION, LOGGING_LEVEL
10 |
11 | logging.getLogger().setLevel(LOGGING_LEVEL)
12 |
13 | user_result_len_map = {}
14 |
15 |
def handle_user_send(ch, method, properties, body):
    """RabbitMQ consumer callback: fan a policy result out to subscribed users.

    Args:
        ch, method, properties: pika delivery arguments (unused here).
        body: JSON bytes: {"stock_id": ..., "policy_name": ...}.

    For every user subscribed to both the stock and the policy, calls the
    func.py /send_user endpoint and records the returned result length in
    the module-level user_result_len_map so unchanged results are skipped
    next time. All errors are logged and swallowed so the consumer loop
    keeps running.
    """
    logging.info(f"{body}")
    try:
        json_body = json.loads(body)
        stock_id = json_body['stock_id']
        policy_name = json_body['policy_name']

        users = get_users()
        for user in users:
            # Only notify users subscribed to both this stock and this policy.
            if stock_id not in user['stocks']:
                continue
            if policy_name not in user['policies']:
                continue

            key = user['_id'] + '_' + stock_id
            old_result_len = user_result_len_map.get(key, 0)

            url = f'http://{user_send_host}:{user_send_port}/send_user'
            # Arguments are JSON-encoded and passed via the GET query string;
            # func.py json.loads() each one back.
            data = {'user_id': json.dumps(user['_id']), 'stock_id': json.dumps(stock_id),
                    'old_result_len': json.dumps(old_result_len)}
            # FIX: added a timeout — requests.get without one can hang the
            # consumer forever if the send service stalls.
            re_len = requests.get(url, params=data, timeout=30).json()
            logging.info(f"old_result_len:{old_result_len} re_len:{re_len}")
            user_result_len_map[key] = re_len

    except Exception:
        # FIX: message said "save policy error." — copy-pasted from
        # save_policies.py; this worker handles user sends.
        logging.warning("handle user send error.", exc_info=True)
42 |
43 |
if __name__ == "__main__":
    logging.info(f"VERSION: {VERSION}")
    # Example payload for manual testing:
    # print(json.dumps({'stock_id': '688180', 'policy_name': 'magic_nine_turns'}))
    # Blocks forever, consuming notifications from the user-send channel.
    with RabbitMqAgent() as rabbitmq:
        rabbitmq.start_consuming(queue_name=user_send_channel, func_callback=handle_user_send)
49 |
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: "2.4"
2 | services:
3 | save_tushare:
4 | image: zhangsheng377/stock_image:latest
5 | container_name: stock_save_tushare
6 | restart: always
7 | volumes:
8 | - /etc/timezone:/etc/timezone:ro
9 | - /etc/localtime:/etc/localtime:ro
10 | logging:
11 | options:
12 | max-size: "10m"
13 | entrypoint:
14 | - /bin/bash
15 | - docker_cmd_save_tushare.sh
16 |
17 | stock:
18 | image: zhangsheng377/stock_image:latest
19 | container_name: stock_stock
20 | restart: always
21 | volumes:
22 | - /etc/timezone:/etc/timezone:ro
23 | - /etc/localtime:/etc/localtime:ro
24 | ports:
25 | - "22222:22222"
26 | logging:
27 | options:
28 | max-size: "10m"
29 | entrypoint:
30 | - /bin/bash
31 | - docker_cmd.sh
32 |
33 | save_policies:
34 | image: zhangsheng377/stock_image:latest
35 | container_name: stock_save_policies
36 | restart: always
37 | volumes:
38 | - /etc/timezone:/etc/timezone:ro
39 | - /etc/localtime:/etc/localtime:ro
40 | logging:
41 | options:
42 | max-size: "10m"
43 | entrypoint:
44 | - /bin/bash
45 | - docker_cmd_save_policies.sh
46 |
47 | server:
48 | image: zhangsheng377/stock_image:latest
49 | container_name: stock_server
50 | restart: always
51 | volumes:
52 | - /etc/timezone:/etc/timezone:ro
53 | - /etc/localtime:/etc/localtime:ro
54 | ports:
55 | - "5000:5000"
56 | logging:
57 | options:
58 | max-size: "10m"
59 | entrypoint:
60 | - /bin/bash
61 | - docker_cmd_server.sh
62 |
63 | user_send:
64 | image: zhangsheng377/stock_image:latest
65 | container_name: stock_user_send
66 | restart: always
67 | volumes:
68 | - /etc/timezone:/etc/timezone:ro
69 | - /etc/localtime:/etc/localtime:ro
70 | logging:
71 | options:
72 | max-size: "10m"
73 | entrypoint:
74 | - /bin/bash
75 | - docker_cmd_user_send.sh
76 |
--------------------------------------------------------------------------------
/save_policies.py:
--------------------------------------------------------------------------------
1 | import json
2 | import logging
3 |
4 | from UTILS.db_sheets import db_redis, get_stock_data
5 | from UTILS.rabbitmq_utils import RabbitMqAgent, polices_channel, user_send_channel
6 | from UTILS.time_utils import get_rest_seconds
7 | from policies import policies
8 | from UTILS.config import VERSION, LOGGING_LEVEL
9 |
10 | logging.getLogger().setLevel(LOGGING_LEVEL)
11 |
12 | stock_police_result_len_map = {}
13 |
14 |
def handle_police(ch, method, properties, body):
    """RabbitMQ consumer callback: run one policy over one stock's data.

    Args:
        ch, method, properties: pika delivery arguments (unused here).
        body: JSON bytes: {"stock_id": ..., "policy_name": ...}.

    Runs the named policy, and if the result count changed since the last
    run, caches the results in redis (TTL = until next trading day) and
    notifies the user-send worker over rabbitmq. All errors are logged and
    swallowed so the consumer loop keeps running.
    """
    logging.debug(f"{body}")
    try:
        json_body = json.loads(body)
        stock_id = json_body['stock_id']
        policy_name = json_body['policy_name']

        data = get_stock_data(stock_id)
        if len(data) <= 0:
            # FIX: log typo "ken" -> "len".
            logging.warning(f"get_stock_data len <= 0. len={len(data)}")
            return

        logging.debug(f"had data: {len(data)}")
        result_list = policies[policy_name](data)

        key = stock_id + '_' + policy_name
        old_result_len = stock_police_result_len_map.get(key, 0)
        if len(result_list) == old_result_len:
            # Nothing new since last run — skip the cache write and notify.
            logging.debug(f"len(result_list) == old_result_len. old_result_len={old_result_len}")
            return

        # Cache until the next trading-day reset, then tell the user-send
        # worker that this stock/policy has fresh results.
        db_redis.set(key, json.dumps(result_list), ex=get_rest_seconds())
        stock_police_result_len_map[key] = len(result_list)
        with RabbitMqAgent() as rabbitmq:
            rabbitmq.put(queue_name=user_send_channel, route_key=user_send_channel,
                         message_str=json.dumps({'stock_id': stock_id, 'policy_name': policy_name}))

        logging.debug(f"old_result_len={old_result_len}, len(result_list)={len(result_list)}")

    except Exception:
        logging.warning("save policy error.", exc_info=True)
46 |
47 |
if __name__ == "__main__":
    logging.info(f"VERSION: {VERSION}")
    # Blocks forever, consuming policy jobs from the policies channel.
    with RabbitMqAgent() as rabbitmq:
        rabbitmq.start_consuming(queue_name=polices_channel, func_callback=handle_police)
52 |
--------------------------------------------------------------------------------
/MODEL/macd_5_minute.py:
--------------------------------------------------------------------------------
1 | import pandas
2 | import numpy
3 | import talib
4 |
5 | from UTILS.df_utils import get_minute_df
6 |
7 |
def MACD_CN(close, fastperiod=12, slowperiod=26, signalperiod=9):
    """MACD using EMA smoothing (matype=1) with the histogram doubled,
    matching the convention used by Chinese trading software.

    Returns (DIFF, DEA, MACD) as three aligned series.
    """
    diff, dea, hist = talib.MACDEXT(close,
                                    fastperiod=fastperiod, fastmatype=1,
                                    slowperiod=slowperiod, slowmatype=1,
                                    signalperiod=signalperiod, signalmatype=1)
    return diff, dea, hist * 2
13 |
14 |
def handel(data):
    """5-minute MACD crossover signal detector.

    Resamples tick data to 5-minute rows, computes MACD_CN over them, and
    emits a signal at every zero-crossing of the MACD histogram.

    Args:
        data: iterable of tick records with 'time' and 'price' fields
              (whatever get_minute_df accepts — typically mongo documents).

    Returns:
        list[dict]: one entry per crossover with 'time', 'price', a Chinese
        description under the key '指标', and a matplotlib marker in 'plt'
        ('rx' = bullish cross, 'gx' = bearish cross).

    Raises:
        Exception: "data_df is empty" when no data was supplied (callers
        match on this exact message — see func.py).
    """
    data_df = pandas.DataFrame(data)
    if data_df.empty:
        raise Exception("data_df is empty")

    data_minute_df = get_minute_df(data_df)

    # Keep only minutes ending in 0 or 5 ('minute' is "HH:MM", so position
    # 4 is the last digit) — a cheap 5-minute resample.
    data_minute_df['tmp'] = data_minute_df['minute'].str.slice(start=4, stop=5)
    data_5_minute_df = pandas.concat([data_minute_df[data_minute_df['tmp'] == '0'],
                                      data_minute_df[data_minute_df['tmp'] == '5']])
    data_5_minute_df = data_5_minute_df.sort_values(by='minute', ascending=True)

    # NOTE(review): MACD is computed over 'price' (last tick of the minute),
    # not 'minute_price' (the minute average) — confirm that is intended.
    data_5_minute_df['macdDIFF'], data_5_minute_df['macdDEA'], data_5_minute_df['macd'] = MACD_CN(
        data_5_minute_df['price'])

    result_list = []

    for i in range(1, data_5_minute_df.shape[0]):
        before = data_5_minute_df.iloc[i - 1]['macd']
        now = data_5_minute_df.iloc[i]['macd']

        # A sign change of the histogram between consecutive 5-minute rows
        # is a crossover. (Leading NaNs from MACD compare unequal via sign,
        # but the now>0 / now<0 guards drop them.)
        if numpy.sign(before) != numpy.sign(now):
            if now > 0:
                result_list.append({'time': data_5_minute_df.iloc[i]['time'],
                                    'price': data_5_minute_df.iloc[i]['price'],
                                    '指标': 'macd信号,将持续上涨',
                                    'plt': 'rx',
                                    })
            elif now < 0:
                result_list.append({'time': data_5_minute_df.iloc[i]['time'],
                                    'price': data_5_minute_df.iloc[i]['price'],
                                    '指标': 'macd信号,将持续下跌',
                                    'plt': 'gx',
                                    })
    return result_list
50 |
--------------------------------------------------------------------------------
/func.py:
--------------------------------------------------------------------------------
1 | import json
2 | import datetime
3 |
4 | from flask import Flask, request
5 |
6 | from UTILS.utils import send_result, get_policy_datas
7 | from UTILS.config import VERSION, LOGGING_LEVEL
8 | from UTILS.db_sheets import get_users, get_stock_data
9 | from UTILS.config_port import user_send_port
10 |
11 | application = Flask(__name__)
12 | # application.debug = True
13 | application.logger.setLevel(LOGGING_LEVEL)
14 |
15 |
def ftqq_token_is_valid(ftqq_token):
    """A token is usable when it is present and non-empty."""
    if ftqq_token is None:
        return False
    return ftqq_token != ''
18 |
19 |
def get_user(user_id):
    """Return the user dict whose '_id' equals user_id, or None if absent."""
    return next((u for u in get_users() if u['_id'] == user_id), None)
26 |
27 |
@application.route('/')
def hello_world():
    """Health-check endpoint: greeting plus the current server time."""
    # FIX: the string literal was broken across two physical lines (a raw
    # newline inside the quotes) — a syntax error; use an explicit '\n'.
    return 'Hello, World!' + '\n' + str(datetime.datetime.now())
31 |
32 |
@application.route('/send_user', methods=["GET"])
def send_user():
    """HTTP endpoint driving one user notification (called by save_user_send.py).

    Each GET query arg arrives JSON-encoded (the caller json.dumps each
    value), so they are json.loads'd back here. Returns the new result
    length, JSON-encoded.
    """
    user_id = json.loads(request.args['user_id'])
    stock_id = json.loads(request.args['stock_id'])
    old_result_len = json.loads(request.args['old_result_len'])
    result_len = _send_user(user_id, stock_id, old_result_len)
    return json.dumps(result_len)
41 |
42 |
def _send_user(user_id, stock_id, old_result_len):
    """Push fresh policy results for one stock to one user via ftqq.

    Args:
        user_id: key into the cached users collection.
        stock_id: stock whose results to send.
        old_result_len: result count already delivered to this user.

    Returns:
        The new result length on a successful send; otherwise the unchanged
        old_result_len (caller persists whatever is returned).
    """
    application.logger.info(f"{user_id}, {stock_id}, {old_result_len}")
    user = get_user(user_id)
    if user is None:
        application.logger.warning(f"user is None. \n\n\n")
        return old_result_len

    ftqq_token = user['ftqq_token']
    if not ftqq_token_is_valid(ftqq_token):
        application.logger.warning(f"not ftqq_token_is_valid(ftqq_token). \n\n\n")
        return old_result_len

    try:
        data = get_stock_data(stock_id)

        result_list = get_policy_datas(stock_id, user['policies'])
        if len(result_list) == old_result_len:
            # Nothing new since the last delivery — skip the send.
            application.logger.warning(f"len(result_list) == old_result_len. old_result_len={old_result_len}. \n\n\n")
            return old_result_len

        return send_result(stock_id, data, result_list, ftqq_token, old_result_len)

    except Exception as e:
        # The MODEL handlers raise exactly this message for empty data;
        # it is expected and logged at info level.
        # NOTE(review): e.args[0] raises IndexError for arg-less exceptions —
        # masked by nothing here; confirm all raised exceptions carry args.
        if e.args[0] == 'data_df is empty':
            application.logger.info("data_df is empty. \n\n\n")
        else:
            application.logger.warning("handle data error.", exc_info=True)
        return old_result_len
71 |
72 |
if __name__ == "__main__":
    application.logger.info(f"VERSION: {VERSION}")
    # application.run(host="0.0.0.0", port=user_send_port)
    # Dev-server entry point (default port 5000); production runs this app
    # under gunicorn on 22222 instead (docker_cmd.sh).
    application.run(host="0.0.0.0")
77 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # TODO
2 |
3 | - [ ] 用守护进程起多进程的方式,拉起服务。节约容器的使用。
4 |
5 | --------------------------------------------------
6 |
7 | ## redis
8 |
9 | ```
10 | docker run -d --name redis -v /opt/docker/data/redis:/data -v /etc/timezone:/etc/timezone:ro -v /etc/localtime:/etc/localtime:ro --restart always -p 6379:6379 redis
11 |
12 | redis-cli -h 127.0.0.1 -p 6379
13 | ```
14 |
15 | ## mongo
16 |
17 | ```
18 | docker run -d --name mongo -v /opt/docker/data/mongo:/data -v /etc/timezone:/etc/timezone:ro -v /etc/localtime:/etc/localtime:ro --restart always -p 27017:27017 mongo
19 |
20 | mongo 127.0.0.1:27017
21 | show dbs
22 | use tushare
23 | db.sh_600196.insert({"test":"testdb"})
24 |
25 | mongodump -h 127.0.0.1:27017 -d tushare -o /data/mongodump/tushare
26 | mongorestore -h 127.0.0.1:27017 -d tushare --dir /data/mongodump/tushare
27 | ```
28 |
29 | ## save_tushare
30 |
31 | ```
32 | docker build -t zhangsheng377/save_tushare -f Dockerfile_tushare .
33 |
34 | docker run -ti -v /etc/timezone:/etc/timezone:ro -v /etc/localtime:/etc/localtime:ro zhangsheng377/save_tushare
35 |
36 | docker run -d --name save_tushare -v /etc/timezone:/etc/timezone:ro -v /etc/localtime:/etc/localtime:ro --restart always zhangsheng377/save_tushare
37 | ```
38 |
39 | ## stock
40 |
41 | ```
42 | docker build -t zhangsheng377/stock .
43 |
44 | docker run -ti zhangsheng377/stock
45 | docker run -ti zhangsheng377/stock /bin/bash
46 | docker run -ti -v /etc/timezone:/etc/timezone:ro -v /etc/localtime:/etc/localtime:ro zhangsheng377/stock
47 |
48 | docker run -d --name stock -v /etc/timezone:/etc/timezone:ro -v /etc/localtime:/etc/localtime:ro --restart always zhangsheng377/stock
49 | ```
50 |
51 | ## save_policies
52 |
53 | ```
54 | docker build -t zhangsheng377/save_policies -f Dockerfile_policy .
55 |
56 | docker run -ti -v /etc/timezone:/etc/timezone:ro -v /etc/localtime:/etc/localtime:ro zhangsheng377/save_policies
57 |
58 | docker run -d --name save_policies -v /etc/timezone:/etc/timezone:ro -v /etc/localtime:/etc/localtime:ro --restart always zhangsheng377/save_policies
59 | ```
60 |
61 | ## 部署花生壳 phddns
62 |
63 | ```
64 | docker build -t zhangsheng377/phddns -f Dockerfile_phddns .
65 |
66 | docker run -ti -v /etc/timezone:/etc/timezone:ro -v /etc/localtime:/etc/localtime:ro zhangsheng377/phddns /bin/bash
67 |
68 | docker run -d --name phddns -v /etc/timezone:/etc/timezone:ro -v /etc/localtime:/etc/localtime:ro --restart always zhangsheng377/phddns
69 | ```
70 |
71 | ## 部署微信服务器
72 |
73 | ```
74 | docker build -t zhangsheng377/server -f Dockerfile_server .
75 |
76 | docker run -ti -v /etc/timezone:/etc/timezone:ro -v /etc/localtime:/etc/localtime:ro zhangsheng377/server /bin/bash
77 |
78 | docker run -d --name server -v /etc/timezone:/etc/timezone:ro -v /etc/localtime:/etc/localtime:ro -p 5000:5000 --restart always zhangsheng377/server
79 | ```
80 |
--------------------------------------------------------------------------------
/.github/workflows/docker-image.yml:
--------------------------------------------------------------------------------
1 | name: Docker Image CI
2 |
3 | on:
4 | push:
5 | branches: [ master ]
6 | pull_request:
7 | branches: [ master ]
8 |
9 | env:
10 | # Use docker.io for Docker Hub if empty
11 | REGISTRY: docker.io
12 | #
13 | IMAGE_NAME: stock_image
14 | # DockerFile
15 | DOCKER_FILE: Dockerfile_image
16 |
17 |
18 | jobs:
19 |
20 | build:
21 |
22 | runs-on: ubuntu-latest
23 |
24 | steps:
25 | - name: Checkout repository
26 | uses: actions/checkout@v2
27 |
28 | # Login against a Docker registry except on PR
29 | # https://github.com/docker/login-action
30 | - name: Log into registry ${{ env.REGISTRY }}
31 | if: github.event_name != 'pull_request'
32 | uses: docker/login-action@v1
33 | with:
34 | registry: ${{ env.REGISTRY }}
35 | username: ${{ secrets.DOCKER_HUB_USER }}
36 | password: ${{ secrets.DOCKER_HUB_TOKEN }}
37 |
38 | # Extract metadata (tags, labels) for Docker
39 | # https://github.com/docker/metadata-action
40 | - name: Extract Docker metadata
41 | id: meta
42 | uses: docker/metadata-action@v3
43 | with:
44 | images: ${{ env.REGISTRY }}/${{ secrets.DOCKER_HUB_USER }}/${{ env.IMAGE_NAME }}
45 |
46 | - name: Get Time
47 | id: time
48 | uses: nanzm/get-time-action@v1.1
49 | with:
50 | timeZone: 8
51 | format: 'YYYY-MM-DD-HH-mm-ss'
52 |
53 | # Build and push Docker image with Buildx (don't push on PR)
54 | # https://github.com/docker/build-push-action
55 | # - name: Build and push Docker image
56 | # uses: docker/build-push-action@v2
57 | # with:
58 | # context: .
59 | # file: ${{ env.DOCKER_FILE }}
60 | # build-args:
61 | # QINIU_ACCESS_KEY: ${{ secrets.QINIU_ACCESS_KEY }}
62 | # QINIU_SECRET_KEY: ${{ secrets.QINIU_SECRET_KEY }}
63 | # push: ${{ github.event_name != 'pull_request' }}
64 | # # tags: ${{ steps.meta.outputs.tags }}-${{ steps.time.outputs.time }}
65 | # tags: ${{ env.REGISTRY }}/${{ secrets.DOCKER_HUB_USER }}/${{ env.IMAGE_NAME }}:latest
66 | # labels: ${{ steps.meta.outputs.labels }}
67 |
68 | - name: Build and push Docker image
69 | run: |
70 | docker build . --file ${{ env.DOCKER_FILE }} --tag ${{ env.REGISTRY }}/${{ secrets.DOCKER_HUB_USER }}/${{ env.IMAGE_NAME }}:latest --build-arg QINIU_ACCESS_KEY=${{ secrets.QINIU_ACCESS_KEY }} --build-arg QINIU_SECRET_KEY=${{ secrets.QINIU_SECRET_KEY }} --build-arg RABBITMQ_USER=${{ secrets.RABBITMQ_USER }} --build-arg RABBITMQ_PASSWORD=${{ secrets.RABBITMQ_PASSWORD }}
71 |
72 | docker push ${{ env.REGISTRY }}/${{ secrets.DOCKER_HUB_USER }}/${{ env.IMAGE_NAME }}:latest
73 |
--------------------------------------------------------------------------------
/.github/workflows/docker-image_base.yml.bak:
--------------------------------------------------------------------------------
1 | name: Docker Image CI
2 |
3 | on:
4 | push:
5 | branches: [ master ]
6 | pull_request:
7 | branches: [ master ]
8 |
9 | env:
10 | # Use docker.io for Docker Hub if empty
11 | REGISTRY: docker.io
12 | #
13 | IMAGE_NAME: stock_base
14 | # DockerFile
15 | DOCKER_FILE: Dockerfile_base
16 |
17 |
18 | jobs:
19 |
20 | build:
21 |
22 | runs-on: ubuntu-latest
23 |
24 | steps:
25 | - name: Checkout repository
26 | uses: actions/checkout@v2
27 |
28 | # Login against a Docker registry except on PR
29 | # https://github.com/docker/login-action
30 | - name: Log into registry ${{ env.REGISTRY }}
31 | if: github.event_name != 'pull_request'
32 | uses: docker/login-action@v1
33 | with:
34 | registry: ${{ env.REGISTRY }}
35 | username: ${{ secrets.DOCKER_HUB_USER }}
36 | password: ${{ secrets.DOCKER_HUB_TOKEN }}
37 |
38 | # Extract metadata (tags, labels) for Docker
39 | # https://github.com/docker/metadata-action
40 | - name: Extract Docker metadata
41 | id: meta
42 | uses: docker/metadata-action@v3
43 | with:
44 | images: ${{ env.REGISTRY }}/${{ secrets.DOCKER_HUB_USER }}/${{ env.IMAGE_NAME }}
45 |
46 | - name: Get Time
47 | id: time
48 | uses: nanzm/get-time-action@v1.1
49 | with:
50 | timeZone: 8
51 | format: 'YYYY-MM-DD-HH-mm-ss'
52 |
53 | # Build and push Docker image with Buildx (don't push on PR)
54 | # https://github.com/docker/build-push-action
55 | # - name: Build and push Docker image
56 | # uses: docker/build-push-action@v2
57 | # with:
58 | # context: .
59 | # file: ${{ env.DOCKER_FILE }}
60 | # build-args:
61 | # QINIU_ACCESS_KEY: ${{ secrets.QINIU_ACCESS_KEY }}
62 | # QINIU_SECRET_KEY: ${{ secrets.QINIU_SECRET_KEY }}
63 | # push: ${{ github.event_name != 'pull_request' }}
64 | # # tags: ${{ steps.meta.outputs.tags }}-${{ steps.time.outputs.time }}
65 | # tags: ${{ env.REGISTRY }}/${{ secrets.DOCKER_HUB_USER }}/${{ env.IMAGE_NAME }}:latest
66 | # labels: ${{ steps.meta.outputs.labels }}
67 |
68 | - name: Build and push Docker image
69 | run: |
70 | docker build . --file ${{ env.DOCKER_FILE }} --tag ${{ env.REGISTRY }}/${{ secrets.DOCKER_HUB_USER }}/${{ env.IMAGE_NAME }}:latest --build-arg QINIU_ACCESS_KEY=${{ secrets.QINIU_ACCESS_KEY }} --build-arg QINIU_SECRET_KEY=${{ secrets.QINIU_SECRET_KEY }} --build-arg RABBITMQ_USER=${{ secrets.RABBITMQ_USER }} --build-arg RABBITMQ_PASSWORD=${{ secrets.RABBITMQ_PASSWORD }}
71 |
72 | docker push ${{ env.REGISTRY }}/${{ secrets.DOCKER_HUB_USER }}/${{ env.IMAGE_NAME }}:latest
73 |
--------------------------------------------------------------------------------
/UTILS/db_sheets.py:
--------------------------------------------------------------------------------
1 | import json
2 | import re
3 | from datetime import datetime
4 |
5 | import redis
6 |
7 | from DATABASE import database_factory
8 | from UTILS.config_port import redis_host, redis_port
9 | from UTILS.time_utils import get_rest_seconds
10 |
11 |
def get_db_sheet(database_name, sheet_name):
    """Return a pymongo-backed sheet handle for the given database/sheet pair."""
    return database_factory(model="pymongo", database_name=database_name, sheet_name=sheet_name)
14 |
15 |
def _get_data(key: str, get_db_func):
    """Read-through cache: return the Redis value for *key*, filling it from *get_db_func* on a miss."""
    cached = db_redis.get(key)
    if cached is not None:
        return json.loads(cached)
    fresh = get_db_func()
    db_redis.set(key, json.dumps(fresh))
    return fresh
24 |
25 |
def get_db_users():
    """Fetch every user document straight from the MongoDB user sheet (no cache)."""
    return get_db_sheet(database_name="user", sheet_name="user").find()
29 |
30 |
def get_users():
    """Return the list of user documents, served from the Redis 'users' cache."""
    return _get_data('users', get_db_users)
33 |
34 |
def get_user(user_id):
    """Return the user document whose '_id' equals *user_id*, or None if absent."""
    return next((candidate for candidate in get_users() if candidate['_id'] == user_id), None)
41 |
42 |
def update_redis_users_from_db():
    """Overwrite the Redis 'users' cache with a fresh copy from the database."""
    db_redis.set('users', json.dumps(get_db_users()))
46 |
47 |
def insert_users(document):
    """Insert a user document and refresh the Redis cache. Returns True on success."""
    sheet = get_db_sheet(database_name="user", sheet_name="user")
    if not sheet.insert(document=document):
        return False
    db_redis.set('users', json.dumps(get_users()))
    return True
55 |
56 |
def update_one_user(filter, update):
    """Apply *update* to the first matching user document, then resync the Redis cache.

    Returns the pymongo UpdateResult from the underlying sheet.
    """
    sheet = get_db_sheet(database_name="user", sheet_name="user")
    outcome = sheet.update_one(filter=filter, update=update)
    update_redis_users_from_db()
    return outcome
62 |
63 |
def get_today_tick_data(db_sheet):
    """Return today's tick records from *db_sheet*, ordered by '_id' ascending.

    Tick '_id's start with 'YYYY-mm-dd', so a '^<today>' regex selects them.
    """
    today_prefix = '^' + datetime.now().strftime("%Y-%m-%d")
    return db_sheet.find(filter={'_id': {"$regex": today_prefix}}, sort=[('_id', 1)])
70 |
71 |
def get_stock_data(stock_id):
    """Return today's tick data for a Shanghai stock, via the Redis read-through cache."""
    def load_from_db():
        sheet = get_db_sheet(database_name="tushare", sheet_name="sh_" + stock_id)
        return get_today_tick_data(sheet)

    return _get_data(stock_id, load_from_db)
78 |
79 |
def add_stock_data(stock_id, insert_data_json):
    """Persist one tick record and refresh the per-stock Redis cache.

    The cache entry expires at the end of the trading day (get_rest_seconds).
    Returns True when the record was inserted.
    """
    sheet = get_db_sheet(database_name="tushare", sheet_name="sh_" + stock_id)
    if not sheet.insert(insert_data_json):
        return False
    today_data = get_today_tick_data(sheet)
    db_redis.set(stock_id, json.dumps(today_data), ex=get_rest_seconds())
    return True
87 |
88 |
def get_stock_ids():
    """Return the set of 6-digit stock ids known to the system.

    Combines every 'sh_######' collection in the tushare database with every
    user's subscribed stocks, so freshly subscribed ids are tracked before
    their collection exists.
    """
    def get_db_stock_ids():
        database = get_db_sheet(database_name="tushare", sheet_name="sh_600196").get_database()
        # Raw string: '\d' inside a plain literal is an invalid escape sequence.
        return database.list_collection_names(filter={"name": re.compile(r'^sh_\d{6}')})

    # return _get_data('stock_ids', get_db_stock_ids)
    # return get_db_stock_ids()
    stock_ids = {stock_id[3:] for stock_id in get_db_stock_ids()}  # strip 'sh_' prefix
    for user in get_users():
        # Users created via the WeChat binding flow may not have 'stocks' yet.
        stock_ids |= set(user.get('stocks', []))
    return stock_ids
101 |
102 |
# Module-level Redis client shared by all cache helpers above (db 0).
db_redis = redis.Redis(host=redis_host, port=redis_port, db=0)

if __name__ == '__main__':
    # Ad-hoc smoke test: dump the cached users and the discovered stock ids.
    users = get_users()
    print(users)
    print(type(users), type(users[0]))

    print(get_stock_ids())
111 |
--------------------------------------------------------------------------------
/UTILS/rabbitmq_utils.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import os
3 | import sys
4 | import traceback
5 |
6 | import pika
7 |
8 | from UTILS import config_rabbitmq
9 | from UTILS.config_port import rabbitmq_host, rabbitmq_port
10 | from UTILS.config import LOGGING_LEVEL
11 |
logging.getLogger().setLevel(LOGGING_LEVEL)

# Queue / routing-key names shared between the tushare saver and the workers.
polices_channel = 'stock_polices'
user_send_channel = 'stock_user_send'
16 |
17 |
class RabbitMqAgent(object):
    """Context-manager wrapper around a blocking pika connection.

    Usage:
        with RabbitMqAgent() as agent:
            agent.put(message_str=..., queue_name=..., route_key=...)
    """

    def __init__(self):
        self._host = rabbitmq_host  # broker IP
        self._port = rabbitmq_port  # broker port
        self._vhost = '/'  # virtual host
        self._credentials = pika.PlainCredentials(config_rabbitmq.rabbitmq_user, config_rabbitmq.rabbitmq_password)
        self._connection = None  # set by connect()

    def connect(self):
        """Open the blocking connection to the broker.

        On failure the process exits: callers are long-running workers that
        rely on their container supervisor to restart them.
        """
        try:
            # A short heartbeat lets dead brokers be detected quickly.
            parameter = pika.ConnectionParameters(host=self._host,
                                                  port=self._port,
                                                  virtual_host=self._vhost,
                                                  credentials=self._credentials,
                                                  heartbeat=10)
            self._connection = pika.BlockingConnection(parameter)
        except Exception:
            traceback.print_exc()
            try:
                sys.exit(0)
            except SystemExit:
                # sys.exit can be caught in some contexts; force the exit.
                os._exit(0)

    def __enter__(self):
        self.connect()
        return self

    def put(self, message_str, queue_name, route_key, exchange=''):
        """Publish *message_str* (must be a str) to *queue_name* via *route_key*."""
        if self._connection is None:
            logging.error("put without connect!")
            return

        channel = self._connection.channel()
        channel.queue_declare(queue=queue_name)  # idempotent declare

        channel.basic_publish(
            exchange=exchange,  # target exchange ('' = default)
            routing_key=route_key,
            body=message_str
        )

    def start_consuming(self, queue_name, func_callback):
        """Block forever, delivering each message on *queue_name* to *func_callback*."""
        if self._connection is None:
            logging.error("start_consuming without connect!")
            return
        channel = self._connection.channel()
        channel.queue_declare(queue=queue_name)

        channel.basic_consume(on_message_callback=func_callback,
                              queue=queue_name,
                              auto_ack=True)
        channel.start_consuming()  # run_forever(): blocks waiting for messages

    def close(self):
        """Close the RabbitMQ connection if one was opened."""
        if self._connection is not None:
            self._connection.close()

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()
82 |
83 |
if __name__ == "__main__":
    # Smoke test: publish a single message to the 'hello' queue...
    with RabbitMqAgent() as rabbitmq:
        rabbitmq.put(queue_name='hello', route_key='hello', message_str='Hello World!')
        print(" [x] Sent 'Hello World!'")


    def callback(ch, method, properties, body):
        # Standard pika consume callback signature; just echo the payload.
        print(" [x] Received %r" % body)


    # ...then consume it back, blocking until Ctrl-C.
    try:
        with RabbitMqAgent() as rabbitmq:
            rabbitmq.start_consuming(queue_name='hello', func_callback=callback)
    except KeyboardInterrupt:
        print('Interrupted')
        try:
            sys.exit(0)
        except SystemExit:
            os._exit(0)
103 |
--------------------------------------------------------------------------------
/UTILS/utils.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 | import uuid
4 | from datetime import datetime
5 | import logging
6 |
7 | import pandas
8 | import requests
9 | import matplotlib.pyplot as plt
10 |
11 | from UTILS.config import LOGGING_LEVEL
12 | from UTILS.upload_pic import upload
13 | from UTILS.db_sheets import db_redis
14 |
15 | logging.getLogger().setLevel(LOGGING_LEVEL)
16 |
17 |
def get_stock_name_map():
    """Return the stock_id -> display-name mapping cached in Redis ({} if unset)."""
    raw = db_redis.get('stock_name_map')
    return json.loads(raw) if raw else {}
24 |
25 |
def plot_result(data, data_result_df, file_name):
    """Plot today's price curve plus policy markers, save to tmp/, and upload it.

    data: list of tick dicts with at least string 'time' and 'price' fields.
    data_result_df: DataFrame with 'time', 'price' and a 'plt' marker-style column.
    Raises Exception when *data* is empty.
    """
    data_df = pandas.DataFrame(data)
    if data_df.empty:
        raise Exception("data_df is empty")
    # Keep one tick per minute (the last) to thin out the line plot.
    data_df['minute'] = data_df['time'].str.slice(stop=5)
    data_minute_df = data_df.drop_duplicates(subset=['minute'], keep='last')

    # .copy() avoids pandas' SettingWithCopyWarning on the in-place conversions.
    data_plt = data_minute_df[['time', 'price']].copy()
    data_plt['time'] = pandas.to_datetime(data_plt['time'])
    data_plt['price'] = pandas.to_numeric(data_plt['price'])
    plt.plot(data_plt['time'], data_plt['price'])

    data_plt = data_result_df[['time', 'price', 'plt']].copy()
    data_plt['time'] = pandas.to_datetime(data_plt['time'])
    data_plt['price'] = pandas.to_numeric(data_plt['price'])
    # iterrows() replaces DataFrame.T.iteritems(), which was removed in pandas 2.0.
    for _, row in data_plt.iterrows():
        plt.plot(row['time'], row['price'], row['plt'], markersize=10)
    # plt.show()

    if not os.path.exists('tmp'):
        os.makedirs('tmp')
    # NOTE(review): savefig with no extension writes '<name>.png' (default format),
    # which is what the upload below expects — confirm against matplotlib config.
    plt.savefig(os.path.join('tmp', file_name))
    plt.close()

    upload(file_name + ".png")
51 |
52 |
def send_result(stock_id, data, result_list, ftqq_token, old_result_len):
    """Render policy results as a markdown table and push them via ServerChan.

    Sends only when there is tick data, the result set is non-empty, and its
    size changed since the last send (*old_result_len*). Returns the new
    result count so the caller can persist it for the next comparison.
    """
    data_result_df = pandas.DataFrame(result_list)
    if len(data) > 0 and not data_result_df.empty and data_result_df.shape[0] != old_result_len:
        data_result_df = data_result_df.sort_values(by='time', ascending=True)

        file_name = str(uuid.uuid1())
        # Plotting failures are non-fatal: the text report is still sent.
        try:
            plot_result(data, data_result_df, file_name)
        except Exception as e:
            logging.warning(e)

        # Blank spacer columns widen the markdown table for WeChat rendering.
        data_result_df[' '] = ' '
        data_result_df = data_result_df[['time', ' ', 'price', ' ', '指标']]
        try:
            result_markdown = data_result_df.to_markdown(index=False)
        except Exception:
            # Older tabulate/pandas versions spell the flag 'showindex'.
            result_markdown = data_result_df.to_markdown(showindex=False)
        # FIXME: this format string has no placeholder, so file_name is dropped
        # and no image link is appended; kept as-is pending the intended URL.
        result_markdown += "\n\n".format(file_name)
        logging.info(result_markdown)

        stock_name_map = get_stock_name_map()
        # Fall back to the raw id when the name cache has no entry yet
        # (previously raised KeyError and aborted the send).
        stock_name = stock_name_map.get(stock_id, stock_id)
        res = requests.post('https://sc.ftqq.com/{}.send'.format(ftqq_token),
                            data={'text': stock_name + " " + stock_id,
                                  'desp': result_markdown + "\n\n" + datetime.now().strftime(
                                      "%Y-%m-%d %H:%M:%S")})
        logging.info(res.text)

    return data_result_df.shape[0]
83 |
84 |
def is_stock_time():
    """Return True between 08:00 and 16:59 local time (rough trading window)."""
    return 8 <= datetime.now().hour <= 16
90 |
91 |
def get_policy_data(stock_id, policy_name):
    """Return the cached result list for one (stock, policy) pair ([] when absent)."""
    cached = db_redis.get(stock_id + '_' + policy_name)
    return json.loads(cached if cached is not None else '[]')
97 |
98 |
def get_policy_datas(stock_id, policy_names):
    """Concatenate the cached results of every policy in *policy_names*."""
    return [entry
            for policy_name in policy_names
            for entry in get_policy_data(stock_id, policy_name)]
104 |
--------------------------------------------------------------------------------
/save_tushare.py:
--------------------------------------------------------------------------------
1 | import json
2 | import logging
3 | import sched
4 | import threading
5 | import time
6 |
7 | import tushare as ts
8 |
9 | from UTILS.db_sheets import get_db_sheet, add_stock_data, get_stock_ids, db_redis
10 | from UTILS.rabbitmq_utils import RabbitMqAgent, polices_channel
11 | from policies import policies
12 | from UTILS.utils import is_stock_time, get_stock_name_map
13 | from UTILS.config import VERSION, LOGGING_LEVEL
14 |
logging.getLogger().setLevel(LOGGING_LEVEL)

# One shared scheduler drives every periodic job (per-stock spiders + discovery).
schdule = sched.scheduler(time.time, time.sleep)

# stock_id -> threading.Lock; prevents overlapping spider runs for one stock.
stock_locks = {}
20 |
21 | '''
22 | 0:name,股票名字
23 | 1:open,今日开盘价
24 | 2:pre_close,昨日收盘价
25 | 3:price,当前价格
26 | 4:high,今日最高价
27 | 5:low,今日最低价
28 | 6:bid,竞买价,即“买一”报价
29 | 7:ask,竞卖价,即“卖一”报价
30 | 8:volume,成交量 maybe you need do volume/100
31 | 9:amount,成交金额(元 CNY)
32 | 10:b1_v,委买一(笔数 bid volume)
33 | 11:b1_p,委买一(价格 bid price)
34 | 12:b2_v,“买二”
35 | 13:b2_p,“买二”
36 | 14:b3_v,“买三”
37 | 15:b3_p,“买三”
38 | 16:b4_v,“买四”
39 | 17:b4_p,“买四”
40 | 18:b5_v,“买五”
41 | 19:b5_p,“买五”
42 | 20:a1_v,委卖一(笔数 ask volume)
43 | 21:a1_p,委卖一(价格 ask price)
44 | ...
45 | 30:date,日期;
46 | 31:time,时间;
47 | '''
48 |
49 |
def declare_polices_handle(stock_id):
    """Enqueue one RabbitMQ message per registered policy for *stock_id*."""
    with RabbitMqAgent() as rabbitmq:
        # Only the policy names are needed here; the handlers run in the workers
        # (the original iterated .items() and discarded every handle).
        for policy_name in policies:
            rabbitmq.put(queue_name=polices_channel, route_key=polices_channel,
                         message_str=json.dumps({'stock_id': stock_id, 'policy_name': policy_name}))
55 |
56 |
def add_one_stock_record(stock_id, last_time):
    """Fetch one realtime quote for *stock_id* and persist it if it is new.

    Returns (last_time, inserted): *last_time* is the tick time of the most
    recently stored record (used to skip duplicate ticks on the next call),
    *inserted* says whether a new record was written this call.
    """
    try:
        df = ts.get_realtime_quotes(stock_id).tail(1)  # Single stock symbol
        data_dict = df.to_dict()
        data_price = data_dict['price'][0]
        data_time = data_dict['time'][0]
        # price "0.000" means the market has not opened; same time = no new tick.
        if data_price != "0.000" and data_time != last_time:
            last_time = data_time

            # orient='records' yields "[{...}]"; [1:-1] strips the brackets so
            # the remainder parses as a single JSON object.
            data_json_str = df.to_json(orient='records')[1:-1]
            data_json = json.loads(data_json_str)

            # Mongo '_id' "<date> <time>" doubles as a per-tick dedup key.
            data_json['_id'] = data_json['date'] + " " + data_json['time']
            logging.info(f"{data_json}")
            if add_stock_data(stock_id, data_json):
                logging.info(f"add_one_stock_record: {stock_id}.")
                declare_polices_handle(stock_id)
                return last_time, True
            else:
                logging.warning("add_stock_data error.")
    except Exception:
        logging.warning("add_stock error.", exc_info=True)

    return last_time, False
81 |
82 |
def stock_spider(stock_id, last_time):
    """Scheduled job: record one tick for *stock_id*, then re-schedule in 1s.

    The per-stock lock prevents overlapping runs for the same stock.
    """
    with stock_locks[stock_id]:
        try:
            if is_stock_time():
                last_time, result = add_one_stock_record(stock_id, last_time)
                if result:
                    logging.info('插入成功\n')
                else:
                    logging.debug('已经存在于数据库\n')
        except Exception as e:
            # BUG FIX: logging.warning("msg.", e) passed *e* as a %-format arg
            # with no placeholder, so the exception was never rendered.
            logging.warning("save tushare error. %s", e)

    # logging.info(f"stock_spider {stock_id}")
    schdule.enter(1, 0, stock_spider, (stock_id, last_time))
97 |
98 |
def set_stock_name_map(stock_id):
    """Record *stock_id*'s display name in the Redis name map.

    When the stock's collection has no named document yet, fall back to the
    raw id and schedule a retry in 60 seconds.
    """
    def lookup_name():
        sheet = get_db_sheet(database_name="tushare", sheet_name='sh_' + stock_id)
        first_doc = sheet.find_one()
        if first_doc is not None and 'name' in first_doc:
            return first_doc['name']
        schdule.enter(60, 0, set_stock_name_map, (stock_id,))
        return stock_id

    name_map = get_stock_name_map()
    name_map[stock_id] = lookup_name()
    db_redis.set("stock_name_map", json.dumps(name_map))
111 |
112 |
def discover_stock():
    """Scheduled job: start a spider for every stock id not yet tracked,
    then re-schedule itself in 10 seconds."""
    try:
        stock_ids = get_stock_ids()
        for stock_id in stock_ids:
            if stock_id not in stock_locks:
                logging.info(f"discover stock: {stock_id}")
                set_stock_name_map(stock_id)
                stock_locks[stock_id] = threading.Lock()
                # Start this stock's spider immediately; no last_time yet.
                schdule.enter(0, 0, stock_spider, (stock_id, None))
    except Exception as e:
        # BUG FIX: the extra arg needs a %-style placeholder to be logged.
        logging.warning("discover_stock error. %s", e)
    schdule.enter(10, 0, discover_stock, )
125 |
126 |
if __name__ == "__main__":
    logging.info(f"VERSION: {VERSION}")
    # Kick off stock discovery; schdule.run() then blocks, driving all jobs.
    schdule.enter(0, 0, discover_stock, )
    schdule.run()
    # print(ts.get_realtime_quotes('000726'))
132 |
--------------------------------------------------------------------------------
/server.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | import json
3 | import time
4 | import requests
5 |
6 | from flask import Flask, request
7 | import xmltodict
8 |
9 | from UTILS.db_sheets import get_users, insert_users, update_one_user
10 | from UTILS.config_port import user_send_host, user_send_port
11 | from UTILS.config import VERSION, LOGGING_LEVEL
12 |
# Flask app serving the WeChat official-account webhook.
application = Flask(__name__)
# application.debug = True
application.logger.setLevel(LOGGING_LEVEL)
16 |
17 |
def get_filter_users(filter: dict):
    """Return every user document matching all key/value pairs in *filter*."""
    def matches(user):
        return all(key in user and user[key] == value
                   for key, value in filter.items())

    return [user for user in get_users() if matches(user)]
30 |
31 |
@application.route('/')
def hello_world():
    """Liveness endpoint: greeting plus the current server time."""
    # BUG FIX: the original string literal contained a raw line break
    # (a syntax error); '\n' restores the intended newline separator.
    return 'Hello, World!' + '\n' + str(datetime.datetime.now())
35 |
36 |
@application.route('/wx', methods=["GET", "POST"])
def get():
    """WeChat official-account webhook.

    GET: server-verification handshake — echo back the 'echostr' parameter.
    POST: an XML message forwarded by the WeChat servers. Text commands are
    dispatched (list subscriptions, query a stock, bind an account,
    subscribe/unsubscribe) and an XML text reply is returned.
    """
    if request.method == "GET":  # WeChat server verification handshake
        my_echostr = request.args.get('echostr')  # echo the challenge back
        return my_echostr
    else:
        # A message forwarded from the WeChat servers
        xml_str = request.data
        if not xml_str:
            return ""
        resp_dict = None
        re_content = "信息错误"
        # Parse the XML payload; the message fields live under the 'xml' key
        xml_dict = xmltodict.parse(xml_str)
        xml_dict = xml_dict.get("xml")

        application.logger.info(f"xml_dict:{xml_dict}")

        # Extract the message type
        msg_type = xml_dict.get("MsgType")
        if msg_type == "text":  # a plain-text message from the user
            content = xml_dict.get("Content")
            # "查询已订阅股票" = list this WeChat user's subscribed stocks
            if content == "查询已订阅股票":
                users = get_filter_users(filter={'wechat': xml_dict.get("FromUserName")})
                if users:
                    re_content = str(users[0]['stocks'])
                else:
                    re_content = "尚未绑定微信"
            # elif re.fullmatch(r'\d{6}\.\w{2}', content):
            #     re_content = "code: " + content
            # "查询 <stock_id>" = trigger an immediate report for that stock
            elif content.startswith("查询 "):
                try:
                    datas = content.split(" ")
                    stock_id = datas[1]
                    users = get_filter_users(filter={'wechat': xml_dict.get("FromUserName")})
                    if users:
                        user_name = users[0]['_id']
                        url = f'http://{user_send_host}:{user_send_port}/send_user'
                        data = {'user_id': json.dumps(user_name), 'stock_id': json.dumps(stock_id),
                                'old_result_len': json.dumps(0)}  # forwarded as query params
                        re_len = requests.get(url, params=data).json()
                        re_content = "发送成功: {} {} {}".format(user_name, stock_id, re_len)
                    else:
                        re_content = "尚未绑定微信"
                except Exception as e:
                    re_content = "发送失败: " + str(e)
                    application.logger.error(re_content, exc_info=True)
            # "绑定 <user_name>" = bind this WeChat account to a user name
            elif content.startswith("绑定 "):
                datas = content.split(" ")
                user_name = datas[1]
                # NOTE: this binding logic needs more careful thought
                if get_filter_users(filter={'_id': user_name}):
                    re_content = "您要绑定的用户名:{},已被人绑定!请联系微信435878393".format(user_name)
                elif get_filter_users(filter={'wechat': xml_dict.get("FromUserName")}):
                    re_content = "您的微信已被绑定!请联系微信435878393"
                else:
                    if insert_users(document={'_id': user_name, 'wechat': xml_dict.get("FromUserName")}):
                        re_content = "绑定成功"
                    else:
                        re_content = "绑定失败"
            # "订阅 <stock_id>" = add a stock to the user's subscription set
            elif content.startswith("订阅 "):
                datas = content.split(" ")
                stock_id = datas[1]
                users = get_filter_users(filter={'wechat': xml_dict.get("FromUserName")})
                if users:
                    user = users[0]
                    if 'stocks' not in user.keys():
                        user['stocks'] = []
                    stocks = set(user['stocks'])
                    stocks.add(stock_id)
                    _filter = {'wechat': xml_dict.get("FromUserName")}
                    _update = {'$set': {'stocks': list(stocks)}}
                    result = update_one_user(filter=_filter, update=_update)
                    # default to a failure message; overwritten on success below
                    re_content = f"订阅失败. {_filter} {_update} {result.raw_result}"
                    if result is not None and result.modified_count > 0:
                        re_content = "订阅成功"
                else:
                    re_content = "尚未绑定微信"
            # "取消订阅 <stock_id>" = remove a stock from the subscription set
            elif content.startswith("取消订阅 "):
                datas = content.split(" ")
                stock_id = datas[1]
                users = get_filter_users(filter={'wechat': xml_dict.get("FromUserName")})
                if users:
                    user = users[0]
                    if 'stocks' not in user.keys():
                        user['stocks'] = []
                    stocks = set(user['stocks'])
                    if stock_id in stocks:
                        stocks.remove(stock_id)
                        _filter = {'wechat': xml_dict.get("FromUserName")}
                        _update = {'$set': {'stocks': list(stocks)}}
                        result = update_one_user(filter=_filter, update=_update)
                        # default to a failure message; overwritten on success below
                        re_content = f"尚未订阅{stock_id}. {_filter} {_update} {result.raw_result}"
                        if result is not None and result.modified_count > 0:
                            re_content = "取消订阅成功"
                    else:
                        re_content = f"尚未订阅{stock_id}"
                else:
                    re_content = "尚未绑定微信"
            else:
                # Unrecognized command: echo the text back
                re_content = content

        if not resp_dict:
            # Build the reply envelope returned through the WeChat servers
            resp_dict = {
                "xml": {
                    "ToUserName": xml_dict.get("FromUserName"),
                    "FromUserName": xml_dict.get("ToUserName"),
                    "CreateTime": int(time.time()),
                    "MsgType": "text",
                    "Content": re_content,
                }
            }

        # Serialize the dict back to an XML string
        resp_xml_str = xmltodict.unparse(resp_dict)

        application.logger.info(f"resp_xml_str:{resp_xml_str}")

        # Return the reply to the WeChat servers
        return resp_xml_str
158 |
159 |
if __name__ == "__main__":
    application.logger.info(f"VERSION: {VERSION}")
    # application.run(host="0.0.0.0", port=5000)
    # Flask development server (default port 5000); use a WSGI server in production.
    application.run(host="0.0.0.0")
164 |
--------------------------------------------------------------------------------