-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathsync_for_cache.py
More file actions
59 lines (48 loc) · 1.74 KB
/
sync_for_cache.py
File metadata and controls
59 lines (48 loc) · 1.74 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
"""
Sync transaction data into the Redis cache.

Run in Docker:
sudo docker run -d --name sync-for-cache -v `pwd`:/cpchain-website liaojl/website python sync_for_cache.py
sudo docker logs -f --since 1m sync-for-cache
"""
import sys
import time
import json
from log import get_log
from tools.dingding import post_message
from apps.chain.db import txs_collection
from tools import redis_helper as rh
logger = get_log('sync-for-cache')
def sync_for_cache(txs):
    """Continuously mirror non-zero-value transactions from MongoDB into Redis.

    Every 10 seconds, query ``txs`` for transactions with ``value > 0`` and a
    ``timestamp`` newer than the last processed one (checkpointed in Redis
    under the key ``current_tx_ts``), and append each new transaction to a
    per-sender list in Redis via ``rh.push_tx``.

    :param txs: MongoDB transactions collection (must support
        ``count_documents`` and ``find``).
    """
    try:
        rc = rh.get_redis_client()
        # Redis GET returns bytes/str (or None on first run); coerce to int
        # so the Mongo "$gt" comparison matches numeric timestamps.
        raw_ts = rc.get('current_tx_ts')
        current_tx_ts = int(raw_ts) if raw_ts is not None else 0
        while True:
            filters = {
                "value": {
                    "$gt": 0
                },
                "timestamp": {
                    "$gt": current_tx_ts
                }
            }
            count = txs.count_documents(filters)
            logger.info("txs count: %d, current timestamp: %s", count, str(current_tx_ts))
            # Fetch all transactions whose value is non-zero.
            cursor = txs.find(filters, projection={"_id": False})
            for r in cursor:
                # Compare against the sender's last cached transaction; only
                # strictly-newer timestamps are appended, keeping the
                # per-sender list ordered by time.
                latest = rh.tail_tx(rc, r['from'])
                if latest is not None:
                    tx = json.loads(latest)
                    if r['timestamp'] <= tx['timestamp']:
                        continue
                rh.push_tx(rc, r['from'], json.dumps(r))
                current_tx_ts = r['timestamp']
                # Persist the checkpoint so a restart resumes where we left off.
                rc.set('current_tx_ts', current_tx_ts)
            time.sleep(10)
    except Exception as e:
        # Top-level boundary: log and send an alert; the process exits and is
        # expected to be restarted by the container supervisor.
        logger.error(e)
        post_message(f"**sync for cache error:**\n{e}")
if __name__ == '__main__':
    # Entry point: run the sync loop against the project's txs collection.
    sync_for_cache(txs_collection)