wuaho 2021-09-18 10:42:26 +08:00
parent 7fb03eb90c
commit 8ae3568c15
3 changed files with 46 additions and 23 deletions

app.py

@@ -2,6 +2,8 @@
 import time
 from multiprocessing import Process
+from kafka import TopicPartition
 from settings import settings
 from v2 import *
 from v2.struct_cache import StructCacheFile, StructCacheRedis
@@ -33,16 +35,16 @@ class XProcess(Process):
         last_ts = int(time.time())
         consumer, kafka_client = create_consumer(self.partition)
-        for topic, msg in consumer():
-            # print(msg)
-            type_ = msg['#type']
-            del msg['#type']
+        for msg in consumer():
+            data = msg.value
+            type_ = data['#type']
+            del data['#type']
             ts = int(time.time())
             if 'user' in type_:
                 # continue
                 obj = getattr(handler_user, type_)
-                handler_user.receive_data.append(UserAct(obj, msg))
+                handler_user.receive_data.append(UserAct(obj, data))
                 if len(handler_user.receive_data) >= 1000 or last_ts + 60 < ts:
                     last_ts = ts
                     handler_user.execute()
@@ -50,12 +52,12 @@ class XProcess(Process):
             elif 'track' in type_:
                 # continue
                 obj = getattr(handler_event, type_)
-                obj(msg)
+                obj(data)
             elif type_ == settings.STOP_SIGNAL:
                 # continue
                 # only honoured if sent within the last hour
                 self.log.info(type_)
-                if msg.get('#time', 0) + 3600 < int(time.time()):
+                if data.get('#time', 0) + 3600 < int(time.time()):
                     continue
                 kafka_client.close()
                 # stop consuming from Kafka
@@ -67,7 +69,7 @@ class XProcess(Process):
                 break
             elif type_ == 'test':
-                self.log.info(f'consumer partition {self.partition} -> {msg}')
+                self.log.info(f'topic->{msg.topic} | partition->{msg.partition} | offset->{msg.offset} | data->{data}')
             else:
                 continue
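The switch from unpacking (topic, msg) tuples to iterating raw ConsumerRecord objects means the payload now lives in msg.value, while the record's topic/partition/offset metadata stays available for logging. A minimal sketch of this consumption pattern, assuming kafka-python with a JSON value deserializer; the topic name and broker address below are placeholders, not values from this commit:

    import json
    from kafka import KafkaConsumer

    # Placeholder topic/broker -- the real values come from settings.
    consumer = KafkaConsumer(
        'example-topic',
        bootstrap_servers='localhost:9092',
        value_deserializer=lambda b: json.loads(b.decode('utf-8')),
    )

    for msg in consumer:
        # msg is a ConsumerRecord: metadata plus the deserialized payload.
        data = msg.value                 # a dict, thanks to the deserializer
        type_ = data.pop('#type', None)  # same '#type' dispatch key as above
        print(f'topic->{msg.topic} | partition->{msg.partition} | offset->{msg.offset} | data->{data}')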

View File

@@ -1,6 +1,8 @@
+# coding:utf-8
 import time
 import redis
+from kafka import TopicPartition
 from settings import settings
 from v2 import *
@@ -16,6 +18,7 @@ from v2.log import logger
 rdb = redis.Redis(**settings.REDIS_CONF)
 event_attr = EventAttr(rdb)
+partition = -1

 def run():
     db_client = CK(**settings.CK_CONFIG)
@@ -23,25 +26,21 @@ def run():
     handler_event = HandlerEvent(db_client, settings.GAME, ipsearch)
     handler_user = HandlerUser(db_client, settings.GAME)
     transmitter = Transmitter(db_client, settings.GAME, sketch, logger, lock, event_attr)
-    transmitter.add_source(handler_event, 10000, 10)
+    transmitter.add_source(handler_event, 1000, 10)
     transmitter.add_source(handler_user, 1000, 10)
     last_ts = int(time.time())
-    consumer, kafka_client = create_consumer(-1)
-    for topic, msg in consumer():
-        # print(msg)
-        type_ = msg['#type']
-        # if msg['#event_name'] != 'pay':
-        #     continue
-        # print(msg)
-        del msg['#type']
+    consumer, kafka_client = create_consumer(partition)
+    for msg in consumer():
+        data = msg.value
+        type_ = data['#type']
+        del data['#type']
         ts = int(time.time())
         if 'user' in type_:
             # continue
             obj = getattr(handler_user, type_)
-            handler_user.receive_data.append(UserAct(obj, msg))
+            handler_user.receive_data.append(UserAct(obj, data))
             if len(handler_user.receive_data) >= 1000 or last_ts + 60 < ts:
                 last_ts = ts
                 handler_user.execute()
@@ -49,12 +48,33 @@ def run():
         elif 'track' in type_:
             # continue
             obj = getattr(handler_event, type_)
-            obj(msg)
+            obj(data)
+        elif type_ == settings.STOP_SIGNAL:
+            # continue
+            # only honoured if sent within the last hour
+            print(type_)
+            if data.get('#time', 0) + 3600 < int(time.time()):
+                continue
+            kafka_client.close()
+            # stop consuming from Kafka
+            print(f'process {msg.partition}: waiting 90 seconds')
+            time.sleep(1)
+            print(f'process {msg.partition}: writing data')
+            transmitter.run()
+            print(f'process {msg.partition}: done')
+            break
+        elif type_ == 'test':
+            print(f'topic->{msg.topic} | partition->{msg.partition} | offset->{msg.offset} | data->{data}')
         else:
             continue
         transmitter.run()
+    while True:
+        time.sleep(5)
+        print(f'consumer partition {partition} has finished...')

 if __name__ == '__main__':
     run()
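The new STOP_SIGNAL branch gives each worker a clean shutdown path: it closes the Kafka client, flushes whatever the handlers have buffered via transmitter.run(), and then parks in a while True keep-alive loop. The signal is just another message on the topic, and it is skipped unless its '#time' field is less than an hour old. A hedged sketch of producing such a signal, assuming kafka-python; the broker address, topic name, and signal value are placeholders (the real ones come from settings):

    import json
    import time
    from kafka import KafkaProducer

    # Placeholder broker/topic/signal -- adjust to match settings.
    producer = KafkaProducer(
        bootstrap_servers='localhost:9092',
        value_serializer=lambda d: json.dumps(d).encode('utf-8'),
    )
    # '#time' must be recent, otherwise the consumer ignores the signal.
    producer.send('example-topic', {'#type': 'stop_signal', '#time': int(time.time())})
    producer.flush()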

View File

@@ -16,8 +16,9 @@ def create_consumer(partition=-1):
         c.subscribe([settings.SUBSCRIBE_TOPIC])
         for msg in c:
             # print(msg)
-            topic = msg.topic
-            val = msg.value
-            yield topic, val
+            yield msg
+            # topic = msg.topic
+            # val = msg.value
+            # yield topic, val
     return consumer, c
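After this change the generator yields the whole ConsumerRecord instead of a (topic, value) tuple, which is what lets the callers log msg.topic, msg.partition and msg.offset. A sketch of what a create_consumer along these lines might look like, assuming kafka-python; the broker address, topic name, deserializer, and the TopicPartition-based pinning are assumptions for illustration, not taken from this commit:

    import json
    from kafka import KafkaConsumer, TopicPartition

    def create_consumer(partition=-1):
        # Placeholder connection details -- the real ones come from settings.
        c = KafkaConsumer(
            bootstrap_servers='localhost:9092',
            value_deserializer=lambda b: json.loads(b.decode('utf-8')),
        )

        def consumer():
            if partition >= 0:
                # Pin this worker to one partition (hence the TopicPartition import).
                c.assign([TopicPartition('example-topic', partition)])
            else:
                c.subscribe(['example-topic'])
            for msg in c:
                # Yield the raw ConsumerRecord: metadata plus the deserialized value.
                yield msg

        return consumer, c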