newlogger ok

aozhiwei 2020-03-26 14:21:45 +08:00
parent f208a5e46b
commit 8cbaf9c963
5 changed files with 136 additions and 23 deletions

third_party/f7 (vendored submodule)

@@ -1 +1 @@
-Subproject commit 558d01eae5f5c9770e35fc316292a9717a1fa5da
+Subproject commit 11b6e20c1554abbd591451f08103477e2015d718

tools/local_packages/f7 (new symbolic link)

@@ -0,0 +1 @@
+../../third_party/f7/f7

tools/local_packages/q7 (new symbolic link)

@@ -0,0 +1 @@
+../../third_party/q7/q7
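These links presumably let the scripts under tools/ import the vendored f7 and q7 packages directly. A minimal sketch of the equivalent setup in Python, assuming it runs from the repository root with both third_party checkouts in place:

import os

# Point tools/local_packages at the vendored checkouts
# (link targets taken from the diff above).
os.symlink('../../third_party/f7/f7', 'tools/local_packages/f7')
os.symlink('../../third_party/q7/q7', 'tools/local_packages/q7')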


@@ -9,23 +9,114 @@ import f7
 import os
 import time
 import json
+import shutil
+import datetime
-def saveToDB(conf, savetime):
-    f7.udplog.info('saveToDB begin')
+
+# Column layout of the ' | '-separated raw log lines.
+IDX_ZONE_NAME = 0
+IDX_ZONE_ID = 1
+IDX_PLATID = 2
+IDX_ACCOUNT = 3
+IDX_CHARID = 4
+IDX_LOGTIME = 5
+IDX_MAX = 6
+
+CONFIG_DIR = '../config' if f7.isOnlineEnv() else '/var/data/conf_test/wjtx/newlogger'
+
+def getLogConf(filename):
+    # Match the file name against the configured log-type key prefixes.
+    log_type = f7.app.ext['log_type']
+    for key in log_type:
+        if filename.find(key) == 0:
+            return log_type[key], key
+    return None, None
+
+def processOneFile(filename, log_key, log_conf):
+    log_dir = f7.app.ext['conf']['log_dir']
+    out_dir = f7.app.ext['conf']['out_dir']
+    old_dir = f7.app.ext['conf']['old_dir']
+    data = ''
+    # The event name is the log key minus its trailing '_'.
+    event_name = log_key
+    if log_key[-1] == '_':
+        event_name = log_key[:-1]
+    for line in open(log_dir + filename, 'r').readlines():
+        if len(line) > 0 and line[-1] == '\n':
+            line = line[:-1]
+        values = line.split(' | ')
+        if len(values) != len(log_conf['fields']) + IDX_MAX:
+            print(filename, values)
+            continue  # skip malformed lines rather than indexing past the end
+        properties = {
+            'os': values[IDX_PLATID],
+            'server_id': values[IDX_ZONE_ID]
+        }
+        account_id = values[IDX_ACCOUNT]
+        distinct_id = values[IDX_ZONE_ID]
+        platform = ''
+        if account_id == '-':
+            account_id = ''
+        if values[IDX_CHARID] != '':
+            properties['role_id'] = values[IDX_CHARID]
+        if account_id != '':
+            properties['account_id'] = account_id
+        # The platform is the part of the account id before the first '_'.
+        if account_id.find('_') != -1:
+            platform = account_id[:account_id.find('_')]
+        else:
+            platform = account_id
+        properties['platform'] = platform
+        for idx in range(len(log_conf['fields'])):
+            field_conf = log_conf['fields'][idx]
+            if field_conf['field_type'] == 'number':
+                properties[field_conf['field_name']] = float(values[IDX_MAX + idx])
+            else:
+                properties[field_conf['field_name']] = values[IDX_MAX + idx]
+        jsonobj = {
+            '#account_id': account_id,
+            '#distinct_id': distinct_id,
+            '#type': 'track',
+            '#time': values[IDX_LOGTIME],
+            '#ip': '127.0.0.1',
+            '#event_name': event_name,
+            'properties': properties
+        }
+        data += json.dumps(jsonobj) + '\n'
+    #end for
+    if len(data) > 0:
+        # Archive the source file, then write the converted records.
+        shutil.move(log_dir + filename, old_dir + filename)
+        out_file = open(out_dir + filename, 'w')
+        out_file.write(data)
+        out_file.close()
+
+def syncLogThreadProc():
     try:
         os.system('python redis_save.py >> redis_save.log')
+        file_names = os.listdir(f7.app.ext['conf']['log_dir'])
+        for filename in file_names:
+            log_conf, log_key = getLogConf(filename)
+            if log_conf:
+                processOneFile(filename, log_key, log_conf)
+        #end for
+        print('sync ok')
     except Exception as e:
-        f7.udplog.info('saveToDB error ' + e)
-    f7.udplog.info('saveToDB end')
-    # enter the next loop
-    f7.timer.callAt(q7.getDaySeconds(time.time(), 0) + 3600 * 24 + savetime,
-                    lambda : saveToDB(conf, savetime))
+        print(e, flush=True)
+
+def loggerSyncLog(request):
+    # Start one async sync; the 'syncing' flag keeps runs from overlapping.
+    if not f7.app.ext['syncing']:
+        def done_callback():
+            f7.app.ext['syncing'] = False
+        f7.app.createAsyncTask(done_callback, syncLogThreadProc, ())
+        f7.app.ext['syncing'] = True
+    return json.dumps({
+        'errcode': 0,
+        'errmsg': ''
+    })
+
 if __name__ == "__main__":
     f7.app.init('/data/logs/game1009newlogger/logs')
     print('pid:' + str(os.getpid()))
     f7.udplog.info('game1009newlogger start pid:' + str(os.getpid()))
-    conf = json.loads(open('../config/game1009newlogger.json', 'r').read())
+    conf = json.loads(open(CONFIG_DIR + '/game1009newlogger.json', 'r').read())
+    f7.app.ext['syncing'] = False
+    f7.app.ext['conf'] = conf
+    f7.app.ext['log_type'] = json.loads(open('log_type.json', 'r').read())
+    f7.app.registerHandler('Logger', 'syncLog', loggerSyncLog)
     f7.app.listen(conf['listen_port'])
     f7.app.start()
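For reference, a sketch of the conversion processOneFile() performs on one raw line. The ' | ' separator and the six fixed leading columns (zone name, zone id, platform id, account, char id, log time) come from the code above; the 'demo_' log type, its three fields, and all concrete values are invented for illustration:

# One hypothetical raw line for a made-up 'demo_' log type whose
# log_type.json entry lists name (string), level (number), career (number).
raw = 'zone1 | 1001 | android | wx_10086 | 20001 | 2020-03-26 14:21:45 | hero | 10 | 1'
values = raw.split(' | ')  # 6 fixed columns + 3 configured fields
# processOneFile() would emit one JSON record per line, shaped like:
# {"#account_id": "wx_10086", "#distinct_id": "1001", "#type": "track",
#  "#time": "2020-03-26 14:21:45", "#ip": "127.0.0.1", "#event_name": "demo",
#  "properties": {"os": "android", "server_id": "1001", "role_id": "20001",
#   "account_id": "wx_10086", "platform": "wx", "name": "hero",
#   "level": 10.0, "career": 1.0}}

Each processed file is archived from log_dir to old_dir, and its converted records land in out_dir for a downstream importer to pick up.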

log_type.json

@@ -169,6 +169,10 @@
         "field_name": "obj_name",
         "field_type": "string"
       },
+      {
+        "field_name": "event",
+        "field_type": "string"
+      },
       {
         "field_name": "type",
         "field_type": "string"
@@ -202,17 +206,37 @@
   "obj_": {
     "fields": [
       {
-        "field_name": "name",
+        "field_name": "src_name",
         "field_type": "string"
       },
       {
-        "field_name": "level",
+        "field_name": "src_level",
         "field_type": "number"
       },
       {
-        "field_name": "i_career",
+        "field_name": "src_career",
         "field_type": "number"
       },
+      {
+        "field_name": "dst_acct",
+        "field_type": "string"
+      },
+      {
+        "field_name": "dst_id",
+        "field_type": "string"
+      },
+      {
+        "field_name": "dst_name",
+        "field_type": "string"
+      },
+      {
+        "field_name": "dst_level",
+        "field_type": "number"
+      },
+      {
+        "field_name": "dst_career",
+        "field_type": "number"
+      },
       {
         "field_name": "obj_type",
         "field_type": "string"
@@ -273,10 +297,6 @@
         "field_name": "before_level",
         "field_type": "number"
       },
-      {
-        "field_name": "before_level",
-        "field_type": "number"
-      },
       {
         "field_name": "after_level",
         "field_type": "number"
@@ -415,7 +435,7 @@
     "fields": [
       {
         "field_name": "act_id",
-        "field_type": "number"
+        "field_type": "string"
       }
     ]
   },
@@ -633,6 +653,10 @@
         "field_name": "operate",
         "field_type": "string"
       },
+      {
+        "field_name": "map_type",
+        "field_type": "string"
+      },
       {
         "field_name": "other_id",
         "field_type": "string"
@@ -685,10 +709,6 @@
         "field_name": "before_rank",
         "field_type": "number"
       },
-      {
-        "field_name": "before_rank",
-        "field_type": "number"
-      },
       {
         "field_name": "after_rank",
         "field_type": "number"
@@ -947,7 +967,7 @@
       }
     ]
   },
-  "chat_": {
+  "_chat_": {
     "fields": [
       {
         "field_name": "type",