commit 46f640fcad
parent b8e99f3980
@@ -129,6 +129,20 @@ async def add_report(data_in: schemas.AddReport,
     return schemas.Msg(code=0, msg='ok', data='ok')
+
+
+@router.post("/edit_report")
+async def edit_report(data_in: schemas.EditReport,
+                      game: str,
+                      db: AsyncIOMotorDatabase = Depends(get_database),
+                      current_user: schemas.UserDB = Depends(deps.get_current_user)
+                      ):
+    """Edit a report"""
+
+    res = await crud.dashboard.update_one(db, {'_id': data_in.id, 'reports.report_id': data_in.report.report_id},
+                                          {'$set': {'reports.$': data_in.report.dict()}})
+
+    return schemas.Msg(code=0, msg='ok', data='ok')
 
 
 @router.post("/del_report")
 async def del_report(
         game: str,
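For reference, a call against the new endpoint might look like the sketch below. The URL prefix, ids, and token are hypothetical; the payload shape is inferred from schemas.EditReport (a DBBase wrapping a full Report) and the handler above.

import requests

# Hypothetical host, route prefix, ids and token.
resp = requests.post(
    "http://localhost:8000/api/v1/dashboard/edit_report",
    params={"game": "demo_game"},
    headers={"Authorization": "Bearer <token>"},
    json={
        "id": "60a1f0c2e4b0a1b2c3d4e5f6",              # dashboard _id
        "report": {
            "report_id": "60b2d1a3e4b0a1b2c3d4e5f7",  # must already exist in reports[]
            "name": "daily_active",
            # ...remaining Report fields
        },
    },
)
print(resp.json())  # -> {'code': 0, 'msg': 'ok', 'data': 'ok'}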
@@ -1,10 +1,13 @@
 import datetime
 from collections import defaultdict
+import mimetypes
+from urllib.parse import quote
 
 import pandas as pd
 import numpy as np
 from fastapi import APIRouter, Depends, Request
 from motor.motor_asyncio import AsyncIOMotorDatabase
+from fastapi.responses import StreamingResponse
 
 import crud, schemas
 from common import *
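The two added stdlib imports drive the export's download filename and MIME type. A quick sketch of their behavior on a non-ASCII report name (the name here is hypothetical):

import mimetypes
from urllib.parse import quote

file_name = quote('日活报表.xlsx')  # "daily active report", a made-up name
# -> '%E6%97%A5%E6%B4%BB%E6%8A%A5%E8%A1%A8.xlsx', safe for a Content-Disposition header
mime = mimetypes.guess_type(file_name)[0]
# -> usually 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'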
@@ -16,6 +19,7 @@ from db.redisdb import get_redis_pool, RedisDrive
 
 from models.behavior_analysis import BehaviorAnalysis
 from models.user_analysis import UserAnalysis
+from utils import DfToStream
 
 router = APIRouter()
 
@@ -46,6 +50,54 @@ async def event_model_sql(
     return schemas.Msg(code=0, msg='ok', data=data)
 
 
+@router.post("/event_model_export")
+async def event_model_export(request: Request,
+                             game: str,
+                             ckdb: CKDrive = Depends(get_ck_db),
+                             analysis: BehaviorAnalysis = Depends(BehaviorAnalysis),
+                             current_user: schemas.UserDB = Depends(deps.get_current_user)
+                             ):
+    """Event analysis model: data export"""
+    await analysis.init()
+    sqls = analysis.event_model_sql()
+    file_name = quote(f'{sqls[0]["report_name"]}.xlsx')
+    mime = mimetypes.guess_type(file_name)[0]
+    excels = []
+    for item in sqls:
+        sql = item['sql']
+        event_name = item['event_name']
+        df = await ckdb.query_dataframe(sql)
+        excels.append((df, event_name))
+    df_to_stream = DfToStream(*excels)
+    with df_to_stream as d:
+        export = d.to_stream()
+    return StreamingResponse(export, media_type=mime, headers={'Content-Disposition': f'filename="{file_name}"'})
+
+
+#
+# @router.get("/event_model_export")
+# async def event_model_export(request: Request,
+#                              game: str,
+#                              report_id: str,
+#                              ckdb: CKDrive = Depends(get_ck_db),
+#                              # analysis: BehaviorAnalysis = Depends(BehaviorAnalysis),
+#                              current_user: schemas.UserDB = Depends(deps.get_current_user)
+#                              ):
+#     """Event analysis model: data export"""
+#     analysis = BehaviorAnalysis(game, schemas.CkQuery(report_id=report_id), get_redis_pool())
+#     await analysis.init()
+#     sqls = analysis.event_model_sql()
+#     res = []
+#     file_name = f'{sqls[0]["report_name"]}.xlsx'
+#     mime = mimetypes.guess_type(file_name)[0]
+#     for item in sqls[:1]:
+#         sql = item['sql']
+#         event_name = item['event_name']
+#         df = await ckdb.query_dataframe(sql)
+#         file = df_to_stream(df, event_name)
+#     return StreamingResponse(file, media_type=mime, headers={'Content-Disposition': f'filename="{file_name}"'})
+#
 
 @router.post("/event_model")
 async def event_model(
         request: Request,
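A client-side sketch of pulling the export (hypothetical host, prefix, token, and body; the handler builds one sheet per event and streams a single .xlsx back):

import requests

# Hypothetical URL and credentials; the query body is whatever
# BehaviorAnalysis expects here, e.g. a saved report_id.
resp = requests.post(
    "http://localhost:8000/api/v1/query/event_model_export",
    params={"game": "demo_game"},
    headers={"Authorization": "Bearer <token>"},
    json={"report_id": "60b2d1a3e4b0a1b2c3d4e5f7"},
)
with open("event_model.xlsx", "wb") as f:
    f.write(resp.content)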
@@ -83,7 +135,11 @@ async def event_model(
             q['groups'].append(groupby)
             q['values'].append(df['values'].to_list())
             q['sum'].append(int(df['values'].sum()))
-            q['date_range'] = [f'{i[0]}-{i[1]}' for i in df.set_index(['svrindex', 'name']).index]
+            if groupby:
+                q['date_range'] = [f'{i}' for i in df.set_index(groupby).index]
+            else:
+                q['date_range'] = ['合计']  # '合计' = "Total"
+
             res.append(q)
             continue
 
@@ -111,6 +167,10 @@ async def event_model(
                 concat_data.append((i, 0))
             df = pd.concat([df, pd.DataFrame(concat_data, columns=df.columns)])
         df.sort_values('date', inplace=True)
+        if len(df) >= 2:
+            q['chain_ratio'] = round((df.iloc[-1, 1] - df.iloc[-2, 1]) * 100 / df.iloc[-2, 1], 2)
+        if len(df) >= 8:
+            q['wow'] = round((df.iloc[-1, 1] - df.iloc[-8, 1]) * 100 / df.iloc[-8, 1], 2)
         q['values'].append(df['values'].to_list())
         q['sum'].append(int(df['values'].sum()))
         if item['time_particle'] in ('P1D', 'P1W'):
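To make the two new metrics concrete, here is the same arithmetic on a toy daily series (invented data; 'values' sits at positional column 1, matching the df.iloc[..., 1] accesses above):

import pandas as pd

# Toy series of nine days.
df = pd.DataFrame({
    'date': pd.date_range('2021-06-01', periods=9, freq='D'),
    'values': [100, 110, 90, 120, 130, 125, 140, 150, 165],
})
df.sort_values('date', inplace=True)

# chain_ratio: percent change of the last period vs. the one before it.
chain_ratio = round((df.iloc[-1, 1] - df.iloc[-2, 1]) * 100 / df.iloc[-2, 1], 2)  # (165-150)/150 -> 10.0

# wow: percent change vs. the same weekday one week earlier (7 rows back).
wow = round((df.iloc[-1, 1] - df.iloc[-8, 1]) * 100 / df.iloc[-8, 1], 2)  # (165-110)/110 -> 50.0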
@@ -157,7 +217,7 @@ async def retention_model(request: Request,
     for i in set(date_range) - set(df.index):
         df.loc[i] = 0
     df.sort_index(inplace=True)
-    days = [i for i in range(unit_num+1)]
+    days = [i for i in range(unit_num + 1)]
     summary_values = {}
     today = datetime.datetime.today().date()
     for date, value in df.T.items():
@@ -167,11 +227,11 @@ async def retention_model(request: Request,
         tmp['n'] = []
         tmp['p_outflow'] = []
         tmp['n_outflow'] = []
-        for i in range((today - date).days+1):
+        for i in range((today - date).days + 1):
             if i > unit_num:
                 break
-            p = float(getattr(value, f'p{i+1}'))
-            n = int(getattr(value, f'cnt{i+1}'))
+            p = float(getattr(value, f'p{i + 1}'))
+            n = int(getattr(value, f'cnt{i + 1}'))
             p_outflow = round(100 - p, 2)
             n_outflow = value.cnt0 - n
             tmp['p'].append(p)
@ -86,8 +86,11 @@ async def read_report(
|
|||||||
reports = await crud.report.read_report(db,project_id=data_in.project_id,
|
reports = await crud.report.read_report(db,project_id=data_in.project_id,
|
||||||
projection=projection, **ext_where)
|
projection=projection, **ext_where)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
for item in reports:
|
for item in reports:
|
||||||
item['added'] = False
|
item['added'] = False
|
||||||
|
# item['name'] = item['name']
|
||||||
item['show_config'] = dict()
|
item['show_config'] = dict()
|
||||||
added_ids = {item['report_id']: item for item in dashboard.get('reports', [])}
|
added_ids = {item['report_id']: item for item in dashboard.get('reports', [])}
|
||||||
if item['_id'] in added_ids:
|
if item['_id'] in added_ids:
|
||||||
|
@@ -99,6 +99,7 @@ class BehaviorAnalysis:
         self.time_particle = None
         self.date_range = None
         self.unit_num = None
+        self.report_name = None
 
     async def init(self):
         if self.data_in.report_id:
@@ -116,6 +117,8 @@ class BehaviorAnalysis:
                 self.event_view['endTime'] = arrow.get().shift(days=-int(e_days)).strftime('%Y-%m-%d 23:59:59')
                 self.event_view['startTime'] = arrow.get().shift(days=-int(s_days)).strftime('%Y-%m-%d 00:00:00')
 
+            self.report_name = report["name"]
+
 
         else:
             self.event_view = self.data_in.eventView
@@ -360,7 +363,8 @@ class BehaviorAnalysis:
                 'date_range': self.date_range,
                 'event_name': event_name_display or event_name,
                 'format': format,
-                'time_particle': self.time_particle
+                'time_particle': self.time_particle,
+                'report_name': self.report_name or 'temp'
             })
 
         return sqls
@@ -435,8 +439,10 @@ ORDER BY level
         quota_interval_arr = event.get('quotaIntervalArr')
 
         where = [
-            event_date_col >= self.start_date,
-            event_date_col <= self.end_date,
+            # event_date_col >= self.start_date,
+            # event_date_col <= self.end_date,
+            func.addHours(event_time_col, self.zone_time) >= self.start_date,
+            func.addHours(event_time_col, self.zone_time) <= self.end_date,
             event_name_col == event_name
         ]
         event_filter, _ = self.handler_filts(*self.events[0]['filts'])
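The swapped-in predicates compare the event timestamp shifted by the project's timezone offset instead of the raw date column. Assuming `func` here is SQLAlchemy's generic function factory (as the surrounding query-builder code suggests), a sketch of what gets rendered:

from sqlalchemy import column, func

# Assumed column name and offset, purely for illustration.
event_time_col = column('event_time')
zone_time = 8  # e.g. UTC+8

expr = func.addHours(event_time_col, zone_time) >= '2021-06-01 00:00:00'
# Compiled for ClickHouse this is roughly:
#   addHours(event_time, 8) >= '2021-06-01 00:00:00'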
@@ -611,7 +617,7 @@ ORDER BY values desc"""
         days = (arrow.get(self.end_date).date() - arrow.get(self.start_date).date()).days
         keep = []
         cnt = []
-        for i in range(days+1):
+        for i in range(days + 1):
             keep.append(
                 f"""cnt{i + 1},round(cnt{i + 1} * 100 / cnt0, 2) as `p{i + 1}`""")
             cnt.append(f"""sum(if(dateDiff('day',a.reg_date,b.visit_date)={i},1,0)) as cnt{i + 1}""")
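Worked directly from the f-strings above, a two-day range (days == 1) yields these SELECT fragments:

days = 1  # end_date - start_date == 1 day
keep, cnt = [], []
for i in range(days + 1):
    keep.append(f"""cnt{i + 1},round(cnt{i + 1} * 100 / cnt0, 2) as `p{i + 1}`""")
    cnt.append(f"""sum(if(dateDiff('day',a.reg_date,b.visit_date)={i},1,0)) as cnt{i + 1}""")

# keep -> ["cnt1,round(cnt1 * 100 / cnt0, 2) as `p1`",
#          "cnt2,round(cnt2 * 100 / cnt0, 2) as `p2`"]
# cnt  -> ["sum(if(dateDiff('day',a.reg_date,b.visit_date)=0,1,0)) as cnt1",
#          "sum(if(dateDiff('day',a.reg_date,b.visit_date)=1,1,0)) as cnt2"]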
@@ -643,6 +649,6 @@ group by a.reg_date) log on reg.date=log.reg_date
         print(sql)
         return {
             'sql': sql,
-            'date_range':self.date_range,
+            'date_range': self.date_range,
             'unit_num': self.unit_num
         }
@@ -71,6 +71,10 @@ class DelReport(DBBase):
     report_id: str
 
 
+class EditReport(DBBase):
+    report: Report
+
+
 # --------------------------------------------------------------
 # Database models
 class DashboardDB(DBBase):
@@ -2,3 +2,4 @@ from .adapter import *
 
 from . import casbin
 from .func import *
+from .export import *
utils/export.py (new file, 26 lines)
@@ -0,0 +1,26 @@
+from io import StringIO, BytesIO
+import pandas as pd
+
+
+class DfToStream:
+    def __init__(self, *args, index=False, **kwargs):
+        self.dfs = args
+        self.index = index
+        self.writer = None
+        self.output = None
+
+    def __enter__(self):
+        self.output = BytesIO()
+        self.writer = pd.ExcelWriter(self.output, engine='xlsxwriter')
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self.writer.close()
+        self.output.seek(0)
+
+    def to_stream(self):
+        for item in self.dfs:
+            df = item[0]
+            sheet_name = item[1]
+            df.to_excel(self.writer, encoding='utf-8', sheet_name=sheet_name, index=self.index)
+        return self.output
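A standalone sketch of the new helper (toy frames; requires the xlsxwriter engine named above). Note the workbook is only finalized on context exit, where the writer is closed and the buffer rewound, so read the stream after the with-block:

import pandas as pd
from utils import DfToStream

df1 = pd.DataFrame({'date': ['2021-06-01', '2021-06-02'], 'values': [10, 12]})
df2 = pd.DataFrame({'date': ['2021-06-01', '2021-06-02'], 'values': [3, 5]})

# One (df, sheet_name) pair per sheet, as in event_model_export.
with DfToStream((df1, 'login'), (df2, 'pay')) as d:
    stream = d.to_stream()

with open('out.xlsx', 'wb') as f:
    f.write(stream.read())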