wuaho 2021-09-06 19:41:21 +08:00
parent 779369ecf6
commit 28ccf54d65
4 changed files with 67 additions and 20 deletions

View File

@@ -178,7 +178,7 @@ async def load_prop_quotas(request: Request,
'id': item['name'],
'data_type': data_type,
'title': title,
'category': settings.CK_OPERATOR.get(data_type) or []
# 'category': settings.CK_OPERATOR.get(data_type) or []
}
event_props.append(event_prop)
if data_in.model == 'scatter':
@@ -188,21 +188,18 @@ async def load_prop_quotas(request: Request,
"data_type": None,
"analysis": "times",
"title": "次数",
"category": []
},
{
"id": "*",
"data_type": None,
"analysis": "number_of_days",
"title": "天数",
"category": []
},
{
"id": "*",
"data_type": None,
"analysis": "number_of_hours",
"title": "小时数",
"category": []
},
]
else:
@@ -212,21 +209,18 @@ async def load_prop_quotas(request: Request,
"data_type": None,
"analysis": "total_count",
"title": "总次数",
"category": []
},
{
"id": "*",
"analysis": "touch_user_count",
"data_type": None,
"title": "触发用户数",
"category": []
},
{
"id": "*",
"analysis": "touch_user_avg",
"data_type": None,
"title": "人均次数",
"category": []
},
]
@@ -263,7 +257,6 @@ async def load_filter_props(request: Request,
'id': item['name'],
'data_type': data_type,
'title': title,
'category': settings.CK_FILTER.get(data_type) or []
}
event_props.append(event_prop)
@@ -277,7 +270,6 @@ async def load_filter_props(request: Request,
'id': item['name'],
'data_type': data_type,
'title': title,
'category': settings.CK_FILTER.get(data_type) or []
}
user_props.append(user_prop)

View File

@@ -198,6 +198,15 @@ async def event_model(
q['date_range'] = [d.strftime('%Y-%m-%d %H:%M:%S') for d in q['date_range']]
# q['eventNameDisplay']=item['event_name_display']
res.append(q)
# sort each series by its sum, descending
for item in res:
sort_key = np.argsort(np.array(item['sum']))[::-1]
if item.get('groups'):
item['groups'] = np.array(item['groups'])[sort_key].tolist()
item['values'] = np.array(item['values'])[sort_key].tolist()
item['sum'] = np.array(item['sum'])[sort_key].tolist()
item['avg'] = np.array(item['avg'])[sort_key].tolist()
return schemas.Msg(code=0, msg='ok', data=res)
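The block above sorts each result item by its per-group sums in descending order, building one argsort permutation and applying it to every parallel list. A minimal standalone sketch of the same pattern (field names mirror the response dict above; the sample data is made up):

import numpy as np

item = {
    'groups': ['cn', 'us', 'jp'],
    'values': [[1, 2], [5, 6], [3, 4]],
    'sum': [3, 11, 7],
    'avg': [1.5, 5.5, 3.5],
}

# indices that sort 'sum' ascending, reversed to get descending order
sort_key = np.argsort(np.array(item['sum']))[::-1]

# apply the same permutation to every parallel list
for field in ('groups', 'values', 'sum', 'avg'):
    item[field] = np.array(item[field])[sort_key].tolist()

print(item['groups'])  # ['us', 'jp', 'cn']
print(item['sum'])     # [11, 7, 3]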
@@ -274,6 +283,27 @@ async def retention_model(request: Request,
return schemas.Msg(code=0, msg='ok', data=resp)
@router.post("/retention_model_export")
async def retention_model_export(request: Request,
game: str,
ckdb: CKDrive = Depends(get_ck_db),
analysis: BehaviorAnalysis = Depends(BehaviorAnalysis),
current_user: schemas.UserDB = Depends(deps.get_current_user)
):
""" 留存分析模型 数据导出"""
await analysis.init()
data = analysis.retention_model_sql2()
file_name = quote(f'留存分析.xlsx')
mime = mimetypes.guess_type(file_name)[0]
sql = data['sql']
df = await ckdb.query_dataframe(sql)
df_to_stream = DfToStream((df, '留存分析'))
with df_to_stream as d:
export = d.to_stream()
return StreamingResponse(export, media_type=mime, headers={'Content-Disposition': f'filename="{file_name}"'})
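Both new export endpoints lean on DfToStream (imported from utils further down) as a context manager whose to_stream() returns a buffer that StreamingResponse can send. The helper itself is not part of this diff; a minimal sketch of how such a class could be shaped, assuming it writes (DataFrame, sheet_name) pairs into an in-memory xlsx via pandas.ExcelWriter. Names and details here are assumptions, not the project's actual implementation:

import io
import pandas as pd

class DfToStream:
    # hypothetical stand-in for utils.DfToStream as used in the endpoints above
    def __init__(self, *frames):
        # each argument is a (DataFrame, sheet_name) pair, e.g. (df, '留存分析')
        self.frames = frames
        self.buffer = io.BytesIO()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc, tb):
        return False

    def to_stream(self):
        # write every frame to its own sheet, then rewind so the response can stream it
        # (engine='xlsxwriter' is an assumption; it requires the xlsxwriter package)
        with pd.ExcelWriter(self.buffer, engine='xlsxwriter') as writer:
            for df, sheet_name in self.frames:
                df.to_excel(writer, sheet_name=sheet_name, index=False)
        self.buffer.seek(0)
        return self.buffer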
@router.post("/retention_model_del", deprecated=True)
async def retention_model_del(
request: Request,

View File

@@ -1,9 +1,12 @@
import mimetypes
from collections import defaultdict
from urllib.parse import quote
import pandas as pd
import numpy as np
from fastapi import APIRouter, Depends, Request
from motor.motor_asyncio import AsyncIOMotorDatabase
from starlette.responses import StreamingResponse
import crud, schemas
from common import *
@@ -16,6 +19,7 @@ from db.redisdb import get_redis_pool, RedisDrive
from models.behavior_analysis import BehaviorAnalysis
from models.user_analysis import UserAnalysis
from models.x_analysis import XAnalysis
from utils import DfToStream
router = APIRouter()
@@ -60,3 +64,24 @@ async def ltv_model_sql(
}
return schemas.Msg(code=0, msg='ok', data=data)
@router.post("/ltv_model_export")
async def ltv_model_export(request: Request,
game: str,
ckdb: CKDrive = Depends(get_ck_db),
analysis: XAnalysis = Depends(XAnalysis),
current_user: schemas.UserDB = Depends(deps.get_current_user)
):
""" ltv分析 数据导出"""
await analysis.init()
data = analysis.ltv_model_sql()
file_name = quote(f'ltv.xlsx')
mime = mimetypes.guess_type(file_name)[0]
sql = data['sql']
df = await ckdb.query_dataframe(sql)
df_to_stream = DfToStream((df, 'ltv'))
with df_to_stream as d:
export = d.to_stream()
return StreamingResponse(export, media_type=mime, headers={'Content-Disposition': f'filename="{file_name}"'})
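For completeness, a hedged example of how a client might call the new export route and save the workbook. The host, route prefix, auth header, and request body shape are assumptions based only on the handler signature above, not on the project's actual routing or schemas:

import httpx

resp = httpx.post(
    'http://localhost:8000/ltv_model_export',   # assumed host and route prefix
    params={'game': 'demo_game'},                # illustrative value for the game parameter
    json={'eventView': {}, 'events': []},        # placeholder body; the real shape is whatever XAnalysis expects
    headers={'Authorization': 'Bearer <token>'}, # assumed bearer-token auth behind deps.get_current_user
)
resp.raise_for_status()

with open('ltv.xlsx', 'wb') as f:
    f.write(resp.content)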

View File

@@ -354,7 +354,7 @@ class BehaviorAnalysis:
qry = qry.order_by(sa.Column('date'))
else:
qry = qry.order_by(sa.Column('values').desc())
qry = qry.limit(1000)
qry = qry.limit(10000)
sql = str(qry.compile(compile_kwargs={"literal_binds": True}))
print(sql)
@@ -592,8 +592,8 @@ ORDER BY values desc"""
}
def retention_model_sql2(self):
filter_item_type = self.event_view.get('filter-item-type')
filter_item = self.event_view.get('filter-item')
filter_item_type = self.event_view.get('filter_item_type')
filter_item = self.event_view.get('filter_item')
event_name_a = self.events[0]['eventName']
event_name_b = self.events[1]['eventName']