diff --git a/api/api_v1/endpoints/query.py b/api/api_v1/endpoints/query.py
index 5a8eee2..794d38d 100644
--- a/api/api_v1/endpoints/query.py
+++ b/api/api_v1/endpoints/query.py
@@ -54,6 +54,8 @@ async def query_sql(
     sql = data_in.sql
     sql = sql.replace('$game', game)
     df = await ckdb.query_dataframe(sql)
+    if df.empty:
+        return schemas.Msg(code=-9, msg='无数据', data=None)
     df_to_stream = DfToStream((df, 'result'))
     with df_to_stream as d:
         export = d.to_stream()
@@ -93,6 +95,8 @@ async def event_model_export(request: Request,
         sql = item['sql']
         event_name = item['event_name']
         df = await ckdb.query_dataframe(sql)
+        if df.empty:
+            continue
         if 'date' in df:
             df.sort_values('date', inplace=True)
         try:
@@ -288,7 +292,7 @@ async def retention_model(request: Request,
     sql = res['sql']
     df = await ckdb.query_dataframe(sql)
     if df.empty:
-        return schemas.Msg(code=0, msg='无数据', data=None)
+        return schemas.Msg(code=-9, msg='无数据', data=None)
     date_range = res['date_range']
     unit_num = res['unit_num']
@@ -361,6 +365,8 @@ async def retention_model_export(request: Request,
     sql = data['sql']
     df = await ckdb.query_dataframe(sql)
+    if df.empty:
+        return schemas.Msg(code=-9, msg='无数据', data=None)
     df_to_stream = DfToStream((df, '留存分析'))
     with df_to_stream as d:
         export = d.to_stream()
@@ -386,6 +392,8 @@ async def retention_model_del(
     title = await crud.event_mana.get_show_name(db, game, event_a)
     title = f'{title}用户数'
     df = await ckdb.query_dataframe(sql)
+    if df.empty:
+        return schemas.Msg(code=-9, msg='无数据', data=None)
     concat_data = []
     df = pd.concat([df, pd.DataFrame(concat_data, columns=df.columns)])
     df['date'] = df['date'].apply(lambda x: x.date())
@@ -473,6 +481,8 @@ async def funnel_model(
     groupby = res['groupby']
     df = await ckdb.query_dataframe(sql)
+    if df.empty:
+        return schemas.Msg(code=-9, msg='无数据', data=None)
     # 补齐level数据
     concat_data = []
     for key, tmp_df in df.groupby(['date'] + groupby):
@@ -616,6 +626,8 @@ async def retention_model_export(request: Request,
     mime = mimetypes.guess_type(file_name)[0]
     sql = res['sql']
     df = await ckdb.query_dataframe(sql)
+    if df.empty:
+        return schemas.Msg(code=-9, msg='无数据', data=None)
     interval_type = res['interval_type']
     analysis = res['analysis']
     groupby = res['groupby']
@@ -734,6 +746,8 @@ async def scatter_model(
     res = await analysis.scatter_model_sql()
     sql = res['sql']
     df = await ckdb.query_dataframe(sql)
+    if df.empty:
+        return schemas.Msg(code=-9, msg='无数据', data=None)
     df.fillna(0, inplace=True)
     df['values'] = df['values'].astype(int)
     interval_type = res['interval_type']
@@ -858,6 +872,8 @@ async def trace_model_sql(
     res = await analysis.trace_model_sql()
     sql = res['sql']
     df = await ckdb.query_dataframe(sql)
+    if df.empty:
+        return schemas.Msg(code=-9, msg='无数据', data=None)
     chain_dict = defaultdict(dict)
     nodes = {'流失'}
     for event_names, count in zip(df['event_chain'], df['values']):
@@ -923,6 +939,8 @@ async def user_property_model_export(
     sql = data['sql']
     df = await ckdb.query_dataframe(sql)
+    if df.empty:
+        return schemas.Msg(code=-9, msg='无数据', data=None)
     df_to_stream = DfToStream((df, '用户属性'))
     with df_to_stream as d:
         export = d.to_stream()
@@ -944,12 +962,11 @@ async def user_property_model(
     quota = res['quota']
     groupby = res['groupby']
     df = await ckdb.query_dataframe(sql)
-    if len(df) == 0:
-        return schemas.Msg(code=0, msg='查无数据', data=None)
+    if df.empty:
+        return schemas.Msg(code=-9, msg='无数据', data=None)
     # 没有分组
-    data = {'groupby': groupby}
-    title = []
+    data = {}
     if not groupby:
         data['总体'] = int(df['values'][0])
@@ -957,7 +974,6 @@
     else:
         sum_s = df.groupby(groupby)['values'].sum()
-        data = dict()
         for key, val in sum_s.items():
             if isinstance(key, tuple):
                 key = ','.join([str(i) for i in key])
diff --git a/api/api_v1/endpoints/xquery.py b/api/api_v1/endpoints/xquery.py
index affc29e..f0fe3c2 100644
--- a/api/api_v1/endpoints/xquery.py
+++ b/api/api_v1/endpoints/xquery.py
@@ -54,7 +54,7 @@ async def ltv_model_sql(
     ltv_n = res['ltv_n']
     df = await ckdb.query_dataframe(sql)
     if df.empty:
-        return schemas.Msg(code=-1, msg='查无数据')
+        return schemas.Msg(code=-9, msg='查无数据')
     df.fillna(0, inplace=True)
     for d in set(res['date_range']) - set(df['date']):
         df.loc[len(df)] = 0
@@ -116,6 +116,8 @@ async def ltv_model_export(request: Request,
     sql = data['sql']
     df = await ckdb.query_dataframe(sql)
+    if df.empty:
+        return schemas.Msg(code=-9, msg='查无数据')
     df_to_stream = DfToStream((df, 'ltv'))
     with df_to_stream as d:
         export = d.to_stream()
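Reviewer note (not part of the patch): every endpoint touched above repeats the same empty-DataFrame guard with the new code=-9 convention. A minimal sketch of how that guard could be centralized, using only schemas.Msg and ckdb.query_dataframe as they appear in the diff; the helper name query_dataframe_or_msg and the bare `import schemas` path are assumptions for illustration.

import schemas  # assumed import path; query.py/xquery.py already have schemas in scope


async def query_dataframe_or_msg(ckdb, sql: str, msg: str = '无数据'):
    """Return (df, None) when the query has rows, or (None, Msg) when it is empty."""
    df = await ckdb.query_dataframe(sql)
    if df.empty:
        # code=-9 is the "no data" code this patch standardizes on
        return None, schemas.Msg(code=-9, msg=msg, data=None)
    return df, None

Each endpoint could then replace the repeated guard with `df, empty = await query_dataframe_or_msg(ckdb, sql)` followed by `if empty: return empty`.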