From 405e2717b034f3a6d31bd8d4c993cf9d7b451b04 Mon Sep 17 00:00:00 2001
From: wuaho
Date: Tue, 7 Sep 2021 17:25:49 +0800
Subject: [PATCH] 1

---
 api/api_v1/endpoints/query.py  | 32 +++++++++++++++++++++++++++-----
 api/api_v1/endpoints/xquery.py |  4 +++-
 models/behavior_analysis.py    | 30 +++++++++++++++++++++++++-----
 models/x_analysis.py           |  5 ++++-
 4 files changed, 59 insertions(+), 12 deletions(-)

diff --git a/api/api_v1/endpoints/query.py b/api/api_v1/endpoints/query.py
index a149b00..0b3bb03 100644
--- a/api/api_v1/endpoints/query.py
+++ b/api/api_v1/endpoints/query.py
@@ -120,7 +120,10 @@ async def event_model(
             'avg': [],
             'event_name': item['event_name'],
             'format': item['format'],
-            'last_value': 0
+            'last_value': 0,
+            'start_date': item['start_date'],
+            'end_date': item['end_date'],
+            'time_particle': item['time_particle']
         }
         sql = item['sql']
         groupby = item['groupby']
@@ -278,6 +281,9 @@ async def retention_model(request: Request,
         'title': title,
         'filter_item_type': filter_item_type,
         'filter_item': filter_item,
+        'start_date': res['start_date'],
+        'end_date': res['end_date'],
+        'time_particle': res['time_particle']
     }
     return schemas.Msg(code=0, msg='ok', data=resp)
 
@@ -371,7 +377,10 @@ async def retention_model_del(
         'values': values,
         'days': days,
         'date_range': [d.strftime('%Y-%m-%d') for d in date_range][:unit_num + 1],
-        'title': title
+        'title': title,
+        'start_date': res['start_date'],
+        'end_date': res['end_date'],
+        'time_particle': res['time_particle']
     }
     return schemas.Msg(code=0, msg='ok', data=data)
 
@@ -512,7 +521,13 @@ async def funnel_model(
         _ = date_data.setdefault(key[0].strftime('%Y-%m-%d'), {})
         _[key[1]] = tmp
     title = (groupby or ['总体']) + cond_level
-    resp = {'list': data_list, 'date_data': date_data, 'title': title, 'level': cond_level
+    resp = {'list': data_list,
+            'date_data': date_data,
+            'title': title,
+            'level': cond_level,
+            'start_date': res['start_date'],
+            'end_date': res['end_date'],
+            'time_particle': res['time_particle']
             }
     return schemas.Msg(code=0, msg='ok', data=resp)
 
@@ -552,7 +567,11 @@ async def scatter_model(
     max_v = int(df['values'].max())
     min_v = int(df['values'].min())
     interval = (max_v - min_v) // 10 or 1
-    resp = {'list': dict()}
+    resp = {'list': dict(),
+            'start_date': res['start_date'],
+            'end_date': res['end_date'],
+            'time_particle': res['time_particle']
+            }
 
     if not quota_interval_arr:
         resp['label'] = [f'[{i},{i + interval})' for i in range(min_v, max_v, interval)]
@@ -653,7 +672,10 @@ async def trace_model_sql(
     # )
     data = {
         'nodes': [{'name': item} for item in nodes],
-        'links': links
+        'links': links,
+        'start_date': res['start_date'],
+        'end_date': res['end_date'],
+        'time_particle': res['time_particle']
     }
     return schemas.Msg(code=0, msg='ok', data=data)
 
diff --git a/api/api_v1/endpoints/xquery.py b/api/api_v1/endpoints/xquery.py
index 0284129..f4e604c 100644
--- a/api/api_v1/endpoints/xquery.py
+++ b/api/api_v1/endpoints/xquery.py
@@ -60,7 +60,9 @@ async def ltv_model_sql(
     df.rename(columns={'cnt1': '设备数'}, inplace=True)
     data = {
         'title': df.columns.tolist(),
-        'rows': df.values.tolist()
+        'rows': df.values.tolist(),
+        'start_date': res['start_date'],
+        'end_date': res['end_date']
     }
     return schemas.Msg(code=0, msg='ok', data=data)
 
diff --git a/models/behavior_analysis.py b/models/behavior_analysis.py
index 8d90018..5b5c95a 100644
--- a/models/behavior_analysis.py
+++ b/models/behavior_analysis.py
@@ -281,7 +281,10 @@ class BehaviorAnalysis:
                 'groupby': ['date'] + [i.key for i in self.groupby],
                 'date_range': self.date_range,
                 'event_name': [event_name_a, event_name_b],
-                'unit_num': self.unit_num
+                'unit_num': self.unit_num,
+                'time_particle': self.time_particle,
+                'start_date': self.start_date[:10],
+                'end_date': self.end_date[:10],
                 }
 
     def event_model_sql(self):
@@ -363,8 +366,10 @@ class BehaviorAnalysis:
                 'date_range': self.date_range,
                 'event_name': event_name_display or event_name,
                 'format': format,
+                'report_name': self.report_name or 'temp',
                 'time_particle': self.time_particle,
-                'report_name': self.report_name or 'temp'
+                'start_date': self.start_date[:10],
+                'end_date': self.end_date[:10],
             })
 
         return sqls
@@ -424,7 +429,10 @@ ORDER BY level
         return {'sql': sql,
                 'groupby': [i.key for i in self.groupby],
                 'date_range': self.date_range,
-                'cond_level': cond_level
+                'cond_level': cond_level,
+                'time_particle': self.time_particle,
+                'start_date': self.start_date[:10],
+                'end_date': self.end_date[:10],
                 }
 
     def scatter_model_sql(self):
@@ -464,7 +472,10 @@ ORDER BY level
                 'interval_type': event['intervalType'],
                 'analysis': analysis,
                 'quota_interval_arr': quota_interval_arr,
-                'groupby': [i.key for i in self.groupby]
+                'groupby': [i.key for i in self.groupby],
+                'time_particle': self.time_particle,
+                'start_date': self.start_date[:10],
+                'end_date': self.end_date[:10],
                 }
         elif event.get('quota'):
             event_attr_col = getattr(self.event_tbl.c, event['quota'])
@@ -479,7 +490,10 @@ ORDER BY level
                 'interval_type': event['intervalType'],
                 'analysis': analysis,
                 'quota_interval_arr': quota_interval_arr,
-                'groupby': [i.key for i in self.groupby]
+                'groupby': [i.key for i in self.groupby],
+                'time_particle': self.time_particle,
+                'start_date': self.start_date[:10],
+                'end_date': self.end_date[:10],
             }
 
     def trace_model_sql(self):
@@ -589,6 +603,9 @@ ORDER BY values desc"""
         print(sql)
         return {
             'sql': sql,
+            'time_particle': self.time_particle,
+            'start_date': self.start_date[:10],
+            'end_date': self.end_date[:10],
         }
 
     def retention_model_sql2(self):
@@ -655,4 +672,7 @@ group by a.reg_date) log on reg.date=log.reg_date
             'unit_num': self.unit_num,
             'filter_item_type': filter_item_type,
             'filter_item': filter_item,
+            'time_particle': self.time_particle,
+            'start_date': self.start_date[:10],
+            'end_date': self.end_date[:10],
         }
diff --git a/models/x_analysis.py b/models/x_analysis.py
index 480f0c3..3695d43 100644
--- a/models/x_analysis.py
+++ b/models/x_analysis.py
@@ -145,4 +145,7 @@ class XAnalysis:
        order by date
        """
         print(sql)
-        return {'sql': sql,'quota':quota}
+        return {'sql': sql,'quota':quota,
+                'start_date': self.event_view['startTime'][:10],
+                'end_date': self.event_view['endTime'][:10],
+                }
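
Note: every hunk above follows the same pattern. Each SQL-building method in BehaviorAnalysis / XAnalysis now returns the query's time window ('start_date', 'end_date', sliced to 'YYYY-MM-DD' with [:10]) and its granularity ('time_particle') alongside the generated SQL, and each endpoint copies those fields into its response payload. The sketch below is illustrative only and is not part of the patch; the names sketch_sql_payload and sketch_response are hypothetical, and it assumes start_date / end_date are datetime strings whose first ten characters form the date, which is why the hunks slice with [:10].

    # Minimal sketch of the pattern introduced by this patch (assumed shapes, not project code).
    def sketch_sql_payload(analysis):
        # `analysis` stands in for a BehaviorAnalysis-like object.
        return {
            'sql': 'SELECT 1',                        # built elsewhere in the real code
            'time_particle': analysis.time_particle,  # e.g. day/week bucketing
            'start_date': analysis.start_date[:10],   # 'YYYY-MM-DD HH:MM:SS' -> 'YYYY-MM-DD'
            'end_date': analysis.end_date[:10],
        }

    def sketch_response(res, rows):
        # Endpoint side: echo the time-range metadata back with the computed data,
        # so the caller knows which window and granularity the numbers cover.
        return {
            'rows': rows,
            'start_date': res['start_date'],
            'end_date': res['end_date'],
            'time_particle': res['time_particle'],
        }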