commit 5325b31466
parent 040487fac9
@@ -72,10 +72,19 @@ async def event_model(
         date_range = item['date_range']
         q['date_range'] = date_range
         df = await ckdb.query_dataframe(sql)
-        df.fillna(0,inplace=True)
+        df.fillna(0, inplace=True)
         if df.shape[0] == 0:
             continue
         # return schemas.Msg(code=0, msg='ok', data=[q])
+        if item['time_particle'] == 'total':
+            # for group, df_group in df.groupby(groupby):
+            #     df_group.reset_index(drop=True, inplace=True)
+            q['groups'].append(groupby)
+            q['values'].append(df['values'].to_list())
+            q['sum'].append(int(df['values'].sum()))
+            q['date_range'] = [f'{i[0]}-{i[1]}' for i in df.set_index(['svrindex', 'name']).index]
+            res.append(q)
+            continue

         if groupby:
             # grouped
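For reference, a minimal sketch (made-up svrindex/name data, not from this repo) of what the new 'total' branch does: the whole window collapses to one aggregated row per group, and the (svrindex, name) MultiIndex becomes the date_range labels:

```python
import pandas as pd

# Hypothetical query result for time_particle == 'total':
# one aggregated row per (svrindex, name) group.
df = pd.DataFrame({
    'svrindex': [1, 1, 2],
    'name': ['login', 'pay', 'login'],
    'values': [120, 45, 98],
})

q = {'groups': [], 'values': [], 'sum': [], 'date_range': []}
groupby = ['svrindex', 'name']

q['groups'].append(groupby)
q['values'].append(df['values'].to_list())   # [120, 45, 98]
q['sum'].append(int(df['values'].sum()))     # 263
# Each MultiIndex entry (svrindex, name) becomes a "svrindex-name" label.
q['date_range'] = [f'{i[0]}-{i[1]}' for i in df.set_index(['svrindex', 'name']).index]
print(q['date_range'])  # ['1-login', '1-pay', '2-login']
```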
@@ -103,7 +112,12 @@ async def event_model(
         df.sort_values('date', inplace=True)
         q['values'].append(df['values'].to_list())
         q['sum'].append(int(df['values'].sum()))
-        q['date_range'] = [d.strftime('%Y-%m-%d %H:%M:%S') for d in q['date_range']]
+        if item['time_particle'] in ('P1D', 'P1W'):
+            q['date_range'] = [d.strftime('%Y-%m-%d') for d in q['date_range']]
+        elif item['time_particle'] in ('P1M',):
+            q['date_range'] = [d.strftime('%Y-%m') for d in q['date_range']]
+        else:
+            q['date_range'] = [d.strftime('%Y-%m-%d %H:%M:%S') for d in q['date_range']]
         # q['eventNameDisplay']=item['event_name_display']
         res.append(q)
     return schemas.Msg(code=0, msg='ok', data=res)
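A quick illustration of the new per-granularity formatting; the helper and sample timestamps below are hypothetical, only the format strings come from the diff:

```python
import pandas as pd

date_range = list(pd.date_range('2021-08-01', periods=3, freq='D'))

def format_range(time_particle, dates):
    # Mirrors the branch added above: day/week buckets keep only the date,
    # month buckets keep year-month, everything else keeps the full timestamp.
    if time_particle in ('P1D', 'P1W'):
        return [d.strftime('%Y-%m-%d') for d in dates]
    elif time_particle in ('P1M',):
        return [d.strftime('%Y-%m') for d in dates]
    else:
        return [d.strftime('%Y-%m-%d %H:%M:%S') for d in dates]

print(format_range('P1D', date_range))   # ['2021-08-01', '2021-08-02', '2021-08-03']
print(format_range('PT1H', date_range))  # ['2021-08-01 00:00:00', ...]
```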
@@ -262,6 +262,7 @@ class Settings(BaseSettings):
         "P1D": "D",
         "P1W": "W",
         "P1M": "MS",
+        "total": "D",
     }

     TIME_GRAIN_EXPRESSIONS = {
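The map supplies pandas date_range frequency aliases; mapping 'total' to 'D' means a 'total' request still builds a daily date skeleton, which the 'total' branch earlier in this commit then overwrites with group labels. A small sketch of what the aliases expand to (map values copied from above, sample window made up):

```python
import pandas as pd

# Copied from PROPHET_TIME_GRAIN_MAP above; the date window is arbitrary.
PROPHET_TIME_GRAIN_MAP = {"P1D": "D", "P1W": "W", "P1M": "MS", "total": "D"}

for grain, freq in PROPHET_TIME_GRAIN_MAP.items():
    rng = pd.date_range('2021-08-01', '2021-09-01', freq=freq)
    print(grain, freq, [d.strftime('%Y-%m-%d') for d in rng[:3]])
# 'D' gives every day, 'W' gives week boundaries (Sundays by default),
# 'MS' gives month starts.
```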
@@ -271,7 +272,8 @@ class Settings(BaseSettings):
         'PT10M': lambda col, zone: func.toStartOfTenMinutes(func.addHours(col, zone)).label('date'),
         'PT15M': lambda col, zone: func.toStartOfFifteenMinutes(func.addHours(col, zone)).label('date'),
         'PT1H': lambda col, zone: func.toStartOfHour(func.addHours(col, zone)).label('date'),
-        'P1D': lambda col, zone: func.toStartOfDay(func.addHours(col, zone)).label('date'),
+        'P1D': lambda col, zone: func.toDate(func.addHours(col, zone)).label('date'),
+        'total': lambda col, zone: func.toStartOfDay(func.addHours(col, zone)).label('date'),
         'P1W': lambda col, zone: func.toStartOfWeek(func.addHours(col, zone)).label('date'),
         'P1M': lambda col, zone: func.toStartOfMonth(func.addHours(col, zone)).label('date'),
     }
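These map entries are ordinary `sqlalchemy.func` calls, so they compile straight into ClickHouse function calls. A minimal sketch (generic SQLAlchemy compiler, hypothetical `#event_time` column) of what the P1D swap changes: in ClickHouse `toDate` returns a Date, while `toStartOfDay` returns a DateTime at midnight:

```python
import sqlalchemy as sa
from sqlalchemy import func

event_time_col = sa.column('#event_time')  # hypothetical column name
zone_time = 8

old_p1d = func.toStartOfDay(func.addHours(event_time_col, zone_time)).label('date')
new_p1d = func.toDate(func.addHours(event_time_col, zone_time)).label('date')

for expr in (old_p1d, new_p1d):
    print(sa.select(expr).compile(compile_kwargs={"literal_binds": True}))
# SELECT toStartOfDay(addHours("#event_time", 8)) AS date   -- DateTime at 00:00:00
# SELECT toDate(addHours("#event_time", 8)) AS date         -- plain Date
```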
@@ -147,9 +147,10 @@ class BehaviorAnalysis:
         end_date: str = self.event_view.get('endTime')
         date_range = pd.date_range(start_date, end_date, freq=settings.PROPHET_TIME_GRAIN_MAP[self.time_particle],
                                    tz='UTC').tolist()
-        if self.time_particle in ('P1M','P1W'):
+        if self.time_particle in ('P1D', 'P1W', 'P1M'):
             date_range = [item.date() for item in date_range]
+        # start_date = date_range[0].strftime('%Y-%m-%d')
+        # end_date = date_range[-1].strftime('%Y-%m-%d')

         return start_date, end_date, date_range
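A small sketch of what widening the condition changes: pd.date_range(..., tz='UTC') yields timezone-aware Timestamps, and .date() strips them down to plain dates, now for P1D as well as P1W and P1M:

```python
import pandas as pd

date_range = pd.date_range('2021-08-01', '2021-08-03', freq='D', tz='UTC').tolist()
print(date_range[0])      # 2021-08-01 00:00:00+00:00  (tz-aware Timestamp)

time_particle = 'P1D'
if time_particle in ('P1D', 'P1W', 'P1M'):
    date_range = [item.date() for item in date_range]
print(date_range)         # [datetime.date(2021, 8, 1), ..., datetime.date(2021, 8, 3)]
```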
@@ -284,8 +285,10 @@ class BehaviorAnalysis:

         for event in self.events:
             event_name_display = event.get('eventNameDisplay')
-            select_exprs = [
-                settings.TIME_GRAIN_EXPRESSIONS[self.time_particle](event_time_col, self.zone_time)]
+            select_exprs = []
+            if self.time_particle != 'total':
+                select_exprs.append(
+                    settings.TIME_GRAIN_EXPRESSIONS[self.time_particle](event_time_col, self.zone_time))

             base_where = [
                 func.addHours(event_time_col, self.zone_time) >= self.start_date,
@@ -342,7 +345,10 @@ class BehaviorAnalysis:
             qry = sa.select(selectd).where(and_(*event_filter, *base_where))

             qry = qry.group_by(*select_exprs)
-            qry = qry.order_by(sa.Column('date'))
+            if self.time_particle != 'total':
+                qry = qry.order_by(sa.Column('date'))
+            else:
+                qry = qry.order_by(sa.Column('values').desc())
             qry = qry.limit(1000)

             sql = str(qry.compile(compile_kwargs={"literal_binds": True}))
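Putting the two 'total' checks together (the select_exprs change above and this order_by change), a hedged sketch with hypothetical table and column names, compiled with the generic SQLAlchemy compiler rather than the ClickHouse dialect, of how the 'total' path omits the date bucket and orders by the aggregated values instead of by date:

```python
import sqlalchemy as sa
from sqlalchemy import func

# Hypothetical table/columns for illustration only.
event = sa.table('event', sa.column('#event_time'), sa.column('#account_id'))
event_time_col = event.c['#event_time']
values_col = func.count().label('values')

def build(time_particle, zone_time=8):
    select_exprs = []
    if time_particle != 'total':
        # Bucket by day only when we are not aggregating the whole window.
        select_exprs.append(func.toDate(func.addHours(event_time_col, zone_time)).label('date'))
    qry = sa.select(*select_exprs, values_col).select_from(event)
    if select_exprs:
        qry = qry.group_by(*select_exprs)
    if time_particle != 'total':
        qry = qry.order_by(sa.Column('date'))
    else:
        qry = qry.order_by(sa.Column('values').desc())
    return str(qry.compile(compile_kwargs={"literal_binds": True}))

print(build('P1D'))    # date bucket in SELECT/GROUP BY, ordered by date
print(build('total'))  # single aggregated row, ordered by values DESC
```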
@@ -351,7 +357,8 @@ class BehaviorAnalysis:
                 'groupby': [i.key for i in self.groupby],
                 'date_range': self.date_range,
                 'event_name': event_name_display or event_name,
-                'format': format
+                'format': format,
+                'time_particle': self.time_particle
             })

         return sqls
@@ -440,7 +447,7 @@ ORDER BY level

         qry = sa.select(event_date_col, *self.groupby, values_col) \
             .where(and_(*where)) \
-            .group_by(event_date_col, *self.groupby, e_account_id_col)
+            .group_by(event_date_col, *self.groupby)

         sql = str(qry.compile(compile_kwargs={"literal_binds": True}))
         print(sql)