update

commit c1c2f64390 (parent 9aeb897e4a)
@@ -212,6 +212,15 @@ async def funnel_model(
     groupby = res['groupby']
     df = await ckdb.query_dataframe(sql)
+    # fill in missing level rows
+    concat_data = []
+    for key, tmp_df in df.groupby(['date'] + groupby):
+        not_exists_level = {i for i in range(1, len(cond_level) + 1)} - set(tmp_df['level'])
+        for item in not_exists_level:
+            key = key if isinstance(key, tuple) else (key,)
+            concat_data.append((*key, item, 0))
+    df = pd.concat([df, pd.DataFrame(concat_data, columns=df.columns)])
+
     # df.set_index('date',inplace=True)
     data_list = []
     date_data = {}
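The added block back-fills zero rows so that every (date, group) combination carries an entry for each funnel level, even when the ClickHouse result has no users at that level. A minimal standalone sketch of the same padding (the sample frame, the 'channel' column and the step names are assumptions, not values from this repo):

    import pandas as pd

    # toy query result: level 3 is missing for 2023-01-01 / channel "a"
    df = pd.DataFrame(
        [("2023-01-01", "a", 1, 120), ("2023-01-01", "a", 2, 40)],
        columns=["date", "channel", "level", "values"],
    )
    cond_level = ["step1", "step2", "step3"]   # three funnel steps
    groupby = ["channel"]

    concat_data = []
    for key, tmp_df in df.groupby(["date"] + groupby):
        # funnel levels with no row for this (date, group) combination
        missing = {i for i in range(1, len(cond_level) + 1)} - set(tmp_df["level"])
        key = key if isinstance(key, tuple) else (key,)
        for level in missing:
            concat_data.append((*key, level, 0))   # pad with a zero count
    df = pd.concat([df, pd.DataFrame(concat_data, columns=df.columns)])

    print(df.sort_values(["date", "channel", "level"]))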
@@ -231,6 +240,34 @@ async def funnel_model(
             tmp['p1'].append(round(v * 100 / tmp_df.loc[1, 'values'], 2))
             # tmp['p2'].append(round(v*100 / tmp_df.loc[i - 1, 'values'], 2))
         data_list.append(tmp)
+
+    # fill in missing dates
+    all_idx = {(dt, lv) for dt in date_range for lv in range(1, len(cond_level) + 1)}
+    concat_data = []
+    for i in all_idx - set(df.set_index(['date', 'level']).index):
+        concat_data.append((*i, 0))
+    summary_df = pd.concat(
+        [df[['date', 'level', 'values']], pd.DataFrame(concat_data, columns=['date', 'level', 'values'])])
+    for key, tmp_df in summary_df.groupby('date'):
+        tmp_df = tmp_df.groupby('level').sum()
+        tmp_df.sort_index(inplace=True)
+        for i in tmp_df.index:
+            tmp_df.loc[i, 'values'] = tmp_df.loc[i:]['values'].sum()
+
+        tmp = dict()
+
+        tmp['n'] = tmp_df['values'].to_list()
+        tmp['p1'] = [100]
+        # tmp['p2'] = []
+        for i, v in tmp_df.loc[2:, 'values'].items():
+            var = round(v * 100 / tmp_df.loc[1, 'values'], 2)
+            var = 0 if np.isnan(var) else var
+            tmp['p1'].append(var)
+            # tmp['p2'].append(round(v*100 / tmp_df.loc[i - 1, 'values'], 2))
+
+        _ = date_data.setdefault(key.strftime('%Y-%m-%d'), {})
+        _['总体'] = tmp
+
     if groupby:
         # fill in missing rows
         concat_data = []
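The new summary loop builds an overall ('总体') series per date. Assuming the query counts each user once at the deepest step reached (the usual windowFunnel output), the inner loop rolls per-level counts up into "reached at least level i" totals before the p1 ratios are taken against level 1. A standalone sketch of that roll-up with made-up counts:

    import pandas as pd

    # assumed per-level counts: users whose deepest funnel level was exactly i
    tmp_df = pd.DataFrame({"values": [100, 30, 10]},
                          index=pd.Index([1, 2, 3], name="level"))

    # roll up to "reached at least level i": each level absorbs everything deeper
    for i in tmp_df.index:
        tmp_df.loc[i, "values"] = tmp_df.loc[i:]["values"].sum()
    print(tmp_df["values"].to_list())   # [140, 40, 10]

    # conversion relative to level 1, as the endpoint reports in 'p1'
    p1 = [100] + [round(v * 100 / tmp_df.loc[1, "values"], 2)
                  for v in tmp_df.loc[2:, "values"]]
    print(p1)                           # [100, 28.57, 7.14]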
@@ -253,7 +290,9 @@ async def funnel_model(
             tmp['p1'] = [100]
             # tmp['p2'] = []
             for i, v in tmp_df.loc[2:, 'values'].items():
-                tmp['p1'].append(round(v * 100 / tmp_df.loc[1, 'values'], 2))
+                var = round(v * 100 / tmp_df.loc[1, 'values'], 2)
+                var = 0 if np.isnan(var) else var
+                tmp['p1'].append(var)
                 # tmp['p2'].append(round(v*100 / tmp_df.loc[i - 1, 'values'], 2))
             data_list.append(tmp)

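The replaced ratio line guards against NaN: now that missing levels are padded with zeros, a group's level-1 count can be 0, and with numpy/pandas scalars 0 / 0 yields NaN (with a RuntimeWarning) rather than raising, so round() would push NaN into the response. A small sketch of the guard, assuming the same per-level frame shape:

    import numpy as np
    import pandas as pd

    # a group whose padded counts are all zero
    tmp_df = pd.DataFrame({"values": [0, 0]}, index=pd.Index([1, 2], name="level"))

    v = tmp_df.loc[2, "values"]
    var = round(v * 100 / tmp_df.loc[1, "values"], 2)   # numpy 0 / 0 -> nan, no exception
    var = 0 if np.isnan(var) else var                   # clamp to 0 so 'p1' stays numeric
    print(var)   # 0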
@@ -270,13 +309,15 @@ async def funnel_model(
             tmp['p1'] = [100]
             # tmp['p2'] = []
             for i, v in tmp_df.loc[2:, 'values'].items():
-                tmp['p1'].append(round(v * 100 / tmp_df.loc[1, 'values'], 2))
+                var = round(v * 100 / tmp_df.loc[1, 'values'], 2)
+                var = 0 if np.isnan(var) else var
+                tmp['p1'].append(var)
                 # tmp['p2'].append(round(v*100 / tmp_df.loc[i - 1, 'values'], 2))

             _ = date_data.setdefault(key[0].strftime('%Y-%m-%d'), {})
             _[key[1]] = tmp

-    resp = {'list': data_list, 'date_data': date_data, 'title': groupby + cond_level, 'level': cond_level
+    title = (groupby or ['总体']) + cond_level
+    resp = {'list': data_list, 'date_data': date_data, 'title': title, 'level': cond_level
             }
     return schemas.Msg(code=0, msg='ok', data=resp)

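The title fix matters when no group-by column was requested: previously the title was just `groupby + cond_level`, so the '总体' (overall) series added above had no matching column; the `or` fallback injects that label. A one-line illustration (step names are placeholders):

    cond_level = ["step1", "step2"]            # placeholder funnel step names

    groupby = []                               # no group-by requested
    print((groupby or ['总体']) + cond_level)   # ['总体', 'step1', 'step2']

    groupby = ["channel"]
    print((groupby or ['总体']) + cond_level)   # ['channel', 'step1', 'step2']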
@@ -273,7 +273,8 @@ ORDER BY level
             .group_by(sa.Column('date'), *[sa.Column(i.key) for i in self.groupby], sa.Column('level')) \
             .order_by(sa.Column('date'), *[sa.Column(i.key) for i in self.groupby], sa.Column('level'))
         sql = str(qry.compile(compile_kwargs={"literal_binds": True}))
-        sql = sql.replace('_windows_gap_', f"({windows_gap},'strict_increase')")
+        # sql = sql.replace('_windows_gap_', f"({windows_gap},'strict_increase')")
+        sql = sql.replace('_windows_gap_', f"({windows_gap})")
         print(sql)
         return {'sql': sql,
                 'groupby': [i.key for i in self.groupby],
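In the query builder, the compiled SQLAlchemy statement carries a `_windows_gap_` placeholder that is text-substituted into the parameters of ClickHouse's windowFunnel aggregate; this commit drops the 'strict_increase' mode and passes only the window size, so windowFunnel falls back to its default matching mode. A sketch of the substitution on an assumed SQL fragment (column and event names are invented for illustration):

    # assumed fragment; the real statement is produced by qry.compile(...) above
    sql = "SELECT windowFunnel_windows_gap_(uts, event = 'login', event = 'pay') AS level FROM events"
    windows_gap = 86400   # funnel window in seconds

    # before this commit: strict_increase mode was hard-coded
    # sql = sql.replace('_windows_gap_', f"({windows_gap},'strict_increase')")

    # after: only the window size, so windowFunnel uses its default mode
    sql = sql.replace('_windows_gap_', f"({windows_gap})")
    print(sql)
    # SELECT windowFunnel(86400)(uts, event = 'login', event = 'pay') AS level FROM events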