From daf96f40fe19077de0a372c4a93d4f1b4939552a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E6=9D=8E=E4=BC=9F?= <250213850@qq.com> Date: Mon, 30 May 2022 10:48:26 +0800 Subject: [PATCH] =?UTF-8?q?1.=E4=BA=BA=E4=BA=8B=E5=88=9D=E5=A7=8B=E7=89=88?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .gitignore | 132 ++ Pipfile | 29 + README.md | 7 + api/__init__.py | 0 api/api_v1/__init__.py | 0 api/api_v1/api.py | 40 + api/api_v1/authz/__init__.py | 0 api/api_v1/authz/authz.py | 616 ++++++ api/api_v1/check_data/__init__.py | 1 + api/api_v1/check_data/controller.py | 51 + api/api_v1/check_data/service.py | 127 ++ api/api_v1/ck_mana/__init__.py | 0 api/api_v1/ck_mana/event.py | 31 + api/api_v1/endpoints/__init__.py | 0 api/api_v1/endpoints/authority.py | 264 +++ api/api_v1/endpoints/dashboard.py | 260 +++ api/api_v1/endpoints/data_auth.py | 306 +++ api/api_v1/endpoints/data_mana.py | 217 ++ api/api_v1/endpoints/event_mana.py | 64 + api/api_v1/endpoints/folder.py | 40 + api/api_v1/endpoints/project.py | 369 ++++ api/api_v1/endpoints/query.py | 1772 +++++++++++++++++ api/api_v1/endpoints/report.py | 166 ++ api/api_v1/endpoints/space.py | 99 + api/api_v1/endpoints/test.py | 39 + api/api_v1/endpoints/user.py | 161 ++ api/api_v1/endpoints/xquery.py | 271 +++ api/api_v1/user_label/__init__.py | 0 api/api_v1/user_label/controller.py | 104 + api/api_v1/user_label/service.py | 66 + api/deps.py | 61 + ck_test.py | 35 + common/__init__.py | 1 + common/compute.py | 12 + core/__init__.py | 0 core/config.py | 417 ++++ core/security.py | 32 + crud/__init__.py | 23 + crud/base.py | 62 + crud/crud_api_board.py | 35 + crud/crud_api_list.py | 41 + crud/crud_api_log.py | 14 + crud/crud_api_module.py | 35 + crud/crud_authority.py | 92 + crud/crud_check_data.py | 15 + crud/crud_dashboard.py | 29 + crud/crud_data_attr.py | 24 + crud/crud_data_auth.py | 38 + crud/crud_event_list.py | 25 + crud/crud_event_mana.py | 35 + crud/crud_folder.py | 30 + 
crud/crud_proid_map.py | 29 + crud/crud_project.py | 45 + crud/crud_project_number.py | 33 + crud/crud_report.py | 33 + crud/crud_role.py | 43 + crud/crud_space.py | 45 + crud/crud_url_list.py | 42 + crud/crud_user.py | 76 + crud/crud_user_url.py | 28 + crud/select_map.py | 30 + crud/user_label.py | 33 + db/__init__.py | 2 + db/ckdb.py | 79 + db/ckdb_utils.py | 15 + db/mongodb.py | 14 + db/mongodb_utils.py | 12 + init_db.py | 86 + main.py | 165 ++ models/__init__.py | 0 models/behavior_analysis.py | 890 +++++++++ models/user_analysis.py | 223 +++ models/user_label.py | 234 +++ models/x_analysis.py | 203 ++ schemas/__init__.py | 27 + schemas/api_board.py | 12 + schemas/api_list.py | 37 + schemas/api_log.py | 9 + schemas/api_module.py | 18 + schemas/authotity.py | 82 + schemas/base.py | 28 + schemas/check_data.py | 20 + schemas/dashboard.py | 105 + schemas/data_attr.py | 14 + schemas/data_auth.py | 24 + schemas/event_list.py | 10 + schemas/event_mana.py | 8 + schemas/folder.py | 41 + schemas/msg.py | 9 + schemas/proid_map.py | 0 schemas/project.py | 71 + schemas/project_number.py | 17 + schemas/report.py | 57 + schemas/role.py | 37 + schemas/select_map.py | 13 + schemas/space.py | 59 + schemas/sql.py | 37 + schemas/table_struct.py | 13 + schemas/token.py | 14 + schemas/url_list.py | 61 + schemas/user.py | 68 + schemas/user_url.py | 14 + schemas/userlabel.py | 62 + schemas/xquery.py | 13 + sql/end_chain.sql | 44 + sql/huiliu.sql | 26 + sql/start_chain.sql | 43 + sql/各渠道新增角色.sql | 6 + sql/总览.sql | 57 + sql/新增付费.sql | 17 + sql/留存.sql | 29 + sql/留存带分组.sql | 30 + update_api_list.py | 408 ++++ utils.py | 107 + utils/__init__.py | 5 + utils/casbin/__init__.py | 7 + utils/casbin/config/__init__.py | 1 + utils/casbin/config/config.py | 151 ++ utils/casbin/core_enforcer.py | 373 ++++ utils/casbin/distributed_enforcer.py | 132 ++ utils/casbin/effect/__init__.py | 24 + utils/casbin/effect/default_effectors.py | 61 + utils/casbin/effect/effector.py | 19 + utils/casbin/enforcer.py 
| 231 +++ utils/casbin/internal_enforcer.py | 122 ++ utils/casbin/management_enforcer.py | 271 +++ utils/casbin/model/__init__.py | 4 + utils/casbin/model/assertion.py | 47 + utils/casbin/model/function.py | 22 + utils/casbin/model/model.py | 80 + utils/casbin/model/policy.py | 190 ++ utils/casbin/model/policy_op.py | 5 + utils/casbin/persist/__init__.py | 4 + utils/casbin/persist/adapter.py | 46 + utils/casbin/persist/adapter_filtered.py | 13 + utils/casbin/persist/adapters/__init__.py | 2 + .../persist/adapters/adapter_filtered.py | 89 + utils/casbin/persist/adapters/file_adapter.py | 62 + .../casbin/persist/adapters/update_adapter.py | 9 + utils/casbin/persist/batch_adapter.py | 11 + utils/casbin/persist/dispatcher.py | 21 + utils/casbin/rbac/__init__.py | 1 + .../rbac/default_role_manager/__init__.py | 1 + .../rbac/default_role_manager/role_manager.py | 219 ++ utils/casbin/rbac/role_manager.py | 23 + utils/casbin/synced_enforcer.py | 600 ++++++ utils/casbin/util/__init__.py | 3 + utils/casbin/util/builtin_operators.py | 137 ++ utils/casbin/util/expression.py | 29 + utils/casbin/util/rwlock.py | 68 + utils/casbin/util/util.py | 72 + utils/export.py | 26 + utils/func.py | 152 ++ 153 files changed, 13728 insertions(+) create mode 100644 .gitignore create mode 100644 Pipfile create mode 100644 README.md create mode 100644 api/__init__.py create mode 100644 api/api_v1/__init__.py create mode 100644 api/api_v1/api.py create mode 100644 api/api_v1/authz/__init__.py create mode 100644 api/api_v1/authz/authz.py create mode 100644 api/api_v1/check_data/__init__.py create mode 100644 api/api_v1/check_data/controller.py create mode 100644 api/api_v1/check_data/service.py create mode 100644 api/api_v1/ck_mana/__init__.py create mode 100644 api/api_v1/ck_mana/event.py create mode 100644 api/api_v1/endpoints/__init__.py create mode 100644 api/api_v1/endpoints/authority.py create mode 100644 api/api_v1/endpoints/dashboard.py create mode 100644 api/api_v1/endpoints/data_auth.py 
create mode 100644 api/api_v1/endpoints/data_mana.py create mode 100644 api/api_v1/endpoints/event_mana.py create mode 100644 api/api_v1/endpoints/folder.py create mode 100644 api/api_v1/endpoints/project.py create mode 100644 api/api_v1/endpoints/query.py create mode 100644 api/api_v1/endpoints/report.py create mode 100644 api/api_v1/endpoints/space.py create mode 100644 api/api_v1/endpoints/test.py create mode 100644 api/api_v1/endpoints/user.py create mode 100644 api/api_v1/endpoints/xquery.py create mode 100644 api/api_v1/user_label/__init__.py create mode 100644 api/api_v1/user_label/controller.py create mode 100644 api/api_v1/user_label/service.py create mode 100644 api/deps.py create mode 100644 ck_test.py create mode 100644 common/__init__.py create mode 100644 common/compute.py create mode 100644 core/__init__.py create mode 100644 core/config.py create mode 100644 core/security.py create mode 100644 crud/__init__.py create mode 100644 crud/base.py create mode 100644 crud/crud_api_board.py create mode 100644 crud/crud_api_list.py create mode 100644 crud/crud_api_log.py create mode 100644 crud/crud_api_module.py create mode 100644 crud/crud_authority.py create mode 100644 crud/crud_check_data.py create mode 100644 crud/crud_dashboard.py create mode 100644 crud/crud_data_attr.py create mode 100644 crud/crud_data_auth.py create mode 100644 crud/crud_event_list.py create mode 100644 crud/crud_event_mana.py create mode 100644 crud/crud_folder.py create mode 100644 crud/crud_proid_map.py create mode 100644 crud/crud_project.py create mode 100644 crud/crud_project_number.py create mode 100644 crud/crud_report.py create mode 100644 crud/crud_role.py create mode 100644 crud/crud_space.py create mode 100644 crud/crud_url_list.py create mode 100644 crud/crud_user.py create mode 100644 crud/crud_user_url.py create mode 100644 crud/select_map.py create mode 100644 crud/user_label.py create mode 100644 db/__init__.py create mode 100644 db/ckdb.py create mode 100644 
db/ckdb_utils.py create mode 100644 db/mongodb.py create mode 100644 db/mongodb_utils.py create mode 100644 init_db.py create mode 100644 main.py create mode 100644 models/__init__.py create mode 100644 models/behavior_analysis.py create mode 100644 models/user_analysis.py create mode 100644 models/user_label.py create mode 100644 models/x_analysis.py create mode 100644 schemas/__init__.py create mode 100644 schemas/api_board.py create mode 100644 schemas/api_list.py create mode 100644 schemas/api_log.py create mode 100644 schemas/api_module.py create mode 100644 schemas/authotity.py create mode 100644 schemas/base.py create mode 100644 schemas/check_data.py create mode 100644 schemas/dashboard.py create mode 100644 schemas/data_attr.py create mode 100644 schemas/data_auth.py create mode 100644 schemas/event_list.py create mode 100644 schemas/event_mana.py create mode 100644 schemas/folder.py create mode 100644 schemas/msg.py create mode 100644 schemas/proid_map.py create mode 100644 schemas/project.py create mode 100644 schemas/project_number.py create mode 100644 schemas/report.py create mode 100644 schemas/role.py create mode 100644 schemas/select_map.py create mode 100644 schemas/space.py create mode 100644 schemas/sql.py create mode 100644 schemas/table_struct.py create mode 100644 schemas/token.py create mode 100644 schemas/url_list.py create mode 100644 schemas/user.py create mode 100644 schemas/user_url.py create mode 100644 schemas/userlabel.py create mode 100644 schemas/xquery.py create mode 100644 sql/end_chain.sql create mode 100644 sql/huiliu.sql create mode 100644 sql/start_chain.sql create mode 100644 sql/各渠道新增角色.sql create mode 100644 sql/总览.sql create mode 100644 sql/新增付费.sql create mode 100644 sql/留存.sql create mode 100644 sql/留存带分组.sql create mode 100644 update_api_list.py create mode 100644 utils.py create mode 100644 utils/__init__.py create mode 100644 utils/casbin/__init__.py create mode 100644 utils/casbin/config/__init__.py create mode 
100644 utils/casbin/config/config.py create mode 100644 utils/casbin/core_enforcer.py create mode 100644 utils/casbin/distributed_enforcer.py create mode 100644 utils/casbin/effect/__init__.py create mode 100644 utils/casbin/effect/default_effectors.py create mode 100644 utils/casbin/effect/effector.py create mode 100644 utils/casbin/enforcer.py create mode 100644 utils/casbin/internal_enforcer.py create mode 100644 utils/casbin/management_enforcer.py create mode 100644 utils/casbin/model/__init__.py create mode 100644 utils/casbin/model/assertion.py create mode 100644 utils/casbin/model/function.py create mode 100644 utils/casbin/model/model.py create mode 100644 utils/casbin/model/policy.py create mode 100644 utils/casbin/model/policy_op.py create mode 100644 utils/casbin/persist/__init__.py create mode 100644 utils/casbin/persist/adapter.py create mode 100644 utils/casbin/persist/adapter_filtered.py create mode 100644 utils/casbin/persist/adapters/__init__.py create mode 100644 utils/casbin/persist/adapters/adapter_filtered.py create mode 100644 utils/casbin/persist/adapters/file_adapter.py create mode 100644 utils/casbin/persist/adapters/update_adapter.py create mode 100644 utils/casbin/persist/batch_adapter.py create mode 100644 utils/casbin/persist/dispatcher.py create mode 100644 utils/casbin/rbac/__init__.py create mode 100644 utils/casbin/rbac/default_role_manager/__init__.py create mode 100644 utils/casbin/rbac/default_role_manager/role_manager.py create mode 100644 utils/casbin/rbac/role_manager.py create mode 100644 utils/casbin/synced_enforcer.py create mode 100644 utils/casbin/util/__init__.py create mode 100644 utils/casbin/util/builtin_operators.py create mode 100644 utils/casbin/util/expression.py create mode 100644 utils/casbin/util/rwlock.py create mode 100644 utils/casbin/util/util.py create mode 100644 utils/export.py create mode 100644 utils/func.py diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..de0e8b2 --- /dev/null 
+++ b/.gitignore @@ -0,0 +1,132 @@ +# ---> Python +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +.idea \ No newline at end of file diff --git a/Pipfile b/Pipfile new file mode 100644 index 0000000..8808d25 --- /dev/null +++ b/Pipfile @@ -0,0 +1,29 @@ +[[source]] +url = "https://pypi.douban.com/simple" +verify_ssl = false +name = "pypi" + +[packages] +fastapi = "*" +sqlalchemy = "*" +pymongo = "*" +uvicorn = "*" +motor = "*" +python-jose = "*" +passlib = "*" +pydantic = {extras = ["email"], version = "*"} +emails = "*" +python-multipart = "*" +gunicorn = "*" +simpleeval = "*" +aredis = "*" +aioch = "*" +aioredis = "*" +redis = "*" +bcrypt = "*" +pandas = "==1.2.3" + +[dev-packages] + +[requires] +python_version = "3.8" diff --git a/README.md b/README.md new file mode 100644 index 0000000..9bc158d --- /dev/null +++ b/README.md @@ -0,0 +1,7 @@ + + +人事后端 使用 mongo 数据库 +简历数据 使用CK数据库 + + + diff --git a/api/__init__.py b/api/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/api/api_v1/__init__.py b/api/api_v1/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/api/api_v1/api.py b/api/api_v1/api.py new file mode 100644 index 0000000..490546e --- /dev/null +++ b/api/api_v1/api.py @@ -0,0 +1,40 @@ +from fastapi import APIRouter +from api.api_v1.endpoints import user +from .endpoints import project +from .endpoints import folder +from .endpoints import space +from .endpoints import dashboard +from .endpoints import report +# from .endpoints import authority +from .endpoints import data_mana +from .endpoints import query +from .endpoints import xquery +from .endpoints import data_auth +from .endpoints import 
event_mana +from .endpoints import test +from .authz import authz +from .check_data import controller as check_data +from .user_label import controller as user_label + +api_router = APIRouter() +api_router.include_router(test.router, tags=["test"], prefix='/test') + +api_router.include_router(user.router, tags=["用户接口"], prefix='/user') +api_router.include_router(project.router, tags=["项目接口"], prefix='/project') +api_router.include_router(folder.router, tags=["文件夹接口"], prefix='/folder') +api_router.include_router(space.router, tags=["空间接口"], prefix='/space') +api_router.include_router(dashboard.router, tags=["看板接口"], prefix='/dashboard') +api_router.include_router(report.router, tags=["报表接口"], prefix='/report') + +# api_router.include_router(authority.router, tags=["权限管理接口"], prefix='/authority') +api_router.include_router(data_auth.router, tags=["数据权限"], prefix='/data_auth') + +api_router.include_router(data_mana.router, tags=["数据管理"], prefix='/data_mana') +api_router.include_router(event_mana.router, tags=["数据管理"], prefix='/data_mana') + +api_router.include_router(query.router, tags=["ck"], prefix='/ck') +api_router.include_router(xquery.router, tags=["xck"], prefix='/ck') + +api_router.include_router(authz.router, tags=["api接口管理"], prefix='/authz') +api_router.include_router(check_data.router, tags=["打点验证"], prefix='/check_data') +api_router.include_router(user_label.router, tags=["用户标签"], prefix='/user_label') diff --git a/api/api_v1/authz/__init__.py b/api/api_v1/authz/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/api/api_v1/authz/authz.py b/api/api_v1/authz/authz.py new file mode 100644 index 0000000..d029dd3 --- /dev/null +++ b/api/api_v1/authz/authz.py @@ -0,0 +1,616 @@ +from typing import Any + +from fastapi import APIRouter, Depends, Request +from motor.motor_asyncio import AsyncIOMotorDatabase + +import crud +import schemas +from api import deps +from db import get_database +from db.ckdb import CKDrive, get_ck_db +from db.redisdb 
import RedisDrive, get_redis_pool +from models.behavior_analysis import BehaviorAnalysis +from utils import casbin_enforcer + +router = APIRouter() + + +@router.post("/add_role_domain") +async def add_role_domain( + request: Request, + data_in: schemas.AddRoleForUsersInDomain, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user)): + """ + 在当前项目为角色添加相应权限 + """ + + # username role dom + # for item in data_in.data: + # is_exists_role = await crud.role.check(db, _id=item.role_id, game=item.game) + # if not is_exists_role: + # continue + # casbin_enforcer.add_role_for_user_in_domain(user=item.username, + # role=item.role_id, + # domain=item.game) + # + # return schemas.Msg(code=0, msg='添加成功', data=True) + res = await crud.url_list.get_all(db) + role_id = {} + for i in res: + role_id[i['auth_id']] = i['name'] + for item in data_in.data: + now_quanxian = await crud.user_url.get_quanxian(db, schemas.Url_quanxian(user_id=item.role_id)) + # 如果不存在该用户其他游戏的权限,则新增一个 + if now_quanxian == {}: + await crud.user_url.insert_quanxian(db, schemas.Url_quanxian(game=[item.game], user=item.username, + user_id=item.role_id, + quanxian=[role_id[item.auth_id]], + quanxian_id=[item.auth_id])) + # 存在则在这个用户加上要添加的游戏项目权限 + else: + game = now_quanxian['game'] + game.append(item.game) + quanxian = now_quanxian['quanxian'] + quanxian.append(role_id[item.auth_id]) + quanxian_id = now_quanxian['quanxian_id'] + quanxian_id.append('auth_id') + await crud.user_url.updata_quanxian(db, schemas.Url_quanxian(game=game, user=item.username, + user_id=item.role_id, quanxian=quanxian, + quanxian_id=quanxian_id)) + return schemas.Msg(code=0, msg='添加成功', data=True) + + +@router.post("/get_permissions_for_user_in_domain") +async def get_permissions_for_user_in_domain( + request: Request, + data_in: schemas.GetPermissionsForUserInDomain, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user)): 
+ """ + 获取域内用户或角色的权限 + """ + #data为列表 + data = casbin_enforcer.get_permissions_for_user_in_domain(data_in.role_id, data_in.game) + paths = {i[2] for i in data} + #列表形式的coll_name + all_api = await crud.api_list.all_api(db) + for item in all_api: + if item['path'] in paths: + item['is_authz'] = True + else: + item['is_authz'] = False + + return schemas.Msg(code=0, msg='ok', data=all_api) + + +@router.post("/del_role_user_domain") +async def del_role_domain( + request: Request, + data_in: schemas.DeleteRolesForUserInDomain, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user)): + """ + 删除用户在当前项目中的权限 + """ + + # username role dom + # res = casbin_enforcer.delete_roles_for_user_in_domain(user=data_in.username, + # role=data_in.role_id, + # domain=data_in.game) + # + # #await crud.role.delete_id(db, data_in.role_id) + # return schemas.Msg(code=0, msg='ok', data=res) + res = await crud.user_url.get_all(db) + for i in res: + if i['user'] == data_in.username: + for nu in range(len(i['game'])): + if i['game'][nu] == data_in.game: + i['game'].remove(data_in.game) + i['quanxian_id'].remove(i['quanxian_id'][nu]) + i['quanxian'].remove(data_in.role_id) + await crud.user_url.updata_quanxian(db, schemas.Url_quanxian(game=i['game'], user=data_in.username, + user_id=i['user_id'], + quanxian_id=i['quanxian_id'], + quanxian=i['quanxian'])) + return schemas.Msg(code=0, msg='删除成功', data='') + + +@router.post("/del_role_user") +async def del_role_domain( + request: Request, + data_in: schemas.DeleteRolesForUserInDomain, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user)): + """ + 删除角色管理板块中的角色 + """ + await crud.url_list.delete_name(db, data_in) + return schemas.Msg(code=0, msg="ok", data='') + + +@router.post("/add_policy") +async def add_policy( + request: Request, + data_in: schemas.Datalist, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: 
schemas.UserDB = Depends(deps.get_current_user)): + """ + 向当前权限添加新路由 + """ + # res = 0 + # for path in data_id.path_list: + # res = casbin_enforcer.add_policy(data_id.role_id, data_id.game, path, data_id.act) + # return schemas.Msg(code=0, msg='ok', data=res) + res = await crud.url_list.find_one_url(db, data_in) + for i in range(len(res['api_list'])): + if res['api_list'][i] == data_in.path: + res['state'][i] = True + await crud.url_list.update_url_url(db, res) + return schemas.Msg(code=0, msg='修改成功', data='') + +@router.post("/del_policy") +async def remove_policy( + request: Request, + data_in: schemas.Del_role, + current_user: schemas.UserDB = Depends(deps.get_current_user)): + """ + 修改角色api权限 + """ + # res = casbin_enforcer.remove_policy(data_id.role_id, data_id.game, data_id.path, data_id.act) + # return schemas.Msg(code=0, msg='ok', data=res) + res = await crud.url_list.find_one_url(db, data_in) + for i in range(len(res['api_list'])): + if res['api_list'][i] == data_in.path: + res['state'][i] = False + await crud.url_list.update_url_url(db, res) + return schemas.Msg(code=0, msg='修改成功', data='') + + +@router.post("/del_api_module") +async def add_policy( + request: Request, + data_in: schemas.Add_module, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user)): + res = await crud.api_module.get_one_module(db, data_in) + for i in range(len(res['state'])): + if data_in.url == res['api_list'][i]: + res['state'][i] = False + await crud.api_module.update_one_module(db, res) + return schemas.Msg(code=0, msg='修改成功', data='') + + +@router.post("/add_api_module") +async def add_policy( + request: Request, + data_in: schemas.Add_module, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user)): + res = await crud.api_module.get_one_module(db, data_in) + for i in range(len(res['state'])): + if data_in.url == res['api_list'][i]: + res['state'][i] = True + 
await crud.api_module.update_one_module(db, res) + return schemas.Msg(code=0, msg='修改成功', data='') + + +@router.get("/api_list") +async def api_list( + request: Request, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user)): + """ + + GetPermissionsForUserInDomain + 所有的api + """ + # res = await crud.api_list.all_api(db) + # return schemas.Msg(code=0, msg='ok', data=res) + re = await crud.api_module.get_api_module(db) + res = [] + for i in re: + if i['path_name'] != 'root': + i['_id'] = str(i['_id']) + res.append(i) + return schemas.Msg(code=0, msg='ok', data=res) + +@router.post("/add_api") +async def add_api( + request: Request, + data_in: schemas.AddApi, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + """ + 添加api + """ + # try: + # res = await crud.api_list.add_api(db, data_in) + # except Exception as e: + # return schemas.Msg(code=-1, msg='已经存在') + # return schemas.Msg(code=0, msg='ok', data=res.matched_count) + res = await crud.api_module.get_api_module(db) + for i in res: + if data_in.path in i['api_list']: + return schemas.Msg(code=0, msg='该路由已存在', data='') + path_list = [] + for i in res: + path_list.append(i['path_name']) + if data_in.name in path_list: + for i in res: + if data_in.name == i['path_name']: + i['api_list'].append(data_in.path) + i['api_name'].append(data_in.desc) + i['state'].append(True) + await crud.api_module.updata_quanxian_module(db, schemas.Url_module(auth_id=i['auth_id'], + path_name=data_in.name, + api_list=i['api_list'], + api_name=i['api_name'], + state=i['state'])) + return schemas.Msg(code=0, msg='ok', data='路由添加成功!') + else: + auth_list = [] + for i in res: + auth_list.append(i['auth_id']) + auth_id = max(auth_list) + # api_data={} + # api_data['auth_id']='abc'+str(int(auth_id.split('c')[-1])+1) + # api_data['path_name']=data_in.name + # api_data['api_list']=[data_in.path] + # 
api_data['api_name']=[data_in.desc] + # api_data['state']=[True] + auth_id = 'abc' + str(int(auth_id.split('c')[-1]) + 1) + await crud.api_module.insert_quanxian(db, schemas.Url_module(auth_id=auth_id, path_name=data_in.name, + api_list=[data_in.path], + api_name=[data_in.desc], state=[True])) + return schemas.Msg(code=0, msg='ok', data='路由添加成功!') + + +@router.post("/del_api") +async def del_api( + request: Request, + data_in: schemas.DelApi, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user)) -> schemas.Msg: + """ + 删除api + """ + # 删除规则 + paths = await crud.api_list.find_ids(db, data_in.ids, {'path': 1}) + for item in paths: + casbin_enforcer.remove_filtered_policy(2, item['path']) + + # 删除保存的记录 + res = await crud.api_list.del_api(db, data_in) + + return schemas.Msg(code=0, msg='ok', data=res.deleted_count) + + +@router.post("/edit_api") +async def edit_api( + request: Request, + data_in: schemas.EditApi, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user)) -> schemas.Msg: + """ + 编辑api + """ + res = await crud.api_list.edit_api(db, data_in) + return schemas.Msg(code=0, msg='ok', data=res.matched_count) + + +@router.get("/domain") +async def domain_list( + request: Request, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + """ + 获取所有项目 + """ + # roel dom path * + res = await crud.project.all_game(db) + return schemas.Msg(code=0, msg='ok', data=res) +@router.get("/api_module") +async def domain_list( + request: Request, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + """ + 角色管理创建角色时显示的各个模块 + """ + res = await crud.api_module.get_api_module(db) + api_module=[] + for i in res: + if i['path_name'] !='root': + data=[] + data.append(i['auth_id']) + 
data.append(i['path_name']) + api_module.append(data) + return schemas.Msg(code=0, msg='ok', data=api_module) + +@router.post("/add_roles") +async def add_roles( + request: Request, + game:str, + data_in: schemas.Add_role, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + """ + 创建角色 + """ + # try: + # res = await crud.role.add_role(db, data_in) + # return schemas.Msg(code=0, msg='ok', data=res.upserted_id) + # except Exception as e: + # return schemas.Msg(code=-1, msg='添加失败', data=str(e)) + res = await crud.url_list.get_all(db) + for i in res: + if data_in.system == 1: + if data_in.name == i['name']: + return schemas.Msg(code=0, msg='该角色已存在!') + else: + if data_in.name == i['name'] and i['game'] == game: + return schemas.Msg(code=0, msg='该角色已存在!') + auth = [] + if data_in.system == 1: + for i in res: + auth.append(i['auth_id']) + max_auth = 'ab' + str(int(max(auth).split('b')[-1]) + 1) + api_module = await crud.api_module.get_api_module(db) + for i in api_module: + if i['auth_id'] in data_in.path_name: + await crud.url_list.insert_url(db, schemas.Url_list(name=data_in.name, auth_id=max_auth, + path_name=i['path_name'], api_list=i['api_list'], + api_name=i['api_name'], state=i['state'], + system=data_in.system)) + else: + state = [] + for nu in range(len(i['state'])): + state.append(False) + if i['path_name'] != 'root': + await crud.url_list.insert_url(db, schemas.Url_list(name=data_in.name, auth_id=max_auth, + path_name=i['path_name'], + api_list=i['api_list'], api_name=i['api_name'], + state=state, system=data_in.system)) + return schemas.Msg(code=0, msg='添加角色成功', data='') + else: + for i in res: + auth.append(i['auth_id']) + max_auth = 'ab' + str(int(max(auth).split('b')[-1]) + 1) + api_module = await crud.api_module.get_api_module(db) + for i in api_module: + if i['auth_id'] in data_in.path_name: + await crud.url_list.insert_urls(db, schemas.Url_lists(name=data_in.name, 
auth_id=max_auth, + path_name=i['path_name'], api_list=i['api_list'], + api_name=i['api_name'], state=i['state'], + system=data_in.system, game=game)) + else: + state = [] + for nu in range(len(i['state'])): + state.append(False) + if i['path_name'] != 'root': + await crud.url_list.insert_urls(db, schemas.Url_lists(name=data_in.name, auth_id=max_auth, + path_name=i['path_name'], game=game, + api_list=i['api_list'], + api_name=i['api_name'], state=state, + system=data_in.system)) + return schemas.Msg(code=0, msg='添加角色成功', data='') + +@router.get("/roles") +async def roles( + request: Request, + game: str, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + """ + 获取所有的管理员用户 + """ + # res = await crud.role.dom_roles(db, game) + # return schemas.Msg(code=0, msg='ok', data=res) + res = await crud.url_list.get_all(db) + role = [] + data = [] + # 区分不同项目下的权限用户 + for i in res: + if i['system'] == 1 and i['name'] != 'root': + role.append(i['name']) + if 'game' in i.keys(): + if game == i['game']: + role.append(i['name']) + # 得到不同权限用户 + role = list(set(role)) + for id in role: + data_dcit = {} + data_dcit['name'] = id + auth_id = [] + system = [] + data_list = [] + for i in res: + if i['name'] == id: + data_one = {} + auth_id.append(i['auth_id']) + system.append(i['system']) + data_one['path_name'] = i['path_name'] + data_one['api_name'] = i['api_name'] + data_one['api_list'] = i['api_list'] + data_one['state'] = i['state'] + data_list.append(data_one) + data_dcit['datalist'] = data_list + data_dcit['auth_id'] = auth_id[0] + data_dcit['system'] = system[0] + data.append(data_dcit) + return schemas.Msg(code=0, msg='ok', data=data) + +@router.post("/edit_role") +async def edit_role( + request: Request, + date_in: schemas.Editname, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + """ + 修改角色名 + """ + # res = 
await crud.role.edit_role(db, date_in) + # return schemas.Msg(code=0, msg='ok', data=res.matched_count) + await crud.url_list.edit_name(db,date_in) + return schemas.Msg(code=0,msg="ok") + +@router.get("/update_api_list") +async def update_api_list( + request: Request, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user), +): + """更新 api 列表""" + app = request.app + data = {} + for r in app.routes: + title = r.tags[0] if hasattr(r, 'description') else None + if not title: + continue + data.setdefault(title, {'list': []}) + path = r.path + name = r.description if hasattr(r, 'description') else r.name + data[title]['list'].append({'api': path, 'title': name}) + + data = [{'title': k, 'list': v['list']} for k, v in data.items()] + for item in data: + title = item['title'] + for l in item['list']: + api = l['api'] + name = l['title'] + add_data = schemas.UpdateApi(path=api, name=name) + await crud.api_list.update_api(db, add_data) + + return schemas.Msg(code=0, msg='ok', data=1) + + +@router.get("/account_owner_list") +async def account_owner_list(request: Request, + game: str, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user)) -> schemas.Msg: + """获取账号owner权限""" + account_infos = await crud.user.find_many(db, {}, + {'_id': False, 'name': True, 'nickname': True, + f'data_where.{game}': True}) + resp = [] + for account_info in account_infos: + resp.append( + { + 'name': account_info.get('name'), + 'nickname': account_info.get('nickname'), + 'owner_list': '' + } + ) + for item in account_info.get('data_where', {}).get(game, []): + if item.get('columnName') == 'owner_name': + resp[-1]['owner_list'] = ','.join(item.get('ftv', [])) + break + return schemas.Msg(code=0, msg='ok', data=resp) +# @router.post("/git_owner") +# async def git_owner(request: Request, +# game: str, +# db: AsyncIOMotorDatabase = Depends(get_database), +# current_user: 
schemas.UserDB = Depends(deps.get_current_user)) -> schemas.Msg: +# user=await crud.user + +@router.post("/update_account_owner") +async def account_owner_list(request: Request, + game: str, + data_in: schemas.OwnerList, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user)) -> schemas.Msg: + """设置账号owner权限""" + set_data = { + "columnName": "owner_name", + "tableType": "event", + "comparator": "in", + "ftv": data_in.owners + } + if not data_in.owners[0]: + res = await crud.user.update_one(db, {'name': data_in.account_name, + f'data_where.{game}': {'$exists': True} + }, + {'$pull': {f'data_where.{game}': {'columnName': 'owner_name'}}} + ) + return schemas.Msg(code=0, msg='ok', data=res.raw_result) + + is_exists = await crud.user.find_one(db, {'name': data_in.account_name, + f'data_where.{game}': {'$exists': True}, + }) + if is_exists: + if await crud.user.find_one(db, {'name': data_in.account_name, + f'data_where.{game}': {'$exists': True}, + f'data_where.{game}.columnName': 'owner_name' + }): + await crud.user.update_one(db, {'name': data_in.account_name, + f'data_where.{game}': {'$exists': True}, + f'data_where.{game}.columnName': 'owner_name' + }, {'$set': {f'data_where.{game}.$': set_data}}) + else: + await crud.user.update_one(db, {'name': data_in.account_name, + f'data_where.{game}': {'$exists': True}, + }, {'$push': {f'data_where.{game}': set_data}}) + else: + await crud.user.update_one(db, {'name': data_in.account_name, + }, {'$set': {f'data_where.{game}': [set_data]}}) + + return schemas.Msg(code=0, msg='ok') + +@router.get("/all_api_board") +async def all_api_board(request: Request, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user)) -> schemas.Msg: + """显示创建项目时生成的所有api权限模板""" + res = await crud.api_board.all_api(db) + for i in res: + i['_id'] = str(i['_id']) + return schemas.Msg(code=0, msg='ok', data=res) + 
+@router.post("/updata_api_board") +async def updata_api_board( + request: Request, + opinion: bool, + data_in: schemas.Api_board, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user)) -> schemas.Msg: + """ + 修改api权限模板 + """ + await crud.api_board.update(db, data_in,opinion) + return schemas.Msg(code=0, msg='ok') + +@router.post("/add_api_board") +async def add_api_board( + request: Request, + data_in: schemas.Api_board, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user)) -> schemas.Msg: + """ + 添加api权限模板 + """ + res = await crud.api_board.all_api(db) + for i in res: + if data_in.name ==i['name'] and data_in.api_name == i['api_name'] and data_in.api_path == i['api_path']: + return schemas.Msg(code=-1, msg='该路径已存在') + await crud.api_board.insert(db, data_in) + return schemas.Msg(code=0, msg='ok') + +@router.post("/del_api_board") +async def del_api_board( + request: Request, + data_in: schemas.Api_board, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user)) -> schemas.Msg: + """ + 删除api权限模板 + """ + await crud.api_board.del_api(db, data_in) + return schemas.Msg(code=0, msg='ok') \ No newline at end of file diff --git a/api/api_v1/check_data/__init__.py b/api/api_v1/check_data/__init__.py new file mode 100644 index 0000000..c39e2e0 --- /dev/null +++ b/api/api_v1/check_data/__init__.py @@ -0,0 +1 @@ +from .controller import router \ No newline at end of file diff --git a/api/api_v1/check_data/controller.py b/api/api_v1/check_data/controller.py new file mode 100644 index 0000000..d9a5d91 --- /dev/null +++ b/api/api_v1/check_data/controller.py @@ -0,0 +1,51 @@ +from fastapi import APIRouter, Request, Depends +from motor.motor_asyncio import AsyncIOMotorDatabase + +import schemas +from api.api_v1.check_data import service +from db import get_database + +router = APIRouter() + + 
+@router.post("/check") +async def check(request: Request, + data_in: schemas.CheckData, + game: str, + ) -> schemas.Msg: + res = await service.check_data(game, data_in) + return schemas.Msg(code=0, msg='ok', data=res) + + +@router.post("/save") +async def save(request: Request, + data_in: schemas.AddTemplate, + game: str, + db: AsyncIOMotorDatabase = Depends(get_database), + ) -> schemas.Msg: + res = await service.save_template(db, data_in, game) + return schemas.Msg(code=0, msg='ok', data=res) + + +@router.get('/template') +async def template(request: Request, game: str, + db: AsyncIOMotorDatabase = Depends(get_database), + + ) -> schemas.Msg: + data = await service.get_template(db, game) + return schemas.Msg(code=0, msg='ok', data=data) + + +@router.post('/del_template') +async def del_template(request: Request, game: str, data_in: schemas.DelTemplate, + db: AsyncIOMotorDatabase = Depends(get_database), + + ) -> schemas.Msg: + data = await service.del_template(db, data_in) + return schemas.Msg(code=0, msg='ok', data=data) + + +@router.get('/default_field') +async def template(request: Request, game: str) -> schemas.Msg: + data = service.get_default_field() + return schemas.Msg(code=0, msg='ok', data=data) diff --git a/api/api_v1/check_data/service.py b/api/api_v1/check_data/service.py new file mode 100644 index 0000000..ca7e6b5 --- /dev/null +++ b/api/api_v1/check_data/service.py @@ -0,0 +1,127 @@ +# coding:utf-8 + +import copy +import json +import re +from collections import namedtuple +from ipaddress import IPv4Address + +import numpy as np + +import clickhouse_driver + +import schemas +from core.config import settings +from db import get_database +from db.ckdb import ckdb as ck_client +import crud + +Type = namedtuple('Type', ['string', 'integer', 'array', 'ipv4']) +type_map = Type(string=str, integer=np.number, array=list, ipv4=IPv4Address) + + +async def check_data(game, data_in: schemas.CheckData): + db = data_in.db_name + saixuan=data_in.game + event_name 
= data_in.event_name + is_unique = data_in.is_unique + props = data_in.props + where = data_in.where + limit = 5 + check_type = copy.deepcopy(props) + check_type.update(data_in.default_field) + + select = ','.join([f'`{field}`' for field in check_type.keys()]) + if game == 'debug': + sql = f"""select {select} from {db}.event where game='{saixuan}' and `#event_name`='{event_name}'""" + else: + sql = f"""select {select} from {db}.event where game='{game}' and `#event_name`='{event_name}'""" + for k, v in where.items(): + sql += f""" and `{k}`='{v}'""" + + sql += f""" order by `#event_time` desc""" + sql += f""" limit {limit}""" + + print(sql) + # pass_list: [], fail_list: [] + # sql = 'show databases' + report = {'fail_list': [], + 'pass_list': []} + fail_list = report['fail_list'] + pass_list = report['pass_list'] + try: + df = await ck_client.query_dataframe(sql) + report['title'] = df.columns.tolist() + report['data'] = [] + for item in df.values: + report['data'].append([]) + report['data'][-1] = [str(i) for i in item] + + except clickhouse_driver.errors.ServerException as e: + if e.code == 47: + msg = re.match(r"""DB::Exception: Missing columns: '(.*)' while processing query""", e.message) + filed = '未知' + if msg: + filed = msg.group(1) + fail_list.append(f'
数据库不存在字段-> {filed}
') + else: + fail_list.append('数据库查询未知错误
') + + return report + + if df.empty: + fail_list.append('根据过滤条件未查到任何数据,也有可能是数据未及时入库。(3分钟后还没查到说明存在问题)
') + return report + if is_unique and len(df) > 1: + fail_list.append('警告:记录数大于一条
') + + for k, t in check_type.items(): + if t == 'json': + + if isinstance(df[k][0], str): + try: + json.loads(df[k][0]) + pass_list.append(f'通过:字段{k} 是期望的类型
') + continue + except: + fail_list.append( + f"""错误:字段{k} 期望{t}类型,不是json格式
""") + continue + else: + fail_list.append( + f"""错误:字段{k} 期望{t}类型,得到{re.findall("'(.*)'>", str(type(df[k][0])))[0]}
""") + continue + + if not isinstance(df[k][0], getattr(type_map, t)): + fail_list.append( + f"""错误:字段{k} 期望{t}类型,得到->{re.findall("'(.*)'>", str(type(df[k][0])))[0]}
""") + else: + pass_list.append(f'通过:字段{k} 是期望的类型
') + + return report + + +async def save_template(db, data_in: schemas.AddTemplate, + game: str, + ): + res = await crud.check_data.update_one(db, {'title': data_in.title, 'game': game}, + {'$set': {'game': game, + 'check': data_in.check.dict()}}, + upsert=True) + return True + + +async def get_template(db, game, **kwargs): + res = [] + async for doc in crud.check_data.find(db, {'game': game}, {'_id': False}, **kwargs): + res.append(doc) + return res + + +def get_default_field(): + return settings.DEFAULT_FIELD + + +async def del_template(db, data_id: schemas.DelTemplate): + await crud.check_data.delete(db, data_id.dict()) + return True diff --git a/api/api_v1/ck_mana/__init__.py b/api/api_v1/ck_mana/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/api/api_v1/ck_mana/event.py b/api/api_v1/ck_mana/event.py new file mode 100644 index 0000000..c81f727 --- /dev/null +++ b/api/api_v1/ck_mana/event.py @@ -0,0 +1,31 @@ +from collections import defaultdict + +import pandas as pd +import numpy as np +from fastapi import APIRouter, Depends, Request +from motor.motor_asyncio import AsyncIOMotorDatabase + +import crud, schemas +from common import * + +from api import deps +from db import get_database +from db.ckdb import get_ck_db, CKDrive +from db.redisdb import get_redis_pool, RedisDrive + +from models.behavior_analysis import BehaviorAnalysis +from models.user_analysis import UserAnalysis +from models.x_analysis import XAnalysis + +router = APIRouter() + + +@router.post("/update_event_view") +async def update_event_view( + request: Request, + game: str, + ckdb: CKDrive = Depends(get_ck_db), + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + """ 更新事件视图 """ + pass diff --git a/api/api_v1/endpoints/__init__.py b/api/api_v1/endpoints/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/api/api_v1/endpoints/authority.py b/api/api_v1/endpoints/authority.py new file mode 100644 index 0000000..0016dca --- 
/dev/null +++ b/api/api_v1/endpoints/authority.py @@ -0,0 +1,264 @@ +# import pymongo +# from fastapi import APIRouter, Depends, Request +# from motor.motor_asyncio import AsyncIOMotorDatabase +# import crud, schemas +# from core.config import settings +# from core.security import get_password_hash +# +# from db import get_database +# from api import deps +# from db.ckdb import CKDrive, get_ck_db +# from utils import casbin_enforcer +# +# router = APIRouter() +# +# +# @router.get("/api_list") +# async def api_list(request: Request, +# current_user: schemas.UserDB = Depends(deps.get_current_user)) -> schemas.Msg: +# """api 列表""" +# app = request.app +# data = {} +# for r in app.routes: +# title = r.tags[0] if hasattr(r, 'description') else None +# if not title: +# continue +# data.setdefault(title, {'list': []}) +# path = r.path +# name = r.description if hasattr(r, 'description') else r.name +# data[title]['list'].append({'api': path, 'title': name}) +# +# res = [{'title': k, 'list': v['list']} for k, v in data.items()] +# +# return schemas.Msg(code=0, msg='ok', data=res) +# +# +# @router.post('/set_data_auth') +# async def set_data_auth(request: Request, +# data_id: schemas.DataAuthSet, +# game: str = Depends(deps.get_game_project), +# db: AsyncIOMotorDatabase = Depends(get_database), +# current_user: schemas.UserDB = Depends(deps.get_current_user) +# ) -> schemas.Msg: +# """设置用户数据权限""" +# await crud.authority.set_data_auth(db, data_id, game=game) +# return schemas.Msg(code=0, msg='ok', data=data_id) +# +# +# @router.get('/get_user_data_auth') +# async def get_user_data_auth(request: Request, +# game: str = Depends(deps.get_game_project), +# db: AsyncIOMotorDatabase = Depends(get_database), +# ck: CKDrive = Depends(get_ck_db), +# current_user: schemas.UserDB = Depends(deps.get_current_user) +# ) -> schemas.Msg: +# """获取当前用户数据权限""" +# +# data_auth = await crud.authority.get_data_auth(db, username=request.user.name, game=game) +# if not data_auth: +# values = await 
ck.distinct(game, 'event', '#event_name') +# return schemas.Msg(code=0, msg='ok', data={ +# 'data': values, +# 'game': game, +# 'name': '全部事件' +# }) +# data_auth_id = data_auth['data_auth_id'] +# data = await crud.data_auth.get(data_auth_id) +# return schemas.Msg(code=0, msg='ok', data=data) +# +# +# # @router.get('/get_users_data_auth') +# # async def get_users_data_auth(request: Request, +# # game: str = Depends(deps.get_game_project), +# # db: AsyncIOMotorDatabase = Depends(get_database), +# # ck: CKDrive = Depends(get_ck_db), +# # current_user: schemas.UserDB = Depends(deps.get_current_user) +# # ) -> schemas.Msg: +# # """获取当前项目所有用户数据权限""" +# # +# # roles = await crud.authority.find_many(db, ptype='g', v2=game) +# # for item in roles: +# # user = item['v0'] +# # data_auth = await crud.authority.get_data_auth(db, username=request.user.name, game=game) +# # if not data_auth: +# # values = await ck.distinct(game, 'event', '#event_name') +# # return schemas.Msg(code=0, msg='ok', data={ +# # 'data': values, +# # 'game': game, +# # 'name': '全部事件' +# # }) +# # data_auth_id = data_auth['data_auth_id'] +# # data = await crud.data_auth.get(data_auth_id) +# # return schemas.Msg(code=0, msg='ok', data=data) +# # +# # # data_auth = await crud.authority.get_data_auth(db, username=request.user.name, game=game) +# # # if not data_auth: +# # # values = await ck.distinct(game, 'event', '#event_name') +# # # return schemas.Msg(code=0, msg='ok', data={ +# # # 'data': values, +# # # 'game': game, +# # # 'name': '全部事件' +# # # }) +# # # data_auth_id = data_auth['data_auth_id'] +# # # data = await crud.data_auth.get(data_auth_id) +# # return schemas.Msg(code=0, msg='ok') +# +# +# @router.post("/add_role") +# async def add_role(request: Request, +# data_in: schemas.CasbinRoleCreate, +# game: str = Depends(deps.get_game_project), +# db: AsyncIOMotorDatabase = Depends(get_database), +# current_user: schemas.UserDB = Depends(deps.get_current_user) +# ) -> schemas.Msg: +# """创建角色""" +# +# 
# 不允许角色名和用户名一样 +# if await crud.user.get_by_user(db, name=data_in.role_name): +# return schemas.Msg(code=-1, msg='请改个名字') +# role_dom = game +# api_dict = dict() +# for r in request.app.routes: +# api_dict[r.path] = r.description if hasattr(r, 'description') else r.name +# # 角色有的接口权限 +# for obj in data_in.role_api: +# casbin_enforcer.add_policy(data_in.role_name, role_dom, obj, '*') +# await crud.authority.update_one(db, {'ptype': 'p', 'v0': data_in.role_name, 'v1': role_dom, 'v2': obj}, +# {'$set': {'api_name': api_dict.get(obj)}}) +# +# # 管理员默认拥有该角色 方便从db中读出 +# await crud.authority.create(db, 'g', settings.SUPERUSER_NAME, data_in.role_name, role_dom, '*', +# role_name=data_in.role_name, +# game=role_dom) +# +# return schemas.Msg(code=0, msg='ok') +# +# +# @router.post("/add_sys_role") +# async def add_sys_role(request: Request, +# data_in: schemas.CasbinRoleCreate, +# game: str = Depends(deps.get_game_project), +# db: AsyncIOMotorDatabase = Depends(get_database), +# current_user: schemas.UserDB = Depends(deps.get_current_user) +# ) -> schemas.Msg: +# """创建系统角色""" +# api_dict = dict() +# +# # 不允许角色名和用户名一样 +# if await crud.user.get_by_user(db, name=data_in.role_name): +# return schemas.Msg(code=-1, msg='请改个名字') +# +# for r in request.app.routes: +# api_dict[r.path] = r.description if hasattr(r, 'description') else r.name +# # 角色有的接口权限 +# for obj in data_in.role_api: +# casbin_enforcer.add_policy(data_in.role_name, '*', obj, '*') +# await crud.authority.create(db, 'p', data_in.role_name, '*', obj, '*', api_name=api_dict.get(obj)) +# +# # 管理员默认拥有该角色 方便从db中读出 +# await crud.authority.create(db, 'g', settings.SUPERUSER_NAME, data_in.role_name, +# role_name=data_in.role_name, +# game='*') +# +# return schemas.Msg(code=0, msg='ok') +# +# +# @router.post("/add_account") +# async def add_account(request: Request, +# +# data_in: schemas.AccountsCreate, +# game: str = Depends(deps.get_game_project), +# db: AsyncIOMotorDatabase = Depends(get_database), +# current_user: 
schemas.UserDB = Depends(deps.get_current_user) +# ) -> schemas.Msg: +# """添加账号""" +# +# # 用户名不能与角色名重复 +# roles = casbin_enforcer.get_all_roles() +# accounts = {item.username for item in data_in.accounts} +# # 用户名不能与已存在的重复 +# exists_user = await crud.user.get_all_user(db) +# if accounts & set(roles) or accounts & set(exists_user): +# return schemas.Msg(code=-1, msg='已存在', data=list(set(accounts) & set(roles) | accounts & set(exists_user))) +# +# """创建账号 并设置角色""" +# for item in data_in.accounts: +# account = schemas.UserCreate(name=item.username, password=settings.DEFAULT_PASSWORD) +# try: +# await crud.user.create(db, account) +# except pymongo.errors.DuplicateKeyError: +# return schemas.Msg(code=-1, msg='用户名已存在') +# +# casbin_enforcer.add_grouping_policy(item.username, item.role_name, game) +# # 设置数据权限 +# await crud.authority.set_data_auth(db, +# schemas.DataAuthSet(username=item.username, data_auth_id=item.data_auth_id), +# game) +# +# # 添加到项目成员 +# await crud.project.add_members(db, schemas.ProjectMember(project_id=data_in.project_id, members=list(accounts))) +# +# return schemas.Msg(code=0, msg='ok') +# +# +# @router.get("/all_role") +# async def all_role(request: Request, +# db: AsyncIOMotorDatabase = Depends(get_database), +# game: str = Depends(deps.get_game_project), +# current_user: schemas.UserDB = Depends(deps.get_current_user) +# ) -> schemas.Msg: +# """获取所有角色""" +# +# app = request.app +# api_data = {} +# for r in app.routes: +# title = r.tags[0] if hasattr(r, 'description') else None +# if not title: +# continue +# api_data[r.path] = { +# 'api': r.path, +# 'title': title, +# 'name': r.description if hasattr(r, 'description') else r.name +# } +# +# """获取域内所有角色""" +# roles = await crud.authority.find_many(db, {'role_name': {'$exists': 1}, 'game': game}) +# dom_data = [{'role': item['v1'], 'title': item['role_name'], 'id': str(item['_id'])} for item in roles] +# for item in dom_data: +# q = await crud.authority.get_role_dom_authority(db, item['role'], 
game, api_data) +# item['authority'] = [{'title': k, 'child': v} for k, v in q.items()] +# +# # 获取系统角色 +# roles = await crud.authority.find_many(db, {'role_name':{'$exists': 1}, 'game':'*'}) +# sys_data = [{'role': item['v1'], 'title': item['role_name'], 'id': str(item['_id'])} for item in roles] +# for item in sys_data: +# q = await crud.authority.get_role_dom_authority(db, item['role'], dom=game, api_data=api_data) +# item['authority'] = [{'title': k, 'child': v} for k, v in q.items()] +# +# data = { +# 'dom_role': dom_data, +# 'sys_role': sys_data +# } +# return schemas.Msg(code=0, msg='ok', data=data) +# +# # @router.post("/set_role") +# # async def set_role(request: Request, +# # data_id: schemas.AccountSetRole, +# # db: AsyncIOMotorDatabase = Depends(get_database), +# # current_user: schemas.UserDB = Depends(deps.get_current_user) +# # ) -> schemas.Msg: +# # """设置账号角色""" +# # casbin_enforcer.delete_user(data_id.name) +# # casbin_enforcer.add_role_for_user(data_id.name, data_id.role_name) +# # await crud.authority.update_one(db, {'ptype': 'g', 'v0': data_id.name}, dict(v1=data_id.role_name)) +# # +# # return schemas.Msg(code=0, msg='ok') +# +# # @router.get("/delete_user") +# # async def delete_user(request: Request, +# # data_id: schemas.AccountDeleteUser, +# # db: AsyncIOMotorDatabase = Depends(get_database), +# # current_user: schemas.UserDB = Depends(deps.get_current_user) +# # ) -> schemas.Msg: +# # pass +# # return schemas.Msg(code=0, msg='暂时没有') diff --git a/api/api_v1/endpoints/dashboard.py b/api/api_v1/endpoints/dashboard.py new file mode 100644 index 0000000..58d84c8 --- /dev/null +++ b/api/api_v1/endpoints/dashboard.py @@ -0,0 +1,260 @@ +import pymongo +from fastapi import APIRouter, Depends, Request +from motor.motor_asyncio import AsyncIOMotorDatabase +import crud, schemas + +from db import get_database +from api import deps +from utils.func import get_uid + +router = APIRouter() + + +@router.post("/create") +async def create( + data_in: 
schemas.DashboardCreate, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + """创建看板""" + try: + await crud.dashboard.create(db, data_in, user_id=current_user.id) + except pymongo.errors.DuplicateKeyError: + return schemas.Msg(code=-1, msg='看板已存在', data='看板已存在') + + return schemas.Msg(code=0, msg='ok', data='创建成功') + + +@router.post('/edit_show_report') +async def edit_show_report( + request: Request, + data_in: schemas.EditShowReport, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + report_id = data_in.config.report_id + res = await crud.dashboard.update_one(db, {'_id': data_in.dashboard_id, 'reports.report_id': report_id}, + {'$set': {f'reports.$.{k}': v for k, v in + data_in.config.dict(skip_defaults=True).items()}}) + if res.modified_count == 1: + return schemas.Msg(code=0, msg='ok', data=data_in.config) + elif res.modified_count == 0: + return schemas.Msg(code=-1, msg='没有修改', data=dict()) + + +@router.post("/delete") +async def delete( + request: Request, + data_in: schemas.DashboardDelete, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + """删除看板""" + del_dashboard = await crud.dashboard.delete(db, {'_id': {'$in': data_in.ids}}) + + if del_dashboard.deleted_count == 0: + return schemas.Msg(code=-1, msg='error', data='删除失败') + return schemas.Msg(code=0, msg='ok', data='删除成功') + + +@router.post("/move") +async def move( + data_in: schemas.DashboardMove, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + """ + 移动看板 + """ + for source_id in data_in.source_ids: + res = await crud.dashboard.update_one(db, {'_id': source_id}, + {'$set': dict(cat=data_in.cat, pid=data_in.dest_pid)}) + return schemas.Msg(code=0, msg='ok', 
data='移动成功') + + +@router.post("/sort") +async def sort( + game: str, + data_in: schemas.DashboardSort, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + """ + 看板排序 + """ + for item in data_in.sort: + await crud.dashboard.set_sort(db, index=item.dashboard_id, sort=item.sort) + return schemas.Msg(code=0, msg='ok', data=1) + + +@router.post("/copy") +async def copy( + request: Request, + data_in: schemas.DashboardCopy, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + """ + 复制到其他项目 + """ + # 检查是否存在默认空间 不存在就创建 + dest_project_id = data_in.dest_project_id + dest_default_space = await crud.space.find_one(db, {'project_id': dest_project_id, 'name': '默认空间'}, + {'_id': True}) + dest_space_id = dest_default_space.get('_id') + user_id = request.user.id + # 创建默认空间 + if not dest_space_id: + default_space = await crud.space.create(db, + schemas.SpaceCreate(name='默认空间', project_id=dest_project_id), + user=current_user) + dest_space_id = default_space.inserted_id + + dashboards = await crud.dashboard.find_many(db, {'_id': {'$in': data_in.source_ids}}, {'_id': False}) + for item in dashboards: + item['project_id'] = dest_project_id + item['pid'] = dest_space_id + item['cat'] = 'space' + item['user_id'] = user_id + item['_id'] = get_uid() + for report in item['reports']: + report_id = report['report_id'] + new_report = await crud.report.get(db, report_id) + new_report_id = get_uid() + report['report_id'] = new_report_id + new_report['user_id'] = user_id + new_report['_id'] = new_report_id + new_report['project_id'] = dest_project_id + try: + await crud.report.insert_one(db, new_report) + except: + exists_report = await crud.report.find_one(db, {'project_id': item['project_id'], + 'user_id': item['user_id'], 'name': report['name']}) + report['report_id'] = exists_report['_id'] + try: + await 
crud.dashboard.update_one(db, + {'project_id': item['project_id'], 'name': item['name'], + 'user_id': item['user_id']}, {'$set': item}, + upsert=True) + except: + pass + + return schemas.Msg(code=0, msg='ok', data='复制成功') + + +@router.post("/copy_to_my_space") +async def copy( + request: Request, + data_in: schemas.DashboardCopyToSpace, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + """ + 复制到自己空间 + """ + # 检查是否存在默认空间 不存在就创建 + dest_project_id = data_in.project_id + dest_space_id = data_in.dest_space_id + user_id = request.user.id + + dashboards = await crud.dashboard.find_many(db, {'_id': {'$in': data_in.source_ids}}, {'_id': False}) + for item in dashboards: + item['project_id'] = dest_project_id + item['pid'] = dest_space_id + item['user_id'] = user_id + item['cat'] = 'space' + item['_id'] = get_uid() + for report in item['reports']: + report_id = report['report_id'] + new_report = await crud.report.get(db, report_id) + new_report_id = get_uid() + report['report_id'] = new_report_id + new_report['_id'] = new_report_id + new_report['user_id'] = user_id + new_report['project_id'] = dest_project_id + try: + await crud.report.insert_one(db, new_report) + except: + exists_report = await crud.report.find_one(db, {'project_id': item['project_id'], + 'user_id': item['user_id'], 'name': report['name']}) + report['report_id'] = exists_report['_id'] + try: + await crud.dashboard.update_one(db, + {'project_id': item['project_id'], 'name': item['name'], + 'user_id': item['user_id']}, {'$set': item}, + upsert=True) + except: + pass + + return schemas.Msg(code=0, msg='ok', data='复制成功') + + +@router.post("/add_report") +async def add_report(data_in: schemas.AddReport, + game: str, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) + ): + """添加报表""" + reports = [item.dict() for item in data_in.report_ids] + # res = await 
crud.dashboard.update_one(db, {'_id': data_in.id}, + # {'$push': {'reports': {'$each': reports}}}) + await crud.dashboard.update_one(db, {'_id': data_in.id}, + {'$set': {'reports': reports}}) + return schemas.Msg(code=0, msg='ok', data='ok') + + +@router.post("/edit_report") +async def edit_report(data_in: schemas.EditReport, + game: str, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) + ): + """看板样式设置""" + + res = await crud.dashboard.update_one(db, {'_id': data_in.id, 'reports.report_id': data_in.report.report_id}, + {'$set': {f'reports.$.{k}': v for k, v in + data_in.report.dict(skip_defaults=True).items()}}) + + return schemas.Msg(code=0, msg='ok', data='ok') + + +@router.post("/del_report") +async def del_report( + game: str, + data_in: schemas.DelReport, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) +): + """删除报表""" + del_item = {'report_id': data_in.report_id} + await crud.dashboard.update_one(db, {'_id': data_in.id}, {'$pull': {'reports': del_item}}) + return schemas.Msg(code=0, msg='ok', data='ok') + + +@router.post("/edit") +async def edit( + game: str, + data_in: schemas.EditDashboard, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) +): + """编辑看板名""" + await crud.dashboard.update_one(db, {'_id': data_in.dashboard_id}, {'$set': {'name': data_in.new_name}}) + return schemas.Msg(code=0, msg='ok', data='ok') + + +@router.post("/") +async def dashboards(request: Request, + game: str, + data_in: schemas.ReadDashboard, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) + ): + """获取一个看板""" + res = await crud.dashboard.get(db, id=data_in.id) + reports = {item['report_id']: item for item in res['reports']} + reports_detail = await crud.report.find_many(db, {'_id': {'$in': list(reports.keys())}}, 
{'query.cachedata': False}) + for item in reports_detail: + reports[item['_id']].update(item) + return schemas.Msg(code=0, msg='ok', data=reports) diff --git a/api/api_v1/endpoints/data_auth.py b/api/api_v1/endpoints/data_auth.py new file mode 100644 index 0000000..354ff60 --- /dev/null +++ b/api/api_v1/endpoints/data_auth.py @@ -0,0 +1,306 @@ +import json + +import pymongo +from bson import ObjectId +from fastapi import APIRouter, Depends, Request +from fastapi.encoders import jsonable_encoder +from motor.motor_asyncio import AsyncIOMotorDatabase +from redis import Redis + +import crud, schemas +from core.config import settings +from core.security import get_password_hash + +from db import get_database +from api import deps +from db.ckdb import CKDrive, get_ck_db +from db.redisdb import get_redis_pool, RedisDrive + +# from utils import casbin_enforcer + +router = APIRouter() + + +@router.post('/add_data_auth') +async def add_data_auth(request: Request, + data_id: schemas.DataAuthCreate, + game: str = Depends(deps.get_game_project), + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) + ) -> schemas.Msg: + """创建数据权限""" + await crud.data_auth.create(db, data_id, game) + return schemas.Msg(code=0, msg='ok', data=data_id) + + +@router.post('/edit_data_auth') +async def edit_data_auth(request: Request, + data_id: schemas.DataAuthEdit, + game: str, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) + ) -> schemas.Msg: + """修改数据权限""" + await crud.data_auth.edit_data_auth(db, data_id) + return schemas.Msg(code=0, msg='ok', data=data_id) + + +@router.get("/quotas_map") +async def quotas_map( + request: Request, + game: str, + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + return schemas.Msg(code=0, msg='ok', data=settings.CK_OPERATOR) + + +@router.get("/filter_map") +async def filter_map( + request: Request, + 
game: str, + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + return schemas.Msg(code=0, msg='ok', data=settings.CK_FILTER) + + +@router.get('/all_event') +async def all_event(request: Request, + game: str, + ck: CKDrive = Depends(get_ck_db), + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) + ) -> schemas.Msg: + """获取所有事件""" + values = await ck.distinct(game, 'event', '#event_name') + values.sort() + return schemas.Msg(code=0, msg='ok', data=values) + + +@router.get("/list") +async def data_authority(request: Request, + game: str, + db: AsyncIOMotorDatabase = Depends(get_database), + rdb: RedisDrive = Depends(get_redis_pool), + ck: CKDrive = Depends(get_ck_db), + current_user: schemas.UserDB = Depends(deps.get_current_user) + ) -> schemas.Msg: + """获取前项目数据权限""" + total_event = await ck.distinct_count(game, 'event', '#event_name') + data = await crud.data_auth.get_game_data_auth(db, game) + for item in data: + item['id'] = str(item['_id']) + del item['_id'] + item['data_range'] = f'{len(item["data"])}/{total_event}' + + data = jsonable_encoder(data) + return schemas.Msg(code=0, msg='ok', data=data) + + +@router.get("/my_event") +async def my_event(request: Request, + game: str, + db: AsyncIOMotorDatabase = Depends(get_database), + rdb: RedisDrive = Depends(get_redis_pool), + ck: CKDrive = Depends(get_ck_db), + current_user: schemas.UserDB = Depends(deps.get_current_user) + ) -> schemas.Msg: + """获取自己的事件权限""" + event_list = [] + # start_date = (datetime.datetime.now()-datetime.timedelta(days=30)).strftime('%Y-%m-%d %H:%M:%S') + # where = f"""`#event_time` > '{start_date}'""" + # my_data_auth = await ck.distinct(game, 'event', '#event_name',where) + my_data_auth = await rdb.smembers(f'{game}_event_set') + + # + # else: + # # 设置了数据权限 + # my_data_auth = await crud.data_auth.get(db, ObjectId(data_auth_id)) + # my_data_auth = my_data_auth['data'] + + event_show_name = 
await crud.event_mana.get_all_show_name(db, game) + event_list.append({'id': 'event', 'title': '全部事件', 'category': []}) + for item in my_data_auth: + event_list[-1]['category'].append({ + 'event_name': item, + 'event_desc': event_show_name.get(item, item) + }) + event_list[-1]['category'].append({'event_name': '*', 'event_desc': '任意事件'}) + event_list.sort() + return schemas.Msg(code=0, msg='ok', data=event_list) + + +@router.get("/user_property") +async def user_property(request: Request, + game: str, + db: AsyncIOMotorDatabase = Depends(get_database), + rdb: RedisDrive = Depends(get_redis_pool), + ck: CKDrive = Depends(get_ck_db), + current_user: schemas.UserDB = Depends(deps.get_current_user) + ) -> schemas.Msg: + """获取用户属性""" + data = await rdb.get(f'{game}_user') + data = json.loads(data) + propertys = [] + + data_attr = await crud.data_attr.find_many(db, {'game': game, 'cat': 'user'}) + data_attr = {item['name']: item for item in data_attr} + + for k, v in data.items(): + data_type = settings.CK_TYPE_DICT.get(v) + propertys.append( + {'name': k, + 'data_type': data_type, + 'show_name': data_attr.get(k, {}).get('show_name', ''), + } + ) + propertys = sorted(propertys, key=lambda x: x['show_name']) + + return schemas.Msg(code=0, msg='ok', data=propertys) + + +@router.post('/load_prop_quotas') +async def load_prop_quotas(request: Request, + game: str, + data_in: schemas.LoadProQuotas, + db: AsyncIOMotorDatabase = Depends(get_database), + rdb: RedisDrive = Depends(get_redis_pool), + ck: CKDrive = Depends(get_ck_db), + current_user: schemas.UserDB = Depends(deps.get_current_user) + ) -> schemas.Msg: + """事件属性 聚合条件""" + + event_columns = await ck.get_columns(game, 'event') + + data_attr = await crud.data_attr.find_many(db, {'game': game, 'cat': 'event'}) + data_attr = {item['name']: item for item in data_attr} + event_props = [] + for item in event_columns: + data_type = settings.CK_TYPE_DICT.get(item['type']) + title = data_attr.get(item['name'], 
{}).get('show_name') or item['name'] + event_prop = { + 'id': item['name'], + 'data_type': data_type, + 'title': title, + # 'category': settings.CK_OPERATOR.get(data_type) or [] + } + event_props.append(event_prop) + if data_in.model == 'scatter': + staid_quots = [ + { + "id": "*", + "data_type": None, + "analysis": "times", + "title": "次数", + }, + { + "id": "*", + "data_type": None, + "analysis": "number_of_days", + "title": "天数", + }, + { + "id": "*", + "data_type": None, + "analysis": "number_of_hours", + "title": "小时数", + }, + ] + else: + staid_quots = [ + { + "id": "*", + "data_type": None, + "analysis": "total_count", + "title": "总次数", + }, + { + "id": "*", + "analysis": "touch_user_count", + "data_type": None, + "title": "触发用户数", + }, + { + "id": "*", + "analysis": "touch_user_avg", + "data_type": None, + "title": "人均次数", + }, + ] + + res = { + 'props': event_props, + 'staid_quots': staid_quots + + } + + return schemas.Msg(code=0, msg='ok', data=res) + + +@router.post('/load_filter_props') +async def load_filter_props(request: Request, + game: str, + data_in: schemas.LoadProQuotas, + db: AsyncIOMotorDatabase = Depends(get_database), + rdb: RedisDrive = Depends(get_redis_pool), + ck: CKDrive = Depends(get_ck_db), + current_user: schemas.UserDB = Depends(deps.get_current_user) + ) -> schemas.Msg: + """事件属性 过滤条件""" + + event_columns = await ck.get_columns(game, 'event') + user_columns = await ck.get_columns(game, 'user') + + data_attr = await crud.data_attr.find_many(db, {'game': game, 'cat': 'event'}) + data_attr = {item['name']: item for item in data_attr} + event_props = [] + for item in event_columns: + data_type = settings.CK_TYPE_DICT.get(item['type']) + title = data_attr.get(item['name'], {}).get('show_name') or item['name'] + event_prop = { + 'id': item['name'], + 'data_type': data_type, + 'title': title, + } + event_props.append(event_prop) + + data_attr = await crud.data_attr.find_many(db, {'game': game, 'cat': 'user'}) + data_attr = {item['name']: 
item for item in data_attr} + user_props = [] + for item in user_columns: + data_type = settings.CK_TYPE_DICT.get(item['type']) + title = data_attr.get(item['name'], {}).get('show_name') or item['name'] + user_prop = { + 'id': item['name'], + 'data_type': data_type, + 'title': title, + } + user_props.append(user_prop) + + user_label_props = [] + user_label_docs = await crud.user_label.find_many(db, {'game': game}, {'qp': 0}) + for item in user_label_docs: + tmp = { + 'id': item['cluster_name'], + 'data_type': 'user_label', + 'title': item['display_name'], + } + user_label_props.append(tmp) + res = [ + { + 'title': '事件属性', + 'id': 'event', + 'category': event_props + }, + { + 'title': '用户属性', + 'id': 'user', + 'category': user_props + }, + { + 'title': '用户标签', + 'id': 'user_label', + 'category': user_label_props + } + ] + + return schemas.Msg(code=0, msg='ok', data=res) diff --git a/api/api_v1/endpoints/data_mana.py b/api/api_v1/endpoints/data_mana.py new file mode 100644 index 0000000..0d112e4 --- /dev/null +++ b/api/api_v1/endpoints/data_mana.py @@ -0,0 +1,217 @@ +import json + +from aioredis import Redis +from fastapi import APIRouter, Depends, Request, File +from motor.motor_asyncio import AsyncIOMotorDatabase +import pandas as pd + +import crud, schemas + +from api import deps +from core.config import settings +from db import get_database +from db.ckdb import CKDrive, get_ck_db +from db.redisdb import get_redis_pool +from utils import estimate_data,dict_to_str + +router = APIRouter() + +__all__ = 'router', + + +@router.get("/attr_list") +async def read_data_attr( + request: Request, + game: str, + cat: str, + db: AsyncIOMotorDatabase = Depends(get_database), + rdb: Redis = Depends(get_redis_pool), + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + """事件属性列表或用户属性列表""" + data = await rdb.get(f'{game}_{cat}') + data = json.loads(data) + res = [] + + data_attr = await crud.data_attr.find_many(db, {'game': game, 'cat': cat}) + 
data_attr = {item['name']: item for item in data_attr} + + for k, v in data.items(): + res.append( + {'name': k, + 'data_type': settings.CK_TYPE_DICT.get(v), + 'show_name': data_attr.get(k, {}).get('show_name', ''), + 'is_show': data_attr.get(k, {}).get('is_show', True), + 'attr_type': '预置属性' if k.startswith('#') else '自定义属性', + 'unit': '' + } + ) + return schemas.Msg(code=0, msg='ok', data=res) +@router.post("/game_user_event_list") +async def read_data_attr( + request: Request, + game: str, + db: AsyncIOMotorDatabase = Depends(get_database), + ck: CKDrive = Depends(get_ck_db), + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + """用户搜索时显示的用户属性""" + # data = await rdb.get(f'{game}_{data_in.cat}') + # data = json.loads(data) + # res = list(data.keys()) + #event_columns = await ck.get_columns(game, 'event') + user_columns = await ck.get_columns(game, 'user') + data_attr = await crud.data_attr.find_many(db, {'game': game, 'cat': 'user'}) + data_attr = {item['name']: item for item in data_attr} + user_props = [] + for item in user_columns: + data_type = settings.CK_TYPE_DICT.get(item['type']) + title = data_attr.get(item['name'], {}).get('show_name') or item['name'] + user_prop = { + 'id': item['name'], + 'data_type': data_type, + 'title': title, + } + user_props.append(user_prop) + + user_label_props = [] + user_label_docs = await crud.user_label.find_many(db, {'game': game}, {'qp': 0}) + for item in user_label_docs: + tmp = { + 'id': item['cluster_name'], + 'data_type': 'user_label', + 'title': item['display_name'], + } + user_label_props.append(tmp) + res = [ + { + 'title': '用户属性', + 'id': 'user', + 'category': user_props + }, + { + 'title': '用户标签', + 'id': 'user_label', + 'category': user_label_props + } + ] + return schemas.Msg(code=0, msg='ok', data=res) + +@router.post("/attr_edit") +async def edit_data_attr( + request: Request, + game: str, + data_in: schemas.DataAttrEdit, + db: AsyncIOMotorDatabase = Depends(get_database), + 
rdb: Redis = Depends(get_redis_pool), + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + """编辑事件属性""" + await crud.data_attr.edit_data_attr(db, game, data_in) + return schemas.Msg(code=0, msg='ok', data=data_in) + + + +# @router.post("/add_select_map") +# async def add_map( +# request: Request, +# game: str, +# data_in: schemas.SelectMap, +# db: AsyncIOMotorDatabase = Depends(get_database), +# current_user: schemas.UserDB = Depends(deps.get_current_user) +# ) -> schemas.Msg: +# """添加属性值选择映射""" +# +# """ +# { +# game:aaa, +# attr_name:bbb, +# map_:{ +# '区服aa':'1', +# '区服vvv':'5', +# } +# +# } +# """ +# await crud.select_map.save(db, data_in) +# return schemas.Msg(code=0, msg='ok', data=data_in) + +#在gametoos同步区服了,所以不需要这段代码 +@router.post("/add_select_map") +async def add_select_map( + request: Request, + game: str, + file: bytes = File(...), + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + """添加游戏区服信息选择映射""" + dfs = pd.read_excel(file, engine='openpyxl', sheet_name=None) + for attr_name, df in dfs.items(): + #将id这列转换成字符串类型 + if len(df) >0: + df['id'] = df['id'].astype(str) + map_ = df.to_dict('records') + data_in = schemas.SelectMap(game=game, attr_name=attr_name, map_=map_) + await crud.select_map.save(db, data_in) + return schemas.Msg(code=0, msg='ok', data=1) + + +@router.get("/select_list") +async def select_list( + request: Request, + game: str, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + """属性值选择映射列表""" + #当游戏为魔法门H5时,把game的值改为数据库中对应的值(mdb中的值和ck中的值是不一样的) + if game == 'mfmh5': + game='mzmfmh5' + resp = await crud.select_map.get_list(db, game) + return schemas.Msg(code=0, msg='ok', data=resp) + + +@router.post("/select_attr") +async def select_attr( + request: Request, + game: str, + data_in: schemas.SelectAttr, + db: AsyncIOMotorDatabase = 
Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + """属性值选择映射""" + resp = await crud.select_map.get_select(db, data_in, game) + code = 0 if resp else -9 + if resp: + if 'map_' in resp.keys(): + return schemas.Msg(code=code, msg='ok', data=resp) + else: + resp['map_'] = resp.pop('owner_name') + return schemas.Msg(code=code, msg='ok', data=resp) + else: + return schemas.Msg(code=code, msg='ok', data=resp) + +@router.post("/add_attr") +async def add_attr( + request: Request, + game: str, + data_in: schemas.Add_attr, + db: AsyncIOMotorDatabase = Depends(get_database), + rdb: Redis = Depends(get_redis_pool), + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + """添加事件属性或添加用户属性""" + data = await rdb.get(f'{game}_{data_in.cat}') + data = json.loads(data) + if data_in.state =='add': + #判断传入数据类型 + new_data_type=estimate_data(data_in.data_type) + #添加数据 + data[data_in.new_attribute]=new_data_type + else: + del data[data_in.new_attribute] + #将字典转为字符串 + str_data=dict_to_str(data) + await rdb.set(f'{game}_{data_in.cat}',str_data) + return schemas.Msg(code=0, msg='ok') \ No newline at end of file diff --git a/api/api_v1/endpoints/event_mana.py b/api/api_v1/endpoints/event_mana.py new file mode 100644 index 0000000..d390a6e --- /dev/null +++ b/api/api_v1/endpoints/event_mana.py @@ -0,0 +1,64 @@ +import json + +from aioredis import Redis +from fastapi import APIRouter, Depends, Request +from motor.motor_asyncio import AsyncIOMotorDatabase +import pandas as pd + +import crud, schemas + +from api import deps +from core.config import settings +from db import get_database +from db.ckdb import CKDrive, get_ck_db +from db.redisdb import get_redis_pool + +router = APIRouter() + +__all__ = 'router', + + +@router.get("/event_list") +async def event_list( + request: Request, + game: str, + db: AsyncIOMotorDatabase = Depends(get_database), + ckdb: CKDrive = Depends(get_ck_db), + current_user: 
schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + """事件列表""" + #获取事件名 + try: + event_list = await ckdb.distinct(game, 'event', '#event_name') + # 获取事件量 + event_count = await ckdb.yesterday_event_count(game) + event_meta = await crud.event_mana.find_many(db, {'game': game}) or {} + except Exception as e: + return schemas.Msg(code=-9, msg='查无数据', data='') + if event_meta: + event_meta = pd.DataFrame(event_meta).set_index('event_name').fillna('').T.to_dict() + + res = [] + for name in event_list: + res.append({ + 'name': name, + 'show_name': event_meta.get(name, {}).get('show_name', ''), + 'is_show': event_meta.get(name, {}).get('is_show', True), + 'desc': event_meta.get(name, {}).get('desc', ''), + 'event_count': event_count.get(name, {}).get('v') + } + ) + return schemas.Msg(code=0, msg='ok', data=res) + + +@router.post("/event_edit") +async def event_edit( + request: Request, + game: str, + data_in: schemas.EventMateEdit, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + """编辑事件""" + await crud.event_mana.edit_event_mate(db, game, data_in) + return schemas.Msg(code=0, msg='ok', data=data_in) diff --git a/api/api_v1/endpoints/folder.py b/api/api_v1/endpoints/folder.py new file mode 100644 index 0000000..036ab31 --- /dev/null +++ b/api/api_v1/endpoints/folder.py @@ -0,0 +1,40 @@ +import pymongo +from fastapi import APIRouter, Depends +from motor.motor_asyncio import AsyncIOMotorDatabase +import crud, schemas + +from db import get_database +from api import deps + +router = APIRouter() + + +@router.post("/create") +async def create( + data_in: schemas.FolderCreate, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + """创建文件夹""" + try: + await crud.folder.create(db, data_in, user_id=current_user.id) + except pymongo.errors.DuplicateKeyError: + return schemas.Msg(code=-1, 
msg='文件夹已存在', data='文件夹已存在') + + return schemas.Msg(code=0, msg='ok', data='创建成功') + + +@router.post("/delete") +async def delete( + data_in: schemas.FolderDelete, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + """删除文件夹""" + # 删除文件夹 自己创建的 + del_folder = await crud.folder.delete(db, _id=data_in.id, user_id=current_user.id) + # 删除文件夹下的 dashboard + del_dashboard = await crud.dashboard.delete(db, pid=data_in.id) + if del_folder.deleted_count == 0: + return schemas.Msg(code=-1, msg='error', data='删除失败') + return schemas.Msg(code=0, msg='ok', data='删除成功') diff --git a/api/api_v1/endpoints/project.py b/api/api_v1/endpoints/project.py new file mode 100644 index 0000000..0eef266 --- /dev/null +++ b/api/api_v1/endpoints/project.py @@ -0,0 +1,369 @@ +import pymongo +from bson import ObjectId +from fastapi import APIRouter, Depends, Request +from motor.motor_asyncio import AsyncIOMotorDatabase +import crud, schemas +from api import deps +from core.config import settings +from db import get_database +from db.ckdb import CKDrive, get_ck_db +from schemas.project import ProjectCreate +# from utils import casbin_enforcer +from utils import casbin_enforcer + +router = APIRouter() + + +@router.post("/create") +async def create( + request: Request, + data_in: ProjectCreate, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + """创建项目""" + try: + res_project = await crud.project.create(db, data_in, current_user=request.user) + await crud.project_number.createxiangmu(db, data_in) + # 同步插入项目 + # await crud.project_number.createxiangmu(db, data_in) + # 同步存入root权限中新项目的权限 + user_url = await crud.user_url.get_quanxian(db, + schemas.Url_quanxian(user_id='04491842be9811eb8acdd5bd867f57d6')) + user_url['game'].append(data_in.game) + user_url['quanxian_id'].append('ab1') + user_url['quanxian'].append('root') + await 
crud.user_url.updata_quanxian(db, schemas.Url_quanxian(user=user_url['user'], user_id=user_url['user_id'], + game=user_url['game'], + quanxian_id=user_url['quanxian_id'], + quanxian=user_url['quanxian'])) + except pymongo.errors.DuplicateKeyError: + return schemas.Msg(code=-1, msg='项目名已存在', data='项目名已存在') + + folder = schemas.FolderCreate( + name='未分组', + project_id=res_project.inserted_id, + cat='kanban', + pid=res_project.inserted_id, + ) + await crud.folder.create(db, folder, user_id=current_user.id) + folder = schemas.FolderCreate( + name='共享给我的', + project_id=res_project.inserted_id, + cat='kanban', + pid=res_project.inserted_id, + ) + await crud.folder.create(db, folder, user_id=current_user.id) + + # # 创建全部数据权限 + # data_auth = schemas.DataAuthCreate(name='全部', data=['*']) + # await crud.data_auth.create(db, data_auth, data_in.game) + + # 新建项目管理员权限 + # role_name = f'{data_in.game}_admin' + # role_dom = data_in.game + # casbin_enforcer.add_policy(role_name, role_dom, '*', '*') + # await crud.authority.create(db, 'p', role_name, role_dom, '*', '*') + # 添加角色 + # await crud.authority.create(db, 'g', settings.SUPERUSER_NAME, role_name, '*', '*', role_name='系统项目管理员', game='*') + # # 添加数据权限 + # await crud.authority.set_data_auth(db, schemas.DataAuthSet(username=request.user.username, data_auth_id='*'), + # game=data_in.game, v1=role_name) + return schemas.Msg(code=0, msg='创建成功') + + +@router.get("/") +async def read_project(request: Request, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) + ): + """查看自己拥有的项目""" + if request.user.username == 'root': + resp = await crud.project.all_game(db) + resp = sorted(resp, key=lambda x: x.get('sort') or 999) + else: + # game_list = casbin_enforcer.get_domains_for_user(request.user.username) + # resp = await crud.project.get_my_game(db, game_list) + project_data = await crud.user_url.get_quanxian(db, schemas.Url_quanxian(user_id=request.user.id)) + resp = await 
crud.project.get_my_game(db, project_data['game']) + return schemas.Msg(code=0, msg='ok', data=resp) +#获取项目名和渠道名project_name +@router.get("/project_name") +async def project_name(request: Request, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) + ): + if request.user.username == 'root': + res = await crud.project_number.all_xiangmu(db) + for i in res: + i['_id'] = str(i['_id']) + return schemas.Msg(code=0,msg='ok',data=res) +#添加项目名,渠道名 +@router.post("/add_project_name") +async def add_project_name(request: Request, + data_in: schemas.ProjectnumberInsert, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user)): + #插入数据 + #await crud.project_number.create(db, data_in) + #修改数据 + await crud.project_number.update(db, data_in) + return schemas.Msg(code=0, msg='修改成功', data=True) +@router.get("/detail") +async def detail(request: Request, + game: str, + db: AsyncIOMotorDatabase = Depends(get_database), + ck: CKDrive = Depends(get_ck_db), + current_user: schemas.UserDB = Depends(deps.get_current_user) + ): + """查看项目信息""" + res = await crud.project.find_one(db, {'game': game}) + event_count = await ck.count(game, 'event') + user_count = await ck.count(game, 'user_view') + event_type_count = await ck.distinct_count(game, 'event', '#event_name') + event_attr_count = await ck.field_count(db=game, tb='event') + user_attr_count = await ck.field_count(db=game, tb='user_view') + + res['event_count'] = event_count + res['user_count'] = user_count + res['event_type_count'] = event_type_count + res['event_attr_count'] = event_attr_count + res['user_attr_count'] = user_attr_count + return schemas.Msg(code=0, msg='ok', data=res) + + +@router.post("/rename") +async def rename_project(request: Request, + game: str, + data_in: schemas.ProjectRename, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) 
+ ): + """修改项目名""" + try: + res = await crud.project.rename(db, data_in) + except pymongo.errors.DuplicateKeyError: + return schemas.Msg(code=-1, msg='项目名已存在') + return schemas.Msg(code=0, msg='ok', data=res) + + +@router.post("/add_members") +async def add_members(request: Request, + game: str, + data_in: schemas.ProjectAddMember, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) + ): + """项目添加成员""" + + for item in data_in.members: + # casbin_enforcer.add_grouping_policy(item.username, item.role_name, game) + # # 设置数据权限 + # await crud.authority.set_data_auth(db, + # schemas.DataAuthSet(username=item.username, data_auth_id=item.data_auth_id), + # game) + + folder = schemas.FolderCreate( + name='未分组', + project_id=data_in.project_id, + cat='kanban', + pid=data_in.project_id, + ) + await crud.folder.create(db, folder, user_id=item.user_id) + + await crud.project.add_members(db, schemas.ProjectMember(project_id=data_in.project_id, + members=[item.username for item in data_in.members])) + + return schemas.Msg(code=0, msg='ok', data=data_in) +@router.post("/import_member") +async def import_member(request: Request, + game:str, + data_in:schemas.Import_project, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) + ): + """成员管理中的导入其他项目的成员到本项目中""" + res=await crud.user_url.get_all(db) + for i in res: + for nu in range(len(i['game'])): + if data_in.games == i['game'][nu] and game not in i['game']: + i['game'].append(game) + i['quanxian_id'].append(i['quanxian_id'][nu]) + i['quanxian'].append(i['quanxian'][nu]) + await crud.user_url.updata_quanxian(db,schemas.Url_quanxian(game=i['game'],user=i['user'],user_id=i['user_id'],quanxian_id=i['quanxian_id'], + quanxian=i['quanxian'])) + + return schemas.Msg(code=0, msg='ok',data='' ) + +@router.post("/edit_member") +async def edit_member(request: Request, + game: str, + db: AsyncIOMotorDatabase = 
Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) + ): + """编辑成员权限 角色和数据""" + + pass + return schemas.Msg(code=0, msg='ok', ) + + +# +# +@router.get("/members") +async def members(request: Request, + game: str, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) + ): + """查看项目成员""" + # data = casbin_enforcer.get_all_users_by_domain(game) + # names = [] + # role_ids = [] + # for item in data: + # names.append(item['username']) + # role_ids.append(item['role_id']) + # users = await crud.user.get_by_users(db, {'name': {'$in': names}}) + # roles = await crud.role.find_ids(db, role_ids) + # users = {item.name: item.dict() for item in users.data} + # roles = {item['_id']: item['name'] for item in roles} + # res = [] + # for item in data: + # username = item['username'] + # role_id = item['role_id'] + # try: + # res.append({ + # **users[username], + # 'role': roles[role_id], + # 'role_id': role_id, + # }) + # except: + # pass + # # res.append({ + # # **users[username], + # # 'role': roles[role_id], + # # 'role_id': role_id, + # # }) + # return schemas.Msg(code=0, msg='ok', data=res) + res = await crud.user_url.get_all(db) + # 符合当前项目权限的用户 + names = [] + # 符合当前项目权限的用户的对应权限级别 + quanxian = {} + quanxian_id = {} + for i in res: + for nu in range(len(i['game'])): + if game == i['game'][nu]: + names.append(i['user']) + quanxian[i['user']] = i['quanxian'][nu] + quanxian_id[i['user']] = i['quanxian_id'][nu] + users = await crud.user.get_by_users(db, {'name': {'$in': names}}) + data = [] + for use in users.data: + use = use.dict() + use['role'] = quanxian[use['name']] + use['role_id'] = quanxian[use['name']] + data.append(use) + return schemas.Msg(code=0, msg='ok', data=data) +# @router.post("/del_member") +# async def members(request: Request, +# game: str, +# data_in: schemas.ProjectDelMember, +# db: AsyncIOMotorDatabase = Depends(get_database), +# current_user: schemas.UserDB = 
Depends(deps.get_current_user) +# ): +# """删除项目成员""" +# # casbin_enforcer.delete_roles_for_user_in_domain(data_in.username, data_in.role, game) +# await crud.project.del_members(db, data_in) +# # await crud.authority.delete(db, ptype='g', v2=game, v0=data_in.username) +# return schemas.Msg(code=0, msg='ok') + + +@router.post("/clean") +async def read_kanban( + data_in: schemas.ProjectClean, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) +): + """ + 清理项目 删除项目所有内容 + :param data_in: + :param db: + :param current_user: + :return: + """ + # 删除 报表 + await crud.report.delete(db, {'project_id': data_in.project_id}) + # 删除 空间 + await crud.space.delete(db, {'project_id': data_in.project_id}) + # 删除 文件夹 + await crud.folder.delete(db, {'project_id': data_in.project_id}) + # 删除 看板 + await crud.dashboard.delete(db, {'project_id': data_in.project_id}) + + return schemas.Msg(code=0, msg='ok') + + +@router.post("/kanban") +async def read_kanban( + data_in: schemas.ProjectKanban, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) +): + """获取自己的看板""" + res = {'kanban': [], 'spaces': []} + # 我的看板 + kanban = await crud.folder.read_folder(db, project_id=data_in.id, user_id=current_user.id, cat='kanban') + + for item in kanban: + res['kanban'].append({ + 'name': item['name'], + 'children': [], + '_id': item['_id'] + }) + async for d in crud.dashboard.find(db, {'pid': item['_id']}).sort([('sort', 1)]): + res['kanban'][-1]['children'].append({ + 'name': d['name'], + '_id': d['_id'] + }) + + # 我的空间 + where = { + 'project_id': data_in.id, + 'members.user_id': current_user.id + # '$or': [{'rw_members': current_user.id}, {'r_members': current_user.id}] + } + spaces = await crud.space.find_many(db, where) + # 空间 文件夹 看板 + for item in spaces: + res['spaces'].append({ + 'name': item['name'], + 'children': [], + '_id': item['_id'] + }) + authority = {item['user_id']: 
item['authority'] for item in item['members']} + res['spaces'][-1]['authority'] = authority.get(current_user.id, 'r') + + for f in await crud.folder.find_many(db, {'pid': item['_id']}): + res['spaces'][-1]['children'].append({ + 'name': f['name'], + '_id': f['_id'], + 'children': [], + 'isFolder': True + }) + + async for d in crud.dashboard.find(db, {'pid': item['_id']}).sort([('sort', 1)]): + res['spaces'][-1]['children'][-1]['children'].append({ + 'name': d['name'], + '_id': d['_id'] + }) + + # 空间 看板 + async for d in crud.dashboard.find(db, {'pid': item['_id']}).sort([('sort', 1)]): + res['spaces'][-1]['children'].append({ + 'name': d['name'], + '_id': d['_id'], + 'user_id':d['user_id'], + 'isFolder': False + }) + + return schemas.Msg(code=0, msg='ok', data=res) diff --git a/api/api_v1/endpoints/query.py b/api/api_v1/endpoints/query.py new file mode 100644 index 0000000..423f20e --- /dev/null +++ b/api/api_v1/endpoints/query.py @@ -0,0 +1,1772 @@ +import datetime +from collections import defaultdict +import mimetypes +from urllib.parse import quote +import os +import pandas as pd +import numpy as np +from fastapi import APIRouter, Depends, Request,File +from fastapi.encoders import jsonable_encoder +from motor.motor_asyncio import AsyncIOMotorDatabase +from fastapi.responses import StreamingResponse +#from datetime import datetime +import crud, schemas +from common import * + +from api import deps +from db import get_database +from db.ckdb import get_ck_db, CKDrive +from db.redisdb import get_redis_pool, RedisDrive + +from models.behavior_analysis import BehaviorAnalysis, CombinationEvent +from models.user_analysis import UserAnalysis +from models.x_analysis import XAnalysis +from utils import DfToStream, getEveryDay, Download_xlsx,jiange_insert,create_df,create_neidf + +router = APIRouter() + + +@router.post("/sql") +async def query_sql( + request: Request, + game: str, + data_in: schemas.Sql, + ckdb: CKDrive = Depends(get_ck_db), + current_user: schemas.UserDB 
= Depends(deps.get_current_user) +) -> schemas.Msg: + """原 sql 查询 """ + sql = data_in.sql + sql = sql.replace('$game', game) + data = await ckdb.execute(sql) + return schemas.Msg(code=0, msg='ok', data=data) + + +@router.post("/sql_export") +async def query_sql( + request: Request, + game: str, + data_in: schemas.Sql, + ckdb: CKDrive = Depends(get_ck_db), + current_user: schemas.UserDB = Depends(deps.get_current_user) +): + """sql 导出 """ + file_name = quote(f'result.xlsx') + mime = mimetypes.guess_type(file_name)[0] + + sql = data_in.sql + sql = sql.replace('$game', game) + df = await ckdb.query_dataframe(sql) + if df.empty: + return schemas.Msg(code=-9, msg='无数据', data=None) + df_to_stream = DfToStream((df, 'result')) + with df_to_stream as d: + export = d.to_stream() + return StreamingResponse(export, media_type=mime, headers={'Content-Disposition': f'filename="{file_name}"'}) + + +@router.post("/event_model_sql") +async def event_model_sql( + request: Request, + game: str, + analysis: BehaviorAnalysis = Depends(BehaviorAnalysis), + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + """ 事件分析模型 sql""" + + await analysis.init(data_where=current_user.data_where) + data = await analysis.event_model_sql() + return schemas.Msg(code=0, msg='ok', data=data) + + +@router.post("/event_model_export") +async def event_model_export(request: Request, + game: str, + ckdb: CKDrive = Depends(get_ck_db), + analysis: BehaviorAnalysis = Depends(BehaviorAnalysis), + current_user: schemas.UserDB = Depends(deps.get_current_user) + ): + """ 事件分析模型 数据导出""" + await analysis.init(data_where=current_user.data_where) + sqls = await analysis.event_model_sql() + file_name = quote(f'{sqls[0]["report_name"]}.xlsx') + mime = mimetypes.guess_type(file_name)[0] + excels = [] + for item in sqls: + if item.get('combination_event'): + continue + sql = item['sql'] + event_name = item['event_name'] + df = await ckdb.query_dataframe(sql) + if df.empty: + continue + if 
'date' in df: + df.sort_values('date', inplace=True) + try: + df['date'] = df['date'].dt.tz_localize(None) + except: + pass + excels.append((df, event_name)) + df_to_stream = DfToStream(*excels) + with df_to_stream as d: + export = d.to_stream() + return StreamingResponse(export, media_type=mime, headers={'Content-Disposition': f'filename="{file_name}"'}) + +@router.post("/event_model_pay") +async def event_model_export(request: Request, + game: str, + data_in: schemas.Times, + ckdb: CKDrive = Depends(get_ck_db) + ): + """下载日充总额详细订单数据""" + sql=f"""select * FROM {game}.event WHERE addHours(`#event_time`, 8) >= '{data_in.start_time}' and addHours(`#event_time`, 8) <= '{data_in.end_time}' and `#event_name` = 'pay' and +orderid NOT LIKE '%GM%' order by `#event_time`""" + df = await ckdb.query_dataframe(sql) + list_columns=list(df.columns.values) + drop_list=[] + for i in list_columns: + aa=type(df[i][0]) + if df[i][0] == None or df[i][0] == [] or df[i][0] == '': + drop_list.append(i) + else: + if 'time' in i : + df[i] = df[i].astype(str) + for nu in range(len(df)): + df.replace(to_replace=df[i][nu],value=df[i][nu].split('+')[0],inplace=True) + + df.drop(drop_list, axis=1, inplace=True) + file_name=quote(f'订单详情.xlsx') + mime = mimetypes.guess_type(file_name)[0] + df_to_stream = DfToStream((df, '订单详情')) + with df_to_stream as d: + export = d.to_stream() + return StreamingResponse(export, media_type=mime, headers={'Content-Disposition': f'filename="{file_name}"'}) + +# @router.get("/event_model_export") +# async def event_model_export(request: Request, +# game: str, +# report_id: str, +# ckdb: CKDrive = Depends(get_ck_db), +# # analysis: BehaviorAnalysis = Depends(BehaviorAnalysis), +# current_user: schemas.UserDB = Depends(deps.get_current_user) +# ): +# """ 事件分析模型 数据导出""" +# analysis = BehaviorAnalysis(game, schemas.CkQuery(report_id=report_id), get_redis_pool()) +# await analysis.init(data_where=current_user.data_where) +# sqls = analysis.event_model_sql() +# res = [] 
+# file_name = f'{sqls[0]["report_name"]}.xlsx' +# mime = mimetypes.guess_type(file_name)[0] +# for item in sqls[:1]: +# sql = item['sql'] +# event_name = item['event_name'] +# df = await ckdb.query_dataframe(sql) +# file = df_to_stream(df, event_name) +# return StreamingResponse(file, media_type=mime, headers={'Content-Disposition': f'filename="{file_name}"'}) +# + +@router.post("/event_model") +async def event_model( + request: Request, + game: str, + data_in: schemas.CkQuery, + ckdb: CKDrive = Depends(get_ck_db), + db: AsyncIOMotorDatabase = Depends(get_database), + rdb: RedisDrive = Depends(get_redis_pool), + analysis: BehaviorAnalysis = Depends(BehaviorAnalysis), + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + """ 事件分析""" + await analysis.init(data_where=current_user.data_where) + try: + sqls = await analysis.event_model_sql() + except Exception as e: + return schemas.Msg(code=-9, msg='报表配置参数异常') + res = [] + is_hide = [] + + for idx, item in enumerate(sqls): #列出索引下标 + if item.get('is_show') == False: + is_hide.append(idx) + #event_name:事件名,日充总额 + #format:float浮点型 + q = { + 'groups': [], + 'values': [], + 'sum': [], + 'avg': [], + 'event_name': item['event_name'], + 'format': item['format'], + 'last_value': 0, + 'start_date': item['start_date'], + 'end_date': item['end_date'], + 'time_particle': item['time_particle'] + } + # 处理组合问题,如combination_event不存在则跳过 + if item.get('combination_event'): + combination_event = CombinationEvent(res, item.get('combination_event'), item['format']) + values, sum_, avg = combination_event.parse() + + # q['values'].append(values) + #q['sum'].append(sum_) + q['avg'].append(avg) + q['date_range'] = item['date_range'] + for last_value in values[::-1]: + if last_value > 0: + q['last_value'] = float(last_value) + break + if list(item.get('event_name'))[-1] == '率': + for i in range(len(values)): + values[i]=str((values[i]))+'%' + q['values'].append(values) + q['sum'].append(str(sum_)+'%') + elif '比' 
in item['event_name']: + for i in range(len(values)): + values[i]=str(int(float(values[i])*100))+'%' + q['values'].append(values) + q['sum'].append(str(int(float(sum_)*100))+'%') + else: + q['values'].append(values) + q['sum'].append(sum_) + res.append(q) + continue + #sql语句 + sql = item['sql'] + groupby = item['groupby'] + date_range = item['date_range'] #获取的要查询的每一天的时间 + q['date_range'] = date_range #把要查询的时间加入q字典中 + df = await ckdb.query_dataframe(sql) #以sql语句查出数据,df是二维列表 + df.fillna(0, inplace=True)#以0填补空数据 + #获取第一矩阵的长度 + if df.shape[0] == 0: + df = pd.DataFrame({'date': date_range, 'values': 0 * len(date_range)}) + # continue + # return schemas.Msg(code=0, msg='ok', data=[q]) + if item['time_particle'] == 'total': + # for group, df_group in df.groupby(groupby): + # df_group.reset_index(drop=True, inplace=True) + q['groups'].append(groupby) + q['values'].append(df['values'].to_list()) + q['sum'].append(round(float(df['values'].sum()), 2)) + q['avg'].append(round(float(df['values'].mean()), 2)) + for last_value in df['values'].values[::-1]: + if last_value > 0: + q['last_value'] = float(last_value) + break + if groupby and (set(groupby) & set(df) == set(groupby)): + q['date_range'] = [f'{i}' for i in df.set_index(groupby).index] + else: + q['date_range'] = ['合计'] + # 暂时只执行像素的计费点加别名 + if game == 'xiangsu': + if item['groupby'][0] == 'proid' and analysis.events[0]['event_name'] == 'pay': + # 将对应英文的中文意思按位置一一对应返回给前端 + proid_dict = await crud.proid_map.get_all_show_name(db, game) + res_list = [] + for i in q['date_range']: + try: + name = proid_dict[i] + res_list.append(name) + except: + pass + + q['proid_name'] = res_list + # 将proid字段和金额money按对应关系组合成字典并算出对应的总额返回给前端 + money_dict = await crud.proid_map.get_all_show_money(db, game) + add_money = [] + number = q['values'][0] + next = -1 + for i in q['date_range']: + next += 1 + try: + mongey = money_dict[i] + add = number[next] * mongey + add_money.append(add) + except: + pass + q['proid_money'] = add_money + # 首充金额分布 + # 
if item['groupby'][0] == 'money' and analysis.events[0]['event_name'] == 'pay': + # # 将proid字段和金额money按对应关系组合成字典并算出对应的总额返回给前端 + # money_dict = await crud.proid_map.get_all_show_money(db, game) + # add_money = [] + # number = q['values'][0] + # next = -1 + # for i in q['date_range']: + # next += 1 + # mongey = money_dict[i] + # add = number[next] * mongey + # add_money.append(add) + # q['proid_money'] = add_money + res.append(q) + continue + if groupby and (set(groupby) & set(df)) == set(groupby): + columns = groupby[0] + df[columns] = df[columns].astype(str) + # 有分组 + for group, df_group in df.groupby(groupby): + #在原数据上将索引重新转换为列,新索引的列删除 + df_group.reset_index(drop=True, inplace=True) + #判断为0的改成未知城市 + if str(group) == '0' and analysis.event_view['groupBy'][0]['columnDesc']== '城市': + q['groups'].append('未知城市') + else: + q['groups'].append(str(group)) + concat_data = [] + for i in set(date_range) - set(df_group['date']): + if len(groupby) > 1: + concat_data.append((i, *group, 0)) + else: + concat_data.append((i, group, 0)) + df_group = pd.concat([df_group, pd.DataFrame(concat_data, columns=df_group.columns)]) + df_group.sort_values('date', inplace=True) + q['values'].append(df_group['values'].to_list()) + q['sum'].append(round(float(df_group['values'].sum()), 2)) + q['avg'].append(round(float(df_group['values'].mean()), 2)) + for last_value in df['values'].values[::-1]: + if last_value > 0: + q['last_value'] = float(last_value) + break + + else: + # 无分组 + concat_data = [] + for i in set(date_range) - set(df['date']): + concat_data.append((i, 0)) + #纵向拼接两个表 + df = pd.concat([df, pd.DataFrame(concat_data, columns=df.columns)]) + #在原数据上按data排序 + df.sort_values('date', inplace=True) + if len(df) >= 2: + q['chain_ratio'] = division((df.iloc[-1, 1] - df.iloc[-2, 1]) * 100, df.iloc[-2, 1], 2) + if len(df) >= 8: + q['wow'] = division((df.iloc[-1, 1] - df.iloc[-8, 1]) * 100, df.iloc[-8, 1], 2) or 0 + q['values'].append(abs(df['values']).to_list()) + for last_value in 
@router.post("/retention_model_sql")
async def retention_model_sql(
        request: Request,
        game: str,
        analysis: BehaviorAnalysis = Depends(BehaviorAnalysis),
        current_user: schemas.UserDB = Depends(deps.get_current_user)
) -> schemas.Msg:
    """留存查询 sql

    Retention query: return only the generated SQL (debug endpoint).
    """
    await analysis.init(data_where=current_user.data_where)
    data = await analysis.retention_model_sql2()
    return schemas.Msg(code=0, msg='ok', data=[data])


@router.post("/retention_model")
async def retention_model(request: Request,
                          game: str,
                          ckdb: CKDrive = Depends(get_ck_db),
                          db: AsyncIOMotorDatabase = Depends(get_database),
                          analysis: BehaviorAnalysis = Depends(BehaviorAnalysis),
                          current_user: schemas.UserDB = Depends(deps.get_current_user)
                          ) -> schemas.Msg:
    """Retention model: retained/churned counts and rates per registration date.

    Builds one summary row per registration cohort plus a '均值' (average) row
    aggregated over all cohorts whose retention window has fully elapsed.
    Offsets that have not elapsed yet are padded with '-'.
    """
    await analysis.init(data_where=current_user.data_where)
    try:
        # Resolve start/end date, retention offsets and the ClickHouse SQL.
        res = await analysis.retention_model_sql2()
    except Exception:
        return schemas.Msg(code=-9, msg='报表配置参数异常')
    sql = res['sql']
    df = await ckdb.query_dataframe(sql)
    if df.empty:
        return schemas.Msg(code=-9, msg='无数据', data=None)

    date_range = res['date_range']          # dates to report on
    retention_n = res['retention_n']        # day offsets, e.g. [1, 3, 7, 14, ...]
    filter_item_type = res['filter_item_type']
    filter_item = res['filter_item']
    df.set_index('reg_date', inplace=True)
    # Insert zero rows for requested dates missing from the query result.
    for d in set(res['date_range']) - set(df.index):
        df.loc[d] = 0
    df.sort_index(inplace=True)

    # FIX: pd.datetime was removed in pandas >= 1.0; hoist "today" out of the loops.
    today = pd.Timestamp.now().date()

    summary_values = {'均值': {}}
    for date, v in df.T.items():
        tmp = summary_values.setdefault(date, dict())
        tmp['d0'] = int(v.cnt0)             # cohort size on day 0
        tmp['p'] = []                       # retention rate per offset
        tmp['n'] = []                       # retained count per offset
        tmp['p_outflow'] = []               # churn rate per offset
        tmp['n_outflow'] = []               # churned count per offset
        for i in retention_n:
            # Skip offsets that have not elapsed yet for this cohort.
            if i > (today - date).days:
                continue
            tmp['p'].append(v[f'p{i}'])
            tmp['n'].append(v[f'cnt{i}'])
            tmp['p_outflow'].append(v[f'op{i}'])
            tmp['n_outflow'].append(v[f'on{i}'])

    # Build the '均值' (average) row over cohorts whose offset fully elapsed.
    tmp = summary_values['均值']
    retention_avg_dict = {}
    for rn in retention_n:
        for rt, rd in df.T.items():
            if rt + datetime.timedelta(days=rn) <= today:
                retention_avg_dict.setdefault(rn, {'cnt0': 0, 'cntn': 0, 'o_cnt0': 0, 'o_cntn': 0})
                retention_avg_dict[rn]['cnt0'] += rd['cnt0']
                retention_avg_dict[rn]['cntn'] += rd[f'cnt{rn}']
                retention_avg_dict[rn]['o_cnt0'] += rd['cnt0']
                retention_avg_dict[rn]['o_cntn'] += rd[f'on{rn}']

    tmp['p'] = []
    tmp['n'] = []
    tmp['p_outflow'] = []
    tmp['n_outflow'] = []
    tmp['d0'] = 0
    for rt, rd in retention_avg_dict.items():
        tmp['d0'] = int(df['cnt0'].sum())
        # FIX: guard division by zero for empty cohorts instead of crashing.
        n = round(rd['cntn'] * 100 / rd['cnt0'], 2) if rd['cnt0'] else 0
        n = 0 if np.isnan(n) else n
        tmp['p'].append(n)
        tmp['n'].append(rd['cntn'])
        n = round(rd['o_cntn'] * 100 / rd['cnt0'], 2) if rd['cnt0'] else 0
        n = 0 if np.isnan(n) else n
        tmp['p_outflow'].append(n)
        tmp['n_outflow'].append(rd['o_cntn'])

    # Column headers: date, user count, next-day retention, then N+1-day retention.
    title = ['日期', '用户数', '次留', *[f'{i + 1}留' for i in retention_n[1:]]]

    # Pad not-yet-reached offsets with '-' so every row has equal length.
    retention_length = len(retention_n)
    for _, items in summary_values.items():
        for key in ['p', 'n', 'p_outflow', 'n_outflow']:
            items[key].extend(['-'] * (retention_length - len(items[key])))

    resp = {
        'summary_values': summary_values,
        'date_range': [d.strftime('%Y-%m-%d') for d in date_range],
        'title': title,
        'filter_item_type': filter_item_type,
        'filter_item': filter_item,
        'start_date': res['start_date'],
        'end_date': res['end_date'],
        'time_particle': res['time_particle']
    }
    return schemas.Msg(code=0, msg='ok', data=resp)
async def retention_model01(request: Request,
                            game: str,
                            ckdb: CKDrive = Depends(get_ck_db),
                            db: AsyncIOMotorDatabase = Depends(get_database),
                            analysis: BehaviorAnalysis = Depends(BehaviorAnalysis),
                            current_user: schemas.UserDB = Depends(deps.get_current_user)
                            ) -> schemas.Msg:
    """Churn ('流失') variant of retention_model.

    Identical pipeline to retention_model, but the 'p' columns report
    100 - retention rate, and titles read '次流失' / 'N流失'.
    NOTE(review): this function has no @router decorator — it is not a route;
    presumably kept as an alternative implementation. Confirm before removal.
    """
    await analysis.init(data_where=current_user.data_where)
    res = await analysis.retention_model_sql2()
    sql = res['sql']
    df = await ckdb.query_dataframe(sql)
    if df.empty:
        return schemas.Msg(code=-9, msg='无数据', data=None)

    date_range = res['date_range']
    retention_n = res['retention_n']
    filter_item_type = res['filter_item_type']
    filter_item = res['filter_item']
    df.set_index('reg_date', inplace=True)
    for d in set(res['date_range']) - set(df.index):
        df.loc[d] = 0
    df.sort_index(inplace=True)

    # FIX: pd.datetime was removed in pandas >= 1.0.
    today = pd.Timestamp.now().date()

    summary_values = {'均值': {}}
    for date, v in df.T.items():
        tmp = summary_values.setdefault(date, dict())
        tmp['d0'] = int(v.cnt0)
        tmp['p'] = []
        tmp['n'] = []
        tmp['p_outflow'] = []
        tmp['n_outflow'] = []
        for i in retention_n:
            if i > (today - date).days:
                continue
            # Churn rate = 100 - retention rate.
            tmp['p'].append(round(100 - v[f'p{i}'], 2))
            tmp['n'].append(v[f'cnt{i}'])
            tmp['p_outflow'].append(v[f'op{i}'])
            tmp['n_outflow'].append(v[f'on{i}'])

    tmp = summary_values['均值']
    retention_avg_dict = {}
    for rn in retention_n:
        for rt, rd in df.T.items():
            if rt + datetime.timedelta(days=rn) <= today:
                retention_avg_dict.setdefault(rn, {'cnt0': 0, 'cntn': 0, 'o_cnt0': 0, 'o_cntn': 0})
                retention_avg_dict[rn]['cnt0'] += rd['cnt0']
                retention_avg_dict[rn]['cntn'] += rd[f'cnt{rn}']
                retention_avg_dict[rn]['o_cnt0'] += rd['cnt0']
                retention_avg_dict[rn]['o_cntn'] += rd[f'on{rn}']

    tmp['p'] = []
    tmp['n'] = []
    tmp['p_outflow'] = []
    tmp['n_outflow'] = []
    tmp['d0'] = 0
    for rt, rd in retention_avg_dict.items():
        tmp['d0'] = int(df['cnt0'].sum())
        # FIX: guard division by zero for empty cohorts.
        n = round(100 - (rd['cntn'] * 100 / rd['cnt0']), 2) if rd['cnt0'] else 0
        n = 0 if np.isnan(n) else n
        tmp['p'].append(n)
        tmp['n'].append(rd['cntn'])
        n = round(rd['o_cntn'] * 100 / rd['cnt0'], 2) if rd['cnt0'] else 0
        n = 0 if np.isnan(n) else n
        tmp['p_outflow'].append(n)
        tmp['n_outflow'].append(rd['o_cntn'])

    title = ['日期', '用户数', '次流失', *[f'{i + 1}流失' for i in retention_n[1:]]]

    # Pad not-yet-reached offsets with '-'.
    retention_length = len(retention_n)
    for _, items in summary_values.items():
        for key in ['p', 'n', 'p_outflow', 'n_outflow']:
            items[key].extend(['-'] * (retention_length - len(items[key])))

    resp = {
        'summary_values': summary_values,
        'date_range': [d.strftime('%Y-%m-%d') for d in date_range],
        'title': title,
        'filter_item_type': filter_item_type,
        'filter_item': filter_item,
        'start_date': res['start_date'],
        'end_date': res['end_date'],
        'time_particle': res['time_particle']
    }
    return schemas.Msg(code=0, msg='ok', data=resp)


@router.post("/retention_model_export")
async def retention_model_export(request: Request,
                                 game: str,
                                 ckdb: CKDrive = Depends(get_ck_db),
                                 analysis: BehaviorAnalysis = Depends(BehaviorAnalysis),
                                 current_user: schemas.UserDB = Depends(deps.get_current_user)
                                 ):
    """留存分析模型 数据导出

    Export the raw retention query result as an .xlsx download.
    """
    await analysis.init(data_where=current_user.data_where)
    data = await analysis.retention_model_sql2()
    file_name = quote(f'留存分析.xlsx')
    mime = mimetypes.guess_type(file_name)[0]

    sql = data['sql']
    df = await ckdb.query_dataframe(sql)
    if df.empty:
        return schemas.Msg(code=-9, msg='无数据', data=None)
    df_to_stream = DfToStream((df, '留存分析'))
    with df_to_stream as d:
        export = d.to_stream()
    return StreamingResponse(export, media_type=mime, headers={'Content-Disposition': f'filename="{file_name}"'})


@router.post("/retention_model_del", deprecated=True)
async def retention_model_del(
        request: Request,
        game: str,
        ckdb: CKDrive = Depends(get_ck_db),
        db: AsyncIOMotorDatabase = Depends(get_database),
        analysis: BehaviorAnalysis = Depends(BehaviorAnalysis),
        current_user: schemas.UserDB = Depends(deps.get_current_user)
) -> schemas.Msg:
    """留存数据模型 (deprecated)

    Older retention implementation computing retention by intersecting
    per-day user-id sets (val_a on day d1 vs val_b on day d2).
    """
    await analysis.init(data_where=current_user.data_where)
    res = await analysis.retention_model_sql()
    sql = res['sql']
    date_range = res['date_range']
    event_a, event_b = res['event_name']
    unit_num = res['unit_num']
    title = await crud.event_mana.get_show_name(db, game, event_a)
    title = f'{title}用户数'
    df = await ckdb.query_dataframe(sql)
    if df.empty:
        return schemas.Msg(code=-9, msg='无数据', data=None)
    concat_data = []
    df = pd.concat([df, pd.DataFrame(concat_data, columns=df.columns)])
    df['date'] = df['date'].apply(lambda x: x.date())
    # Overall summary: intersect day-d1 actors with day-d2 actors.
    summary_df = df.groupby('date')[['val_a', 'val_b', 'amount_a']].sum()
    summary_values = {}
    for i, d1 in enumerate(date_range):
        a = set(summary_df.loc[d1]['val_a']) if d1 in summary_df.index else set()
        if not a:
            continue
        key = d1.strftime('%Y-%m-%d')
        for j, d2 in enumerate(date_range[i:]):
            if j > unit_num:
                break
            b = set(summary_df.loc[d2]['val_b']) if d2 in summary_df.index else set()
            tmp = summary_values.setdefault(key, {})
            tmp.setdefault('d0', len(a))
            tmp.setdefault('p', []).append(division(len(a & b) * 100, len(a)))
            tmp.setdefault('n', []).append(len(a & b))
            tmp.setdefault('p_outflow', []).append(round(100 - division(len(a & b) * 100, len(a)), 2))
            tmp.setdefault('n_outflow', []).append(len(a) - len(a & b))
    # Grouped values (per groupby tuple).
    groups = set([tuple(i) for i in df[res['groupby']].values])
    df.set_index(res['groupby'], inplace=True)
    df.sort_index(inplace=True)
    values = {}
    days = [i for i in range((date_range[-1] - date_range[0]).days + 1)][:unit_num + 1]
    for i, d1 in enumerate(date_range):
        for g in groups:
            if len(g) == 1:
                continue
            a = set(df.loc[g]['val_a']) if g in df.index else set()
            if not a:
                continue
            key = d1.strftime("%Y-%m-%d")
            tmp_g = values.setdefault(key, {})
            for j, d2 in enumerate(date_range[i:]):
                if j > unit_num:
                    break
                b = set(df.loc[g]['val_b']) if g in df.index else set()
                tmp = tmp_g.setdefault(','.join(g[1:]), {})
                tmp.setdefault('d0', len(a))
                tmp.setdefault('p', []).append(division(len(a & b) * 100, len(a)))
                tmp.setdefault('n', []).append(len(a & b))
    data = {
        'summary_values': summary_values,
        'values': values,
        'days': days,
        'date_range': [d.strftime('%Y-%m-%d') for d in date_range][:unit_num + 1],
        'title': title,
        'start_date': res['start_date'],
        'end_date': res['end_date'],
        'time_particle': res['time_particle']
    }
    return schemas.Msg(code=0, msg='ok', data=data)
@router.post("/funnel_model_sql")
async def funnel_model_sql(
        request: Request,
        game: str,
        analysis: BehaviorAnalysis = Depends(BehaviorAnalysis),
        current_user: schemas.UserDB = Depends(deps.get_current_user)
) -> schemas.Msg:
    """漏斗数据模型 sql

    Funnel model: return only the generated SQL (debug endpoint).
    """
    await analysis.init(data_where=current_user.data_where)
    data = await analysis.funnel_model_sql()
    return schemas.Msg(code=0, msg='ok', data=[data])


@router.post("/funnel_model")
async def funnel_model(
        request: Request,
        game: str,
        ckdb: CKDrive = Depends(get_ck_db),
        db: AsyncIOMotorDatabase = Depends(get_database),
        analysis: BehaviorAnalysis = Depends(BehaviorAnalysis),
        current_user: schemas.UserDB = Depends(deps.get_current_user)
) -> schemas.Msg:
    """漏斗数据模型

    Funnel model: per funnel step ('level') compute the cumulative user count
    (everyone who reached step i or deeper) and the conversion rate relative
    to step 1, overall, per date, and per group.
    """
    await analysis.init(data_where=current_user.data_where)
    res = await analysis.funnel_model_sql()
    sql = res['sql']
    # Dates to query.
    date_range = res['date_range']
    cond_level = res['cond_level']
    groupby = res['groupby']

    df = await ckdb.query_dataframe(sql)
    if df.empty:
        return schemas.Msg(code=-9, msg='无数据', data=None)
    # Pad missing funnel levels with zero rows so every (date, group) has all levels.
    concat_data = []
    for key, tmp_df in df.groupby(['date'] + groupby):
        not_exists_level = {i for i in range(1, len(cond_level) + 1)} - set(tmp_df['level'])
        for item in not_exists_level:
            key = key if isinstance(key, tuple) else (key,)
            concat_data.append((*key, item, 0))
    # Merge the padding rows into the frame.
    df = pd.concat([df, pd.DataFrame(concat_data, columns=df.columns)])

    # df.set_index('date',inplace=True)
    data_list = []
    date_data = {}
    if df.shape == (0, 0):
        return schemas.Msg(code=0, msg='ok', data={'list': data_list, 'level': cond_level})

    tmp = {'title': '总体'}
    # Sum per level over all dates/groups.
    tmp_df = df[['level', 'values']].groupby('level').sum()
    # Sort by level in place.
    tmp_df.sort_index(inplace=True)
    # Make counts cumulative: level i counts everyone who reached i or deeper.
    for i in tmp_df.index:
        tmp_df.loc[i, 'values'] = tmp_df.loc[i:]['values'].sum()

    tmp['n'] = tmp_df['values'].to_list()
    # p1: conversion percentage of each level relative to level 1.
    tmp['p1'] = [100]
    # tmp['p2'] = []
    for i, v in tmp_df.loc[2:, 'values'].items():
        tmp['p1'].append(round(v * 100 / tmp_df.loc[1, 'values'], 2))
        # tmp['p2'].append(round(v*100 / tmp_df.loc[i - 1, 'values'], 2))
    data_list.append(tmp)

    # Pad missing dates so the per-date summary covers the whole range.
    all_idx = {(dt, lv) for dt in date_range for lv in range(1, len(cond_level) + 1)}
    concat_data = []
    for i in all_idx - set(df.set_index(['date', 'level']).index):
        concat_data.append((*i, 0))
    summary_df = pd.concat(
        [df[['date', 'level', 'values']], pd.DataFrame(concat_data, columns=['date', 'level', 'values'])])
    for key, tmp_df in summary_df.groupby('date'):
        tmp_df = tmp_df.groupby('level').sum()
        tmp_df.sort_index(inplace=True)
        for i in tmp_df.index:
            tmp_df.loc[i, 'values'] = tmp_df.loc[i:]['values'].sum()

        tmp = dict()

        tmp['n'] = tmp_df['values'].to_list()
        tmp['p1'] = [100]
        # tmp['p2'] = []
        for i, v in tmp_df.loc[2:, 'values'].items():
            var = round(v * 100 / tmp_df.loc[1, 'values'], 2)
            # NaN conversion (level-1 count of 0) is reported as 0.
            var = 0 if np.isnan(var) else var
            tmp['p1'].append(var)
            # tmp['p2'].append(round(v*100 / tmp_df.loc[i - 1, 'values'], 2))

        _ = date_data.setdefault(key.strftime('%Y-%m-%d'), {})
        _['总体'] = tmp

    if groupby:
        # Pad every (date, group) combination with the missing levels.
        concat_data = []
        idx = set(df.set_index(['date'] + groupby).index)
        all_idx = {(*j, i) for i in range(1, len(cond_level) + 1) for j in idx}
        for i in all_idx - set(df.set_index(list(('date', *groupby, 'level'))).index):
            concat_data.append((*i, 0))

        df = pd.concat([df, pd.DataFrame(concat_data, columns=df.columns)])
        # df.sort_values(list((*groupby, 'level')), inplace=True, ascending=False)

        # Per-group totals (all dates combined).
        for key, tmp_df in df.groupby(groupby):
            tmp = {'title': key}
            tmp_df = tmp_df.groupby('level').sum()
            tmp_df.sort_index(inplace=True)
            for i in tmp_df.index:
                tmp_df.loc[i, 'values'] = tmp_df.loc[i:]['values'].sum()

            tmp['n'] = tmp_df['values'].to_list()
            tmp['p1'] = [100]
            # tmp['p2'] = []
            for i, v in tmp_df.loc[2:, 'values'].items():
                var = round(v * 100 / tmp_df.loc[1, 'values'], 2)
                var = 0 if np.isnan(var) else var
                tmp['p1'].append(var)
                # tmp['p2'].append(round(v*100 / tmp_df.loc[i - 1, 'values'], 2))
            data_list.append(tmp)

        # Per (date, group) breakdown.
        # NOTE(review): key[1] assumes exactly one groupby column — confirm for
        # multi-column groupings.
        for key, tmp_df in df.groupby(['date'] + groupby):

            tmp_df = tmp_df.groupby('level').sum()
            tmp_df.sort_index(inplace=True)
            for i in tmp_df.index:
                tmp_df.loc[i, 'values'] = tmp_df.loc[i:]['values'].sum()

            tmp = dict()

            tmp['n'] = tmp_df['values'].to_list()
            tmp['p1'] = [100]
            # tmp['p2'] = []
            for i, v in tmp_df.loc[2:, 'values'].items():
                var = round(v * 100 / tmp_df.loc[1, 'values'], 2)
                var = 0 if np.isnan(var) else var
                tmp['p1'].append(var)
                # tmp['p2'].append(round(v*100 / tmp_df.loc[i - 1, 'values'], 2))

            _ = date_data.setdefault(key[0].strftime('%Y-%m-%d'), {})
            _[key[1]] = tmp
    title = (groupby or ['总体']) + cond_level
    resp = {'list': data_list,
            'date_data': date_data,
            'title': title,
            'level': cond_level,
            'start_date': res['start_date'],
            'end_date': res['end_date'],
            'time_particle': res['time_particle']
            }
    return schemas.Msg(code=0, msg='ok', data=resp)


@router.post("/scatter_model_sql")
async def scatter_model_sql(
        request: Request,
        game: str,
        analysis: BehaviorAnalysis = Depends(BehaviorAnalysis),
        current_user: schemas.UserDB = Depends(deps.get_current_user)
) -> schemas.Msg:
    """分布分析 sql

    Scatter/distribution analysis: return only the generated SQL (debug endpoint).
    """
    await analysis.init(data_where=current_user.data_where)
    data = await analysis.scatter_model_sql()
    return schemas.Msg(code=0, msg='ok', data=[data])
@router.post("/scatter_model_export")
async def retention_model_export(request: Request,
                                 game: str,
                                 ckdb: CKDrive = Depends(get_ck_db),
                                 analysis: BehaviorAnalysis = Depends(BehaviorAnalysis),
                                 current_user: schemas.UserDB = Depends(deps.get_current_user)
                                 ):
    """ 分布分析 数据导出

    Export the distribution analysis (bucketed counts plus raw rows) as .xlsx.
    NOTE(review): the function name duplicates the retention export handler;
    only the route path differs — consider renaming if anything imports it.
    """
    await analysis.init(data_where=current_user.data_where)
    res = await analysis.scatter_model_sql()
    file_name = quote(f'分布分析.xlsx')
    mime = mimetypes.guess_type(file_name)[0]
    sql = res['sql']
    df = await ckdb.query_dataframe(sql)
    if df.empty:
        return schemas.Msg(code=-9, msg='无数据', data=None)
    interval_type = res['interval_type']
    # FIX: use a local name instead of rebinding the `analysis` parameter.
    analysi = res['analysis']
    groupby = res['groupby']
    quota_interval_arr = res['quota_interval_arr']
    # Compatibility for the 'total' granularity: collapse to one pseudo-date.
    if res['time_particle'] == 'total':
        df['date'] = '合计'
    if analysi != 'number_of_days' and interval_type != 'discrete':
        # Continuous values: bucket into ~10 equal-width bins (or the
        # user-supplied quota intervals).
        max_v = int(df['values'].max())
        min_v = int(df['values'].min())
        interval = (max_v - min_v) // 10 or 1
        resp = {'list': dict(),
                'start_date': res['start_date'],
                'end_date': res['end_date'],
                'time_particle': res['time_particle']
                }

        if not quota_interval_arr:
            resp['label'] = [f'[{i},{i + interval})' for i in range(min_v, max_v, interval)]
            bins = [i for i in range(min_v, max_v + interval, interval)]
        else:
            quota_interval_arr = [-float('inf')] + quota_interval_arr + [float('inf')]
            resp['label'] = []
            bins = [quota_interval_arr[0]]
            for i, v in enumerate(quota_interval_arr[1:]):
                resp['label'].append(f'[{quota_interval_arr[i]},{v})')
                bins.append(v)

        # Overall distribution per date.
        for key, tmp_df in df.groupby('date'):
            bins_s = pd.cut(tmp_df['values'], bins=bins,
                            right=False).value_counts()
            bins_s.sort_index(inplace=True)
            total = int(bins_s.sum())
            if res['time_particle'] == 'total':
                resp['list']['合计'] = dict()
                resp['list']['合计']['总体'] = {'n': bins_s.to_list(), 'total': total,
                                            'p': round(bins_s * 100 / total, 2).to_list(),
                                            'title': '总体'}
            else:
                resp['list'][key.strftime('%Y-%m-%d')] = dict()
                resp['list'][key.strftime('%Y-%m-%d')]['总体'] = {'n': bins_s.to_list(), 'total': total,
                                                               'p': round(bins_s * 100 / total, 2).to_list(),
                                                               'title': '总体'}

        # FIX: export_df was only created inside `if groupby:` but used
        # unconditionally below, raising NameError for ungrouped queries.
        export_df = pd.DataFrame(columns=resp['label'])

        # Grouped distribution.
        if groupby:
            for key, tmp_df in df.groupby(['date', *groupby]):
                bins_s = pd.cut(tmp_df['values'], bins=bins,
                                right=False).value_counts()
                bins_s.sort_index(inplace=True)
                total = int(bins_s.sum())
                title = '.'.join(key[1:])
                date = key[0]
                resp['list'][date.strftime('%Y-%m-%d')][title] = {'n': bins_s.to_list(), 'total': total,
                                                                  'p': round(bins_s * 100 / total, 2).to_list(),
                                                                  'title': title
                                                                  }

                export_df.loc[(date.strftime('%Y-%m-%d'), title)] = bins_s.to_list()

        df_to_stream = DfToStream((export_df, '分布分析'), (df, '分布分析原始数据'), index=True)
        with df_to_stream as d:
            export = d.to_stream()
        return StreamingResponse(export, media_type=mime,
                                 headers={'Content-Disposition': f'filename="{file_name}"'})

    # elif analysi == 'number_of_days':
    else:
        # Discrete values / number-of-days: one label per distinct value.
        resp = {'list': {}, 'label': [],
                'start_date': res['start_date'],
                'end_date': res['end_date'],
                'time_particle': res['time_particle']
                }
        labels = [str(i) for i in sorted(df['values'].unique())]
        resp['label'] = labels
        for key, tmp_df in df.groupby(['date']):
            total = len(tmp_df)
            if res['time_particle'] == 'total':
                dt = '合计'
            else:
                dt = key.strftime('%Y-%m-%d')
            labels_dict = {}
            for key2, tmp_df2 in tmp_df.groupby('values'):
                label = str(key2)
                n = len(tmp_df2)
                labels_dict[label] = n

            resp['list'][dt] = {'总体': {'n': [labels_dict.get(i, 0) for i in labels], 'total': total,
                                       'p': [round(labels_dict.get(i, 0) * 100 / total, 2) for i in labels]}}

        export_df = pd.DataFrame(columns=resp['label'])
        for d, v in resp['list'].items():
            export_df.loc[d] = v['总体']['n']

        df_to_stream = DfToStream((export_df, '分布分析'), (df, '分布分析原始数据'), index=True)
        with df_to_stream as d:
            export = d.to_stream()
        return StreamingResponse(export, media_type=mime, headers={'Content-Disposition': f'filename="{file_name}"'})
@router.post("/scatter_model")
async def scatter_model(
        request: Request,
        game: str,
        ckdb: CKDrive = Depends(get_ck_db),
        db: AsyncIOMotorDatabase = Depends(get_database),
        analysis: BehaviorAnalysis = Depends(BehaviorAnalysis),
        current_user: schemas.UserDB = Depends(deps.get_current_user)
) -> schemas.Msg:
    """分布分析 模型

    Distribution model: bucket the quota values per date (equal-width or
    user-supplied intervals), or count per discrete value; percentages are
    returned as '<x>%' strings. Supports xlsx download when
    event_view['download'] == 1.
    """
    await analysis.init(data_where=current_user.data_where)
    event_type = analysis.events[0]['eventName']
    try:
        res = await analysis.scatter_model_sql()
    except Exception as e:
        return schemas.Msg(code=-9, msg='报表配置参数异常')
    end_date = analysis.end_date
    start_date = analysis.start_date
    where = analysis.events[-1]['quotaname']
    sql = res['sql']

    # (commented-out experiment: restrict to owner_name='kuaiyou3' for xiangsu)
    # sql_list=sql.split("GROUP BY")
    # sql01 = """and xiangsu.event.owner_name='kuaiyou3'GROUP BY"""""
    # new_sql=sql_list[0]+sql01+sql_list[1]

    df = await ckdb.query_dataframe(sql)
    if df.empty:
        return schemas.Msg(code=-9, msg='无数据', data=None)
    # When 'values' holds list cells, explode them into one row per element.
    if 'list' in str(type(df['values'][0])):
        # f=lambda x:x[0]
        # df['values']=df['values'].map(f)
        df = df.explode("values").reset_index(drop=True)
    # df['values']=df['values'].astype(str)
    df.fillna(0, inplace=True)
    # Coerce values to int for binning.
    df['values'] = df['values'].astype(int)
    interval_type = res['interval_type']
    analysi = res['analysis']
    groupby = res['groupby']
    quota_interval_arr = res['quota_interval_arr']
    # Compatibility for the 'total' granularity: collapse to one pseudo-date.
    if res['time_particle'] == 'total':
        df['date'] = '合计'

    if analysi != 'number_of_days' and interval_type != 'discrete':
        # Continuous values: ~10 equal-width bins or the configured intervals.
        max_v = int(df['values'].max())
        min_v = int(df['values'].min())
        interval = (max_v - min_v) // 10 or 1
        resp = {'list': dict(),
                'start_date': res['start_date'],
                'end_date': res['end_date'],
                'time_particle': res['time_particle']
                }

        if not quota_interval_arr:
            resp['label'] = [f'[{i},{i + interval})' for i in range(min_v, max_v, interval)]
            bins = [i for i in range(min_v, max_v + interval, interval)]
        else:
            quota_interval_arr = [-float('inf')] + quota_interval_arr + [float('inf')]
            resp['label'] = []
            bins = [quota_interval_arr[0]]
            for i, v in enumerate(quota_interval_arr[1:]):
                resp['label'].append(f'[{quota_interval_arr[i]},{v})')
                bins.append(v)

        # Overall distribution per date.
        for key, tmp_df in df.groupby('date'):
            bins_s = pd.cut(tmp_df['values'], bins=bins,
                            right=False).value_counts()
            bins_s.sort_index(inplace=True)
            total = int(bins_s.sum())
            if res['time_particle'] == 'total':
                resp['list']['合计'] = dict()
                p = list(round(bins_s * 100 / total, 2).to_list())
                resp['list']['合计']['总体'] = {'n': bins_s.to_list(), 'total': total,
                                            'p': [str(i) + '%' for i in p],
                                            'title': '总体'}
            else:
                p = list(round(bins_s * 100 / total, 2).to_list())
                resp['list'][key.strftime('%Y-%m-%d')] = dict()
                resp['list'][key.strftime('%Y-%m-%d')]['总体'] = {'n': bins_s.to_list(), 'total': total,
                                                               'p': [str(i) + '%' for i in p],
                                                               'title': '总体'}
        # 分组的 (grouped branch intentionally disabled)
        # if groupby:
        #     for key, tmp_df in df.groupby(['date', *groupby]):
        #         bins_s = pd.cut(tmp_df['values'], bins=bins,
        #                         right=False).value_counts()
        #         bins_s.sort_index(inplace=True)
        #         total = int(bins_s.sum())
        #         title = '.'.join(key[1:])
        #         date = key[0]
        #         resp['list'][date.strftime('%Y-%m-%d')][title] = {'n': bins_s.to_list(), 'total': total,
        #                                                           'p': round((bins_s * 100 / total).fillna(0),
        #                                                                      2).to_list(),
        #                                                           'title': title
        #                                                           }
        # Optional xlsx download.
        download = analysis.event_view.get('download', '')
        if download == 1:
            creat_df = create_df(resp)
            Download = Download_xlsx(creat_df, '分布分析')
            return Download
        return schemas.Msg(code=0, msg='ok', data=resp)

    # elif analysi == 'number_of_days':
    else:
        # Discrete values / number-of-days: one label per distinct value.
        resp = {'list': {}, 'label': [],
                'start_date': res['start_date'],
                'end_date': res['end_date'],
                'time_particle': res['time_particle']
                }
        labels = [str(i) for i in sorted(df['values'].unique())]
        resp['label'] = labels
        for key, tmp_df in df.groupby(['date']):
            total = len(tmp_df)
            if res['time_particle'] == 'total':
                dt = '合计'
            else:
                dt = key.strftime('%Y-%m-%d')

            labels_dict = {}
            for key2, tmp_df2 in tmp_df.groupby('values'):
                label = str(key2)
                n = len(tmp_df2)
                labels_dict[label] = n
            if event_type == 'pay':
                # For pay events report, for each value v, the count of users
                # with value >= v (suffix sums), not the exact count.
                # NOTE(review): labels_dict["1"] assumes value 1 is present —
                # KeyError otherwise; confirm upstream guarantees it.
                labels_dict01 = {}
                v = -1
                for i in labels:
                    v += 1
                    if int(i) == 1:
                        labels_dict01["1"] = labels_dict["1"]
                    else:
                        # for number in labels_dict.keys():
                        #     if number >=i:
                        values = list(labels_dict.values())
                        n = sum(values[v:])
                        labels_dict01[i] = n
                # Percentages as strings.
                list_p = []
                for i in labels:
                    number_int = round(labels_dict01.get(i, 0) * 100 / total, 2)
                    number_str = str(number_int) + '%'
                    list_p.append(number_str)

                resp['list'][dt] = {'总体': {'n': [labels_dict01.get(i, 0) for i in labels], 'total': total,
                                           'p': list_p}}
            else:
                list_p = []
                for i in labels:
                    number_int = round(labels_dict.get(i, 0) * 100 / total, 2)
                    number_str = str(number_int) + '%'
                    list_p.append(number_str)
                resp['list'][dt] = {'总体': {'n': [labels_dict.get(i, 0) for i in labels], 'total': total,
                                           'p': list_p}}
            # resp['list'][dt] = {'总体': {'n': [labels_dict.get(i, 0) for i in labels], 'total': total,
            #                            'p': [round(labels_dict.get(i, 0) * 100 / total, 2) for i in labels]}}
        # For guide-step distributions, replace the denominator with the
        # per-day count of created accounts.
        if where == "step_id" and event_type == "guide":
            sql = f"""SELECT toDate(addHours({game}.event."#event_time", 8)) AS date, count(DISTINCT {game}.event."#account_id") AS values FROM {game}.event WHERE addHours({game}.event."#event_time", 8) >= '{start_date}' AND addHours({game}.event."#event_time", 8) <= '{end_date}' AND {game}.event."#event_name" = 'create_account' GROUP BY toDate(addHours({game}.event."#event_time", 8)) ORDER BY date"""
            df = await ckdb.query_dataframe(sql)
            for i in range(len(df)):
                resp['list'][str(df['date'][i])]['总体']['total'] = int(df['values'][i])
        # Optional xlsx download.
        download = analysis.event_view.get('download', '')
        if download == 1:
            creat_df = create_df(resp)
            Download = Download_xlsx(creat_df, '分布分析')
            return Download
        return schemas.Msg(code=0, msg='ok', data=resp)

        # bins_s = pd.cut(tmp_df['values'], bins=bins,
        #                 right=False).value_counts()
        # bins_s.sort_index(inplace=True)
        # total = int(bins_s.sum())
        # resp['list'][key.strftime('%Y-%m-%d')] = dict()
        # resp['list'][key.strftime('%Y-%m-%d')]['总体'] = {'n': bins_s.to_list(), 'total': total,
        #                                                'p': round(bins_s * 100 / total, 2).to_list(),
        #                                                'title': '总体'}
@router.post("/scatter_model_details")
async def scatter_model(
        request: Request,
        game: str,
        ckdb: CKDrive = Depends(get_ck_db),
        db: AsyncIOMotorDatabase = Depends(get_database),
        analysis: BehaviorAnalysis = Depends(BehaviorAnalysis),
        current_user: schemas.UserDB = Depends(deps.get_current_user)
) -> schemas.Msg:
    """Distribution details: distribution keyed by the last groupBy column.

    Rewrites the generated SQL so rows are keyed by max(groupBy column) ('va')
    instead of the event date, then buckets/labels the quota values per key.
    """
    await analysis.init(data_where=current_user.data_where)
    try:
        res = await analysis.scatter_model_sql()
    except Exception:
        return schemas.Msg(code=-9, msg='报表配置参数异常')
    event_type = analysis.events[0]['eventName']
    where = analysis.events[-1]['quotaname']
    # FIX: start_date/end_date were referenced in the guide-SQL branch below
    # but never defined in this function (NameError at runtime).
    start_date = analysis.start_date
    end_date = analysis.end_date
    sql = res['sql']
    if analysis.event_view['groupBy'] != []:
        # FIX: index groupBy[-1] only after confirming it is non-empty;
        # previously an empty groupBy raised IndexError before reaching the
        # '没有添加分组项' branch, making it unreachable.
        columnName = analysis.event_view['groupBy'][-1]['columnName']
        sql = sql.replace(f'toDate(addHours({game}.event."#event_time", 8)) AS date', f'max(`{columnName}`) as va', 1)
        sql = sql.replace(f'toDate(addHours({game}.event."#event_time", 8)),', '', 1)
        df = await ckdb.query_dataframe(sql)
        if df.empty:
            return schemas.Msg(code=-9, msg='无数据', data=None)
        # When 'values' holds list cells, explode into one row per element.
        if 'list' in str(type(df['values'][0])):
            df = df.explode("values").reset_index(drop=True)
        df.fillna(0, inplace=True)
        # Coerce values to int for binning.
        df['values'] = df['values'].astype(int)
        interval_type = res['interval_type']
        analysi = res['analysis']
        groupby = res['groupby']
        quota_interval_arr = res['quota_interval_arr']
        # Compatibility for the 'total' granularity.
        if res['time_particle'] == 'total':
            df['date'] = '合计'

        if analysi != 'number_of_days' and interval_type != 'discrete':
            # Continuous values: ~10 equal-width bins or configured intervals.
            max_v = int(df['values'].max())
            min_v = int(df['values'].min())
            interval = (max_v - min_v) // 10 or 1
            resp = {'list': dict(),
                    'start_date': res['start_date'],
                    'end_date': res['end_date'],
                    'time_particle': res['time_particle'],
                    'biaotou': columnName
                    }

            if not quota_interval_arr:
                resp['label'] = [f'[{i},{i + interval})' for i in range(min_v, max_v, interval)]
                bins = [i for i in range(min_v, max_v + interval, interval)]
            else:
                quota_interval_arr = [-float('inf')] + quota_interval_arr + [float('inf')]
                resp['label'] = []
                bins = [quota_interval_arr[0]]
                for i, v in enumerate(quota_interval_arr[1:]):
                    resp['label'].append(f'[{quota_interval_arr[i]},{v})')
                    bins.append(v)
            # Normalize the group key column.
            if 'float' in str(df.dtypes['va']):
                df['va'] = df['va'].astype(int)
            if 'list' in str(type(df['va'][0])):
                df['va'] = df['va'].map(lambda x: x[0])
            # Distribution per group key.
            for key, tmp_df in df.groupby('va'):
                bins_s = pd.cut(tmp_df['values'], bins=bins,
                                right=False).value_counts()
                bins_s.sort_index(inplace=True)
                total = int(bins_s.sum())
                if res['time_particle'] == 'total':
                    resp['list']['合计'] = dict()
                    resp['list']['合计'] = {'n': bins_s.to_list(), 'total': total,
                                          'p': round(bins_s * 100 / total, 2).to_list(),
                                          'title': '总体'}
                else:
                    p = round(bins_s * 100 / total, 2).to_list()
                    # NaN percentages (empty buckets) reported as 0.
                    for i in range(len(p)):
                        if str(p[i]) == 'nan':
                            p[i] = 0
                    if 'time' not in columnName:
                        resp['list'][key] = {'n': bins_s.to_list(), 'total': total,
                                             'p': [str(i) + '%' for i in p],
                                             'title': '总体'}
                    else:
                        resp['list'][key.strftime('%Y-%m-%d %H:%M:%S')] = {'n': bins_s.to_list(), 'total': total,
                                                                           'p': [str(i) + '%' for i in p],
                                                                           'title': '总体'}
            # Optional xlsx download.
            download = analysis.event_view.get('download', '')
            if download == 1:
                create_df = create_neidf(resp, columnName)
                Download = Download_xlsx(create_df, '分布分析')
                return Download
            return schemas.Msg(code=0, msg='ok', data=resp)
        else:
            # Discrete values / number-of-days: one label per distinct value.
            resp = {'list': {}, 'label': [],
                    'start_date': res['start_date'],
                    'end_date': res['end_date'],
                    'time_particle': res['time_particle'],
                    'biaotou': columnName
                    }
            labels = [str(i) for i in sorted(df['values'].unique())]
            resp['label'] = labels
            if 'list' in str(type(df['va'][0])):
                df['va'] = df['va'].map(lambda x: x[0])
            for key, tmp_df in df.groupby(['va']):
                total = len(tmp_df)
                if res['time_particle'] == 'total':
                    dt = '合计'
                else:
                    dt = key

                labels_dict = {}
                for key2, tmp_df2 in tmp_df.groupby('values'):
                    label = str(key2)
                    n = len(tmp_df2)
                    labels_dict[label] = n
                if event_type == 'pay':
                    # For pay events report suffix sums: count of users with
                    # value >= v for each label v.
                    labels_dict01 = {}
                    v = -1
                    for i in labels:
                        v += 1
                        if int(i) == 1:
                            labels_dict01["1"] = labels_dict["1"]
                        else:
                            values = list(labels_dict.values())
                            n = sum(values[v:])
                            labels_dict01[i] = n
                    # Percentages as strings.
                    list_p = []
                    for i in labels:
                        number_int = round(labels_dict01.get(i, 0) * 100 / total, 2)
                        number_str = str(number_int) + '%'
                        list_p.append(number_str)

                    resp['list'][dt] = {'n': [labels_dict01.get(i, 0) for i in labels], 'total': total,
                                        'p': list_p}
                else:
                    list_p = []
                    for i in labels:
                        number_int = round(labels_dict.get(i, 0) * 100 / total, 2)
                        number_str = str(number_int) + '%'
                        list_p.append(number_str)
                    resp['list'][dt] = {'n': [labels_dict.get(i, 0) for i in labels], 'total': total,
                                        'p': list_p}
            # For guide-step distributions, replace the denominator with the
            # per-day count of created accounts.
            if where == "step_id" and event_type == "guide":
                sql = f"""SELECT toDate(addHours({game}.event."#event_time", 8)) AS date, count(DISTINCT {game}.event."#account_id") AS values FROM {game}.event WHERE addHours({game}.event."#event_time", 8) >= '{start_date}' AND addHours({game}.event."#event_time", 8) <= '{end_date}' AND {game}.event."#event_name" = 'create_account' GROUP BY toDate(addHours({game}.event."#event_time", 8)) ORDER BY date"""
                df = await ckdb.query_dataframe(sql)
                for i in range(len(df)):
                    resp['list'][str(df['date'][i])]['total'] = int(df['values'][i])
            # Optional xlsx download.
            download = analysis.event_view.get('download', '')
            if download == 1:
                create_df = create_neidf(resp, columnName)
                Download = Download_xlsx(create_df, '分布分析')
                return Download
            return schemas.Msg(code=0, msg='ok', data=resp)
    else:
        return schemas.Msg(code=-9, msg='没有添加分组项', data='')
trace_model_sql( + request: Request, + game: str, + analysis: BehaviorAnalysis = Depends(BehaviorAnalysis), + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + """路径分析 sql""" + await analysis.init(data_where=current_user.data_where) + data = await analysis.trace_model_sql() + return schemas.Msg(code=0, msg='ok', data=[data]) + + +@router.post("/trace_model") +async def trace_model_sql( + request: Request, + game: str, + ckdb: CKDrive = Depends(get_ck_db), + analysis: BehaviorAnalysis = Depends(BehaviorAnalysis), + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + """路径分析""" + await analysis.init(data_where=current_user.data_where) + res = await analysis.trace_model_sql() + sql = res['sql'] + df = await ckdb.query_dataframe(sql) + if df.empty: + return schemas.Msg(code=-9, msg='无数据', data=None) + chain_dict = defaultdict(dict) + nodes = {'流失'} + for event_names, count in zip(df['event_chain'], df['values']): + chain_len = len(event_names) + for i, event_name in enumerate(event_names): + if i >= 10: + continue + next_event = event_names[i + 1] if i < chain_len - 1 else '流失' + key = (f'{event_name}{i}', f'{next_event}{i + 1}') + nodes.update(key) + chain_dict[i][key] = chain_dict[i].setdefault(key, 0) + count + + links = [] + for _, items in chain_dict.items(): + for keys, val in items.items(): + links.append({ + "source": keys[0], + "target": keys[1], + "value": val + }) + # nodes = set() + # for item in links: + # nodes.update(( + # item['source'], + # item['target']) + # ) + data = { + 'nodes': [{'name': item} for item in nodes], + 'links': links, + 'start_date': res['start_date'], + 'end_date': res['end_date'], + 'time_particle': res['time_particle'] + } + return schemas.Msg(code=0, msg='ok', data=data) + + +@router.post("/user_property_model_sql") +async def user_property_sql( + request: Request, + game: str, + analysis: UserAnalysis = Depends(UserAnalysis), + current_user: schemas.UserDB = 
Depends(deps.get_current_user) +) -> schemas.Msg: + """用户属性sql""" + await analysis.init(data_where=current_user.data_where) + data = analysis.property_model() + return schemas.Msg(code=0, msg='ok', data=[data]) + + +@router.post("/user_property_model_export") +async def user_property_model_export( + request: Request, + game: str, + ckdb: CKDrive = Depends(get_ck_db), + analysis: UserAnalysis = Depends(UserAnalysis), + current_user: schemas.UserDB = Depends(deps.get_current_user) +): + """用户属性 导出""" + await analysis.init(data_where=current_user.data_where) + data = analysis.property_model() + file_name = quote(f'用户属性.xlsx') + mime = mimetypes.guess_type(file_name)[0] + + sql = data['sql'] + df = await ckdb.query_dataframe(sql) + if df.empty: + return schemas.Msg(code=-9, msg='无数据', data=None) + df_to_stream = DfToStream((df, '用户属性')) + with df_to_stream as d: + export = d.to_stream() + return StreamingResponse(export, media_type=mime, headers={'Content-Disposition': f'filename="{file_name}"'}) + + +@router.post("/user_property_model") +async def user_property_model( + request: Request, + game: str, + analysis: UserAnalysis = Depends(UserAnalysis), + ckdb: CKDrive = Depends(get_ck_db), + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + """用户属性分析""" + await analysis.init(data_where=current_user.data_where) + res = analysis.property_model() + sql = res['sql'] + quota = res['quota'] + groupby = res['groupby'] + df = await ckdb.query_dataframe(sql) + if df.empty: + return schemas.Msg(code=-9, msg='无数据', data=None) + + # 没有分组 + data = {} + + if not groupby: + data['总体'] = int(df['values'][0]) + title = ['总体', quota] + + else: + sum_s = df.groupby(groupby)['values'].sum() + for key, val in sum_s.items(): + if isinstance(key, tuple): + key = ','.join([str(i) for i in key]) + else: + key = str(key) + data[key] = val + title = ['.'.join(groupby), quota] + + return schemas.Msg(code=0, msg='ok', data={ + 'value': data, + 'title': title + }) + 
+@router.post("/seek_user") +async def user_property_model( + request: Request, + game: str, + data_in: schemas.Ck_seek_user, + ckdb: CKDrive = Depends(get_ck_db) +) -> schemas.Msg: + """游戏用户搜索功能""" + #判断的内容 + data=data_in.condition + #需要判断的字段 + ziduan=data_in.user_arrt_title + #筛选条件 + tiaojian=data_in.comparator_id + if tiaojian == '==': + tiaojian = '=' + #判断是否是时间类型 + if data_in.user_arrt_type == 'datetime': + sql = f"""select `#account_id`,`{ziduan}` from {game}.`user` WHERE addHours(`{ziduan}`, 8) >= '{data_in.start_time}' + and addHours(`{ziduan}`, 8) <= '{data_in.end_time}' ORDER BY `#reg_time` LIMIT 10 OFFSET {(data_in.pages-1)*10}""" + #如果查询'#account_id',则不多余返回一个account_id + elif ziduan == '#account_id': + sql=f"""select `{ziduan}`,name from {game}.`user` WHERE `{ziduan}` {tiaojian} '{data_in.condition}' ORDER BY `#reg_time` LIMIT 10 OFFSET {(data_in.pages-1)*10} """ + elif data_in.user_arrt_type == 'int': + sql=f"""select `#account_id`,`{ziduan}` from {game}.`user` WHERE `{ziduan}` {tiaojian} {data_in.condition} ORDER BY `#reg_time` LIMIT 10 OFFSET {(data_in.pages-1)*10}""" + else: + sql=f"""select `#account_id`,`{ziduan}` from `{game}`.`user` WHERE `{ziduan}` {tiaojian} '{data}' ORDER BY `#reg_time` LIMIT 10 OFFSET {(data_in.pages-1)*10}""" + # 查询数据 + try: + df = await ckdb.query_dataframe(sql) + except Exception as e: + return schemas.Msg(code=0, msg='查询参数不匹配', data=e) + # 转换成列表返回 + df.fillna(0, inplace=True) + account_id=list(df['#account_id']) + new_sql=f"""select `#account_id`,`#ip`,`#distinct_id`,rmbmoney,owner_name,lv,zhanli,channel, + channel,svrindex,maxmapid,name,`exp`,vip,jinbi,last_account_login_time,binduid from {game}.`user` where `#account_id` in ({account_id})""" + df1= await ckdb.query_dataframe(new_sql) + new_values=df1.values.tolist() + for i in range(len(new_values)): + if str(new_values[i][6]) == 'nan': + new_values[i][6]=0 + res = {'refer':{ + 'columns': df.columns.tolist(), + 'values': df.values.tolist() + }, + 'details_data':{ + 
'new_columns':df1.columns.tolist(), + 'new_values':new_values + }} + return schemas.Msg(code=0, msg='ok', data=res) +@router.post("/seek_user_count") +async def user_property_model( + request: Request, + game: str, + data_in: schemas.Ck_seek_user, + ckdb: CKDrive = Depends(get_ck_db) +) -> schemas.Msg: + """游戏用户搜索功能查询到的数量""" + #判断的内容 + data=data_in.condition + #需要判断的字段 + ziduan=data_in.user_arrt_title + #筛选条件 + tiaojian=data_in.comparator_id + if tiaojian == '==': + tiaojian = '=' + #判断是否是时间类型 + if data_in.user_arrt_type == 'datetime': + sql = f"""select `#account_id`,`{ziduan}` from {game}.`user` WHERE addHours(`{ziduan}`, 8) >= '{data_in.start_time}' + and addHours(`{ziduan}`, 8) <= '{data_in.end_time}' ORDER BY `#reg_time`""" + #如果查询'#account_id',则不多余返回一个account_id + elif ziduan == '#account_id': + sql=f"""select `{ziduan}`,name from {game}.`user` WHERE `{ziduan}` {tiaojian} '{data_in.condition}' ORDER BY `#reg_time` """ + elif data_in.user_arrt_type == 'int': + sql = f"""select `#account_id`,`{ziduan}` from {game}.`user` WHERE `{ziduan}` {tiaojian} {data_in.condition} ORDER BY `#reg_time`""" + else: + sql = f"""select `#account_id`,`{ziduan}` from `{game}`.`user` WHERE `{ziduan}` {tiaojian} '{data}' ORDER BY `#reg_time`""" + #查询数据 + try: + df = await ckdb.query_dataframe(sql) + except Exception as e: + return schemas.Msg(code=0, msg='查询参数不匹配', data=e) + #返回查询到的数量 + res=len(df) + return schemas.Msg(code=0, msg='ok', data=res) + +@router.post("/download_user") +async def user_property_model( + request: Request, + game: str, + data_in: schemas.Ck_seek_user, + ckdb: CKDrive = Depends(get_ck_db) +): + """下载查询到的所有数据""" + #判断的内容 + data=data_in.condition + #需要判断的字段 + ziduan=data_in.user_arrt_title + #筛选条件 + tiaojian=data_in.comparator_id + if tiaojian == '==': + tiaojian = '=' + #判断是否是时间类型 + if data_in.user_arrt_type == 'datetime': + sql = f"""select `#account_id`,`{ziduan}` from {game}.`user` WHERE addHours(`{ziduan}`, 8) >= '{data_in.start_time}' + and 
addHours(`{ziduan}`, 8) <= '{data_in.end_time}' ORDER BY `#reg_time`""" + #如果查询'#account_id',则不多余返回一个account_id + elif ziduan == '#account_id': + sql=f"""select `{ziduan}`,name from {game}.`user` WHERE `{ziduan}` {tiaojian} '{data_in.condition}' ORDER BY `#reg_time` """ + elif data_in.user_arrt_type == 'int': + sql = f"""select `#account_id`,`{ziduan}` from {game}.`user` WHERE `{ziduan}` {tiaojian} {data_in.condition} ORDER BY `#reg_time`""" + else: + sql = f"""select `#account_id`,`{ziduan}` from `{game}`.`user` WHERE `{ziduan}` {tiaojian} '{data}' ORDER BY `#reg_time`""" + #查询数据 + try: + df = await ckdb.query_dataframe(sql) + except Exception as e: + return schemas.Msg(code=0, msg='查询参数不匹配', data=e) + if df.empty: + return schemas.Msg(code=-9, msg='无数据',data='') + account_id = list(df['#account_id']) + new_sql = f"""select `#account_id`,`#ip`,`#distinct_id`,rmbmoney,owner_name,lv,zhanli,channel, + channel,svrindex,maxmapid,name,`exp`,vip,jinbi,last_account_login_time,binduid from {game}.`user` where `#account_id` in ({account_id})""" + df1 = await ckdb.query_dataframe(new_sql) + file_name=quote(f'下载的用户搜索数据.xlsx') + mime = mimetypes.guess_type(file_name)[0] + df_to_stream = DfToStream((df1, '下载的用户搜索数据')) + with df_to_stream as d: + export = d.to_stream() + return StreamingResponse(export, media_type=mime, headers={'Content-Disposition': f'filename="{file_name}"'}) + +@router.post("/solo_user") +async def user_property_model( + request: Request, + game: str, + data_in: schemas.Ck_solo_user, + ckdb: CKDrive = Depends(get_ck_db) +): + """用户的详情""" + if data_in.event_list == []: + return schemas.Msg(code=-9, msg='请配置用户搜索模块事件', data=[]) + event_dict={} + for i in data_in.event_list: + event_dict[i['event']]=i['event_name'] + # event_dict={'pay':'充值','create_account':'创建角色','login':'登录','ta_app_end':'离开游戏','guide':'新手引导','level_up':'玩家等级', + # 'vip_level':'vip等级','sign':'签到','summon':'招募','ask_for_join_guild':'加入联盟','leave_guild':'离开联盟','create_guild':'创建联盟'} + 
sql=f"""select `#account_id`,`#ip`,`#distinct_id`,rmbmoney,owner_name,lv,zhanli,channel, + channel,svrindex,maxmapid,name,`exp`,vip,jinbi,last_account_login_time,binduid from {game}.`user` where `#account_id` = '{data_in.account_id}'""" + #获取用户基本详情 + df= await ckdb.query_dataframe(sql) + #获取用户每天事件量 + start_times=data_in.start_time.split(' ')[0] + end_times=data_in.end_time.split(' ')[0] + event = list(event_dict.keys()) + sql1 = f"""select toDate(addHours(`#event_time`, `#zone_offset`)) as date,count(`#event_name`) as v from {game}.event + where `date`>='{start_times}' and `date`<='{end_times}' + and `#account_id`='{data_in.account_id}' and `#event_name` in ({event}) group by date ORDER by date""" + df1=await ckdb.query_dataframe(sql1) + #时间间隔天数 + global event_values, data_list,game_details,zhanbi + if len(df1) >0: + time_interval=getEveryDay(start_times,end_times) + a = list(df1['date']) + aa=[] + for i in a: + aa.append(str(i)) + for i in time_interval: + if i not in aa: + df1.loc[len(df1.index)] = [i, 0] + df1[['date']]=df1[['date']].astype(str) + df1.sort_values('date',inplace=True) + data_list=list(df1['date']) + event_values=list(df1['v']) + else: + data_list= [] #getEveryDay(start_times,end_times) + event_values=[] + #获取用户事件的详情 + sql2=f"""select * FROM {game}.event WHERE `#account_id`='{data_in.account_id}' and addHours(`#event_time`, `#zone_offset`) >='{data_in.start_time}' and +addHours(`#event_time`, `#zone_offset`) <= '{data_in.end_time}' and `#event_name` in ({event}) order by `#event_time`""" + df2=await ckdb.query_dataframe(sql2) + if len(df2) > 0: + game_details={} + #区分天数 + days=list(df2['#event_time']) + day_set=set() + for i in days: + day_set.add(str(i).split(' ')[0]) + #总日期,一天的 + day_list=list(day_set) + day_list.sort() + for day in day_list: + game_deta = [] + for nu in range(len(df2)): + if day in str(df2['#event_time'][nu]): + #详细时间 + game_detail={} + time_s=str(df2['#event_time'][nu]).split('+')[0] + game_detail['time']=time_s.split(' ')[1] 
+ game_detail['event']=event_dict[df2['#event_name'][nu]] + a_list = [] + #获取df的字段名 + columns=df2.columns.values + for col in columns: + a=str(df2[col][nu]) + if a != 'None' and a != '' and a != 'nan' and a != '[]': + a_list.append({'title':col,'val':a}) + game_detail['xaingqing']=a_list + game_deta.append(game_detail) + game_details[day]=game_deta + else: + game_details = {} + #event_count = await ckdb.yesterday_event_count(game) + #求事件占比 + sql3=f"""select `#event_name` as a,count(`#event_name`) as v from {game}.event + where addHours(`#event_time`, `#zone_offset`)>='{data_in.start_time}' and addHours(`#event_time`, `#zone_offset`)<='{data_in.end_time}' + and `#account_id`='{data_in.account_id}' and `#event_name` in ({event}) group by `#event_name`""" + df3 = await ckdb.query_dataframe(sql3) + if len(df3) > 0: + zhanbi=[] + sums=sum(list(df1['v'])) + numbers=0 + for i in range(len(df3)): + shuju={} + shuju['name']=event_dict[df3['a'][i]] + shuju['value']=int(df3['v'][i]) + # if i != len(df3)-1: + # number1=round(int(df3['v'][i]) / sums, 2) + # number=round(number1*100,2) + # numbers+=number + # shuju['zhanbi'] = str(number) + '%' + # else: + # shuju['zhanbi']=str(100-numbers) + '%' + zhanbi.append(shuju) + else: + zhanbi = [] + res = { + 'details_data':{ + 'new_columns':df.columns.tolist(), + 'new_values':df.values.tolist()}, + 'event_count':{ + 'date':data_list, + 'event_values':event_values + }, + 'details_user':game_details, + 'proportion':zhanbi + } + return schemas.Msg(code=0, msg='ok', data=res) +@router.get("/event_list") +async def event_list( + request: Request, + game: str, + db: AsyncIOMotorDatabase = Depends(get_database), + ckdb: CKDrive = Depends(get_ck_db), + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + """个人详情中的事件列表""" + #获取事件名 + #event_list = await ckdb.distinct(game, 'event', '#event_name') + event_list = await crud.event_list.get_list(db,game) + if event_list == []: + return schemas.Msg(code=0, 
msg='请配置用户搜索模块事件', data=[]) + else: + res=event_list[0]['details'] + return schemas.Msg(code=0, msg='ok', data=res) + +@router.post("/add_event_list") +async def add_select_map( + request: Request, + game: str, + file: bytes = File(...), + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + """添加对应游戏事件选择映射""" + dfs = pd.read_excel(file, engine='openpyxl', sheet_name=None) + for attr_name, df in dfs.items(): + #将id这列转换成字符串类型 + if len(df) >0: + ColNames = df.columns.tolist() + event = df.to_dict('records') + details=[] + for i in event: + details_dict={} + details_dict['event']=i[ColNames[0]] + details_dict['event_name']=i[ColNames[1]] + details.append(details_dict) + data_in = schemas.Event_list(game=game, details=details) + await crud.event_list.save(db, data_in) + return schemas.Msg(code=0, msg='ok', data=1) diff --git a/api/api_v1/endpoints/report.py b/api/api_v1/endpoints/report.py new file mode 100644 index 0000000..4938414 --- /dev/null +++ b/api/api_v1/endpoints/report.py @@ -0,0 +1,166 @@ +from typing import Any + +import pymongo +from fastapi import APIRouter, Depends, Request +from motor.motor_asyncio import AsyncIOMotorDatabase +import crud, schemas + +from db import get_database +from api import deps +from utils import get_uid + +router = APIRouter() + + +@router.post("/create") +async def create( + request: Request, + data_in: schemas.ReportCreate, + game: str, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + """新建报表""" + try: + await crud.report.create(db, data_in, user_id=request.user.id) + except pymongo.errors.DuplicateKeyError: + return schemas.Msg(code=-1, msg='error', data='报表已存在') + + return schemas.Msg(code=0, msg='ok', data='创建成功') + + +@router.post("/edit") +async def edit( + request: Request, + data_in: schemas.ReportEdit, + game: str, + db: AsyncIOMotorDatabase = 
Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + """编辑报表""" + res = await crud.report.update_one(db, {'_id': data_in.report_id, 'user_id': request.user.id}, + {'$set': {'query': data_in.query, 'name': data_in.name, 'desc': data_in.desc}}) + # if not res.matched_count: + # #if res.matched_count: + # return schemas.Msg(code=-1, msg='只能报表所有者编辑') + return schemas.Msg(code=0, msg='ok', data='编辑成功') + + +@router.post("/copy") +async def copy( + request: Request, + data_in: schemas.ReportCopy, + game: str, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + """复制报表到其他项目""" + + for report_id in data_in.report_ids: + new_report = await crud.report.get(db, report_id) + if not new_report: + continue + new_report_id = get_uid() + new_report['_id'] = new_report_id + new_report['project_id'] = data_in.dest_project_id + await crud.report.insert_one(db, new_report) + return schemas.Msg(code=0, msg='ok', data='编辑成功') + + +@router.post("/read_report") +async def read_report( + request: Request, + data_in: schemas.ReportRead, + game: str, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> Any: + """获取已建报表 属于自己的""" + ext_where = dict() + dashboard = dict() + if data_in.report_id: + ext_where = {'_id': {'$in': data_in.report_id}} + else: + ext_where['user_id'] = request.user.id + if data_in.dashboard_id: + dashboard = await crud.dashboard.get(db, id=data_in.dashboard_id) + # projection = {'query': False} + projection = None + reports = await crud.report.read_report(db, project_id=data_in.project_id, + projection=projection, **ext_where) + + for item in reports: + item['added'] = False + # item['name'] = item['name'] + item['show_config'] = dict() + added_ids = {item['report_id']: item for item in dashboard.get('reports', [])} + if item['_id'] in added_ids: + item['added'] = 
True + item['show_config'] = added_ids[item['_id']] + #保存的看板按备注显示的数据显示 + if type(item['query']['events']) == list: + event_show_name = await crud.event_mana.get_all_show_name(db, game) + for i in item['query']['events']: + if 'event_name' in i: + if i['event_name'] in event_show_name: + if 'event_desc' in i : + event_name= i['event_name'] + i['event_desc']= event_show_name[event_name] + else: + event_name = i['event_name'] + i['eventDesc'] = event_show_name[event_name] + else: + if i['eventName'] in event_show_name: + if 'event_desc' in i : + event_name= i['eventName'] + i['event_desc']= event_show_name[event_name] + else: + event_name = i['eventName'] + i['eventDesc'] = event_show_name[event_name] +#放置争霸 + if type(item['query']['events']) == dict: + data_attr = await crud.data_attr.find_many(db, {'game': game}) + data_attr = {item['name']: item for item in data_attr} + item_dict=item['query']['events'] + for k,v in item_dict.items(): + if k == 'quotaDesc': + if item_dict['quotaDesc'] in data_attr: + item_dict['quotaDesc']=data_attr[item_dict['quotaDesc']]['show_name'] + + + + + + # for k,v in event_show_name.items(): + # if 'event_desc' in item['query']['events'][0]: + # event_desc = item['query']['events'][0]['event_desc'] + # if k == event_desc: + # item['query']['events'][0]['event_desc'] = event_show_name[event_desc] + # else: + # event_desc = item['query']['events'][0]['eventDesc'] + # if k == event_desc: + # item['query']['events'][0]['eventDesc'] = event_show_name[event_desc] + + reports = sorted(reports, key=lambda x: x.get('show_config', {'sort': 999}).get('sort', 999) or 999) + + return schemas.Msg(code=0, msg='ok', data=reports) + + +@router.post("/delete") +async def delete( + request: Request, + data_in: schemas.ReportDelete, + game: str, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + """删除报表""" + # 删除Report 自己创建的 + del_report = await crud.report.delete(db, 
{'_id': data_in.id, 'user_id': current_user.id}) + # 从看板弹出 + del_item = {'report_id': data_in.id} + await crud.dashboard.update_many(db, {}, {'$pull': {'reports': del_item}}) + + if del_report.deleted_count == 0: + return schemas.Msg(code=-1, msg='error', data='删除失败') + return schemas.Msg(code=0, msg='ok', data='删除成功') diff --git a/api/api_v1/endpoints/space.py b/api/api_v1/endpoints/space.py new file mode 100644 index 0000000..2e5c802 --- /dev/null +++ b/api/api_v1/endpoints/space.py @@ -0,0 +1,99 @@ +import pymongo +from fastapi import APIRouter, Depends +from motor.motor_asyncio import AsyncIOMotorDatabase +import crud, schemas + +from db import get_database +from api import deps + +router = APIRouter() + + +@router.post("/create") +async def create( + game: str, + data_in: schemas.SpaceCreate, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + """创建空间""" + try: + if data_in.is_all_member: + data_in.members.clear() + users = await crud.user.find_many(db) + for user in users: + if user['_id'] == current_user.id: + continue + data_in.members.append(schemas.space.Member(**user, authority=data_in.authority)) + await crud.space.create(db, data_in, user=current_user) + except pymongo.errors.DuplicateKeyError: + return schemas.Msg(code=-1, msg='空间已存在', data='空间已存在') + + return schemas.Msg(code=0, msg='创建成功', data='创建成功') + + +@router.post("/delete") +async def delete( + data_in: schemas.SpaceDelete, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + """删除空间""" + # 删除空间 自己创建的 + del_space = await crud.space.delete(db, {'_id': data_in.id}) + # 删除文件夹 + del_folder = await crud.folder.find_many(db, {'pid': data_in.id}) + del_folder_ids = [f['_id'] for f in del_folder] + await crud.folder.delete(db, {'pid': data_in.id}) + # 删除文件夹下的 dashboard + if del_folder_ids: + await crud.dashboard.delete(db, {'_id': 
{'$in': del_folder_ids}}) + + # 删除空间下的 dashboard + await crud.dashboard.delete(db, {'pid': data_in.id}) + + if del_space.deleted_count == 0: + return schemas.Msg(code=-1, msg='error', data='删除失败') + return schemas.Msg(code=0, msg='ok', data='删除成功') + + +@router.post("/rename") +async def rename( + data_in: schemas.SpaceRename, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + """重命名空间""" + res = await crud.space.rename(db, data_in) + + return schemas.Msg(code=0, msg='ok', data=1) + + +@router.post("/set_members") +async def set_members( + data_in: schemas.AddSpaceMembers, + game: str, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + """设置空间成员""" + res = await crud.space.set_members(db, data_in) + return schemas.Msg(code=0, msg='ok', data=1) + + +@router.post("/detail") +async def detail( + data_in: schemas.SpaceDetail, + game: str, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + """空间详细""" + space_info = await crud.space.get(db, id=data_in.space_id) + exists_member = {item.get('user_id') for item in space_info.get('members', [])} + members_info = await crud.user.find_ids(db, list(exists_member)) + members_info = {item['_id']: item for item in members_info} + for item in space_info.get('members', []): + if user_info := members_info.get(item['user_id']): + item.update(schemas.UserDB(**user_info).dict(by_alias=True)) + return schemas.Msg(code=0, msg='ok', data=space_info) diff --git a/api/api_v1/endpoints/test.py b/api/api_v1/endpoints/test.py new file mode 100644 index 0000000..dd5f770 --- /dev/null +++ b/api/api_v1/endpoints/test.py @@ -0,0 +1,39 @@ +from typing import Any + +from fastapi import APIRouter, Depends, Request +from motor.motor_asyncio import AsyncIOMotorDatabase + +import crud +import schemas 
+from api import deps +from db import get_database +from db.ckdb import CKDrive, get_ck_db +from db.redisdb import RedisDrive, get_redis_pool +from models.behavior_analysis import BehaviorAnalysis + +router = APIRouter() + + +@router.post("/test") +async def test( + request: Request, + rdb: RedisDrive = Depends(get_redis_pool), + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user), + ): + + """api 列表""" + app = request.app + data = {} + for r in app.routes: + title = r.tags[0] if hasattr(r, 'description') else None + if not title: + continue + data.setdefault(title, {'list': []}) + path = r.path + name = r.description if hasattr(r, 'description') else r.name + data[title]['list'].append({'api': path, 'title': name}) + + res = [{'title': k, 'list': v['list']} for k, v in data.items()] + + return schemas.Msg(code=0, msg='ok', data=res) diff --git a/api/api_v1/endpoints/user.py b/api/api_v1/endpoints/user.py new file mode 100644 index 0000000..28153b5 --- /dev/null +++ b/api/api_v1/endpoints/user.py @@ -0,0 +1,161 @@ +from datetime import timedelta +from typing import Any + +from fastapi import APIRouter, Body, Depends, HTTPException, Request +from fastapi.security import OAuth2PasswordRequestForm +from motor.motor_asyncio import AsyncIOMotorClient, AsyncIOMotorDatabase + +import crud, schemas +from api import deps +from core import security +from core.config import settings +from utils import get_uid +from db import get_database + +router = APIRouter() + + +@router.post("/login") +async def login( + # data: schemas.UserLogin, + data: OAuth2PasswordRequestForm = Depends(), + db: AsyncIOMotorDatabase = Depends(get_database) +) -> Any: + """ + OAuth2兼容令牌登录,获取将来令牌的访问令牌 + """ + user = await crud.user.authenticate(db, + name=data.username, password=data.password + ) + if not user: + # raise HTTPException(status_code=400, detail="Incorrect name or password") + return schemas.Msg(code=-1, msg='密码或用户名错误') + 
access_token_expires = timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES) + # access_token_expires = timedelta(seconds=5) + await crud.user.update_login_time(db, data.username) + + return { + 'data': { + 'name': user.name, + 'nickname': user.nickname, + 'email': user.email, + 'tel': user.tel, + 'userid':user.id, + + 'token': security.create_access_token( + expires_delta=access_token_expires, _id=str(user.id), email=user.email, + nickname=user.nickname, + is_superuser=user.is_superuser, name=user.name, + data_where=user.data_where, + ), + "token_type": "bearer", + + }, + 'access_token': security.create_access_token( + expires_delta=access_token_expires, _id=str(user.id), email=user.email, + nickname=user.nickname, + is_superuser=user.is_superuser, name=user.name, data_where=user.data_where + ), + "token_type": "bearer", + + 'code': 0, + 'msg': 'success', + } + + +@router.get("/me", response_model=schemas.User) +def me(current_user: schemas.User = Depends(deps.get_current_user)) -> Any: + """ + Test access token + """ + return current_user + + +@router.post("/reset_password") +async def reset_password(request: Request, + game: str, + data_in: schemas.UserRestPassword, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.User = Depends(deps.get_current_user) + ) -> Any: + """ + 修改其他人密码 + """ + try: + await crud.user.reset_password(db, data_in) + except Exception as e: + return schemas.Msg(code=0, msg='修改失败', data={'username': data_in}) + return schemas.Msg(code=0, msg='ok') + + +@router.post("/reset_my_password") +async def reset_password(request: Request, + game: str, + data_in: schemas.UserRestMyPassword, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.User = Depends(deps.get_current_user) + ) -> Any: + """ + 修改自己的密码 + """ + await crud.user.reset_password(db, schemas.UserRestPassword(username=current_user.name, password=data_in.password)) + return schemas.Msg(code=0, msg='ok') + + +@router.post("/edit_profile") 
+async def edit_profile(request: Request, + game: str, + data_in: schemas.UserProfileEdit, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.User = Depends(deps.get_current_user) + ) -> Any: + """ + 编辑用户资料 + """ + await crud.user.edit_profile(db, data_in, user_id=request.user.id) + return schemas.Msg(code=0, msg='ok', data=data_in) + + +@router.get("/all_account") +async def all_account(page: int = 1, limit: int = 100, db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.User = Depends(deps.get_current_user) + ) -> Any: + """ + 获取所有用户 + """ + page -= 1 + if page < 0: + page = 0 + cursor = crud.user.find(db).skip(page * limit).limit(limit) + + data = [schemas.UserDB(**user) async for user in cursor] + + return schemas.Msg(code=0, msg='ok', data=data) + + +@router.post("/add_account") +async def all_account( + data_in: schemas.CreateAccount, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.User = Depends(deps.get_current_user) +) -> schemas.Msg: + """ + 创建新账号 + """ + created = [] + id = [] + for name in data_in.account_list: + if is_exists := await crud.user.exists(db, {'name': name}): + continue + else: + new_account = schemas.UserCreate(name=name, password='123') + created.append(name) + #创建账户并返回id + id_one=await crud.user.create(db, new_account) + id.append(id_one) + res = { + 'created_account': created, + 'password': '123', + 'id':id + } + return schemas.Msg(code=0, msg='ok', data=res) diff --git a/api/api_v1/endpoints/xquery.py b/api/api_v1/endpoints/xquery.py new file mode 100644 index 0000000..44e84f0 --- /dev/null +++ b/api/api_v1/endpoints/xquery.py @@ -0,0 +1,271 @@ +import datetime +import mimetypes +from collections import defaultdict +import time +from urllib.parse import quote +import re +from clickhouse_driver import Client +import pandas as pd +import numpy as np +from fastapi import APIRouter, Depends, Request +from motor.motor_asyncio import AsyncIOMotorDatabase +from pandas 
import DataFrame +from starlette.responses import StreamingResponse + +import crud, schemas +from common import * + +from api import deps +from db import get_database +from db.ckdb import get_ck_db, CKDrive, ckdb +from db.redisdb import get_redis_pool, RedisDrive + +from models.behavior_analysis import BehaviorAnalysis +from models.user_analysis import UserAnalysis +from models.x_analysis import XAnalysis +from utils import DfToStream, get_bijiao + +router = APIRouter() + + +@router.post("/ltv_model_sql") +async def ltv_model_sql( + request: Request, + game: str, + analysis: XAnalysis = Depends(XAnalysis), + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + """ ltv模型sql """ + await analysis.init(data_where=current_user.data_where) + data = analysis.ltv_model_sql() + return schemas.Msg(code=0, msg='ok', data=[data]) + + +@router.post("/ltv_model") +async def ltv_model_sql( + request: Request, + game: str, + analysis: XAnalysis = Depends(XAnalysis), + ckdb: CKDrive = Depends(get_ck_db), + current_user: schemas.UserDB = Depends(deps.get_current_user) +) -> schemas.Msg: + """ ltv模型sql """ + await analysis.init(data_where=current_user.data_where) + res = analysis.ltv_model_sql() + sql = res['sql'] + #仅一条筛选条件则是把GM过滤后获取全部数据 + if len(analysis.global_filters)==1 and analysis.global_filters[0]['strftv']=='GM': + try: + df = await ckdb.query_dataframe(sql) + except Exception as e: + return schemas.Msg(code=-9, msg='报表配置参数异常') + #多条筛选条件则合成新的sql + else: + new_sql="""""" + #拆分sql + split_sql = sql.split('AND 1') + #获取每一条筛选条件 + for i in analysis.global_filters: + #剔除GM + if i['strftv'] != 'GM': + #获取筛选条件的包含关系 + bijiao=get_bijiao(i["comparator"]) + #获取筛选条件的值 + condition=tuple(i['ftv']) + #获取事件名 + columnName=i['columnName'] + dd = f""" AND {game}.event.{columnName} {bijiao} {condition}""" + new_sql+=dd + split_="""AND 1 """ + news_sql = split_sql[0] + split_+new_sql + split_sql[1] + split_+new_sql+ split_sql[2]+split_+split_sql[3] + df = await 
ckdb.query_dataframe(news_sql) + # # 判断11月23号之前的数据 + # list_data_range=analysis.date_range + # liststr_data_range=[] + # for i in list_data_range: + # liststr_data_range.append(str(i)) + # quota = analysis.event_view['quota'] + # #判断是设备LTV则执行下面代码,如是角色实充LTV则不执行 + # if quota == '#distinct_id': + # if '2021-11-22' in liststr_data_range or '2021-11-22' >=liststr_data_range[-1]: + # #取搜索最后为11.23号之前的数据 + # if '2021-11-22' >=liststr_data_range[-1]: + # news_sql="""""" + # split_sql=sql.split('AND is_new_device = 1') + # new_sql=split_sql[0]+split_sql[1]+split_sql[2] + # news_sql+=new_sql + # df_twenty_three=await ckdb.query_dataframe(news_sql) + # #取包含有11.23号之前和23号之后的那一段 + # else: + # start_date=str(list_data_range[0]) + # end_date='2021-11-22' + # news_sql = """""" + # split_sql = sql.split('AND is_new_device = 1') + # for i in split_sql: + # news_sql += i + # #用正则表达式切时间 + # zhengze_time=r'\d{4}-\d{1,2}-\d{1,2}' + # zhengze_sql=re.split(zhengze_time,news_sql) + # zz_new_sql=zhengze_sql[0]+start_date+zhengze_sql[1]+end_date+zhengze_sql[2]+start_date+zhengze_sql[3]+end_date+zhengze_sql[4] + # zz_news_sql="""""" + # zz_news_sql+=zz_new_sql + # df_twenty_three = await ckdb.query_dataframe(zz_news_sql) + # #上下合并两组数据,忽略以前的索引下标 + # df= pd.concat([df,df_twenty_three], axis=0, ignore_index=True) + # df.sort_values('date', inplace=True) + # #去重 + # #df.drop_duplicates(inplace=True) + quota = res['quota'] #字段名 + ltv_n = res['ltv_n'] + #df = await ckdb.query_dataframe(sql) + if df.empty: + return schemas.Msg(code=-9, msg='查无数据') + df.fillna(0, inplace=True) #修改原对象,以0填补空缺值 + + # for d in set(res['date_range']) - set(df['date']): # 时间的差集运算 最后为空 + # df.loc[len(df)] = 0 + # df.loc[len(df) - 1, 'date'] = d + # days = (pd.Timestamp.now().date() - d).days # 时间差 + # # if days + 2 >= ltv_len: + # # continue + # df.iloc[len(df) - 1, days + 3:] = '-' + # df.sort_values('date', inplace=True) # 根据date进行倒叙排序 + + for d in set(res['date_range']) - set(df['date']): + 
#在有效日期最后一行补充行数据(值都为'-'),补充的行数为两个集合的差集长度 + df.loc[len(df)] = '-' + #在date此列补充多行数据(值为两个集合差集的子元素) + df.loc[len(df) - 1, 'date'] = d + # days = (d-pd.Timestamp.now().date()).days + # # if days + 2 >= ltv_len: + # # continue + # if days>0: + # df.iloc[len(df) - 1, 1:] = '-' + + df.sort_values('date', inplace=True) + df.rename(columns={'date': '注册日期'}, inplace=True) #True为将结果返回赋值给原变量,修改原对象,columns为列名 + cat = '角色数' + if quota == '#distinct_id': #如果字段名=字段名 + cat = '设备数' + df.rename(columns={'cnt1': cat}, inplace=True) #原数据基础上修改df里面列名为cnt1为设备数 + df1 = df[['注册日期', cat, *[f'LTV{i}' for i in ltv_n]]] #1, 2, 3, 4, 5, 6, 7, 8, 9, ~~到360 + df2 = df[['注册日期', cat, *[f'sumpay_{i}' for i in ltv_n]]] + df2.replace('-', 0, inplace=True) #True改变原数据,前面是需要替换的值,后面是替换后的值。 在原数据把下划线替换成0 + #修改下面代码 + # 去除sumpay_2的值为0的列 + new_df2 = (df2.drop(df2[(df2.sumpay_2 == 0)].index)) + #为new_df2排序 + new_df2=new_df2.reset_index(drop=True) + #求相差天数 + str_time = df2['注册日期'][0] + #str_time =new_df2['注册日期'][0] + str_time01=str(str_time) + split_time = str_time01.split('-') + #str_time = str(res['date_range'][0]) + # split_time = str_time.split('-') + now_time=time.strftime("%Y-%m-%d",time.localtime()) + split_now_time = now_time.split('-') + today = datetime.datetime(int(split_time[0]), int(split_time[1]), int(split_time[2])) + now_day = datetime.datetime(int(split_now_time[0]), int(split_now_time[1]), int(split_now_time[2])) + newday = (now_day - today).days + 1 + #计算方法运算每个LTV的均值 + _listData = {} + for i in ltv_n: + if i <=newday: + #计算均值 + #avgLtv = (new_df2[[f'sumpay_{i}']][0:newday + 1 - i].sum() / new_df2[cat][0:newday + 1 - i].sum()).round(2) + #12.20号计算LTV均值的时候分母包括当天未充值新增设备数,比剔除掉的计算值偏小 + avgLtv = (df2[[f'sumpay_{i}']][0:newday + 1 - i].sum() / df2[cat][0:newday + 1 - i].sum()).round(2) + #取出均值 + new_avgLtv=str(avgLtv).split('\n')[0].split(' ') + new_avgLtv01=new_avgLtv[len(new_avgLtv)-1] + if new_avgLtv01 == 'NaN': + _listData[f'sumpay_{i}'] = '-' + else: + _listData[f'sumpay_{i}'] = new_avgLtv01 + + 
#原代码 + # avgLtv=(df2[[f'sumpay_{i}']][0:newday+1-i].sum()/df2[cat][0:newday+1-i].sum()).round(2) + # new_avgLtv=str(avgLtv).split('\n')[0].split(' ') + # new_avgLtv01=new_avgLtv[len(new_avgLtv)-1] + # if new_avgLtv01 == 'NaN': + # _listData[f'sumpay_{i}'] = '-' + # else: + # _listData[f'sumpay_{i}'] = new_avgLtv01 + else: + _listData[f'sumpay_{i}']='-' + avgLtvlist = pd.Series(_listData) + + _listname=[] + #计算总累计LTV最后一个值 + for k, v in _listData.items(): + if v != 0 and v != '-': + # if v !=0: + _listname.append(k) + max_nmu=max(_listname, key=lambda k: int(k.split('_')[1])) + #max_num = (new_df2[[max_nmu]].sum() / new_df2[cat].sum()).round(2) + max_num=(df2[[max_nmu]].sum()/df2[cat].sum()).round(2) + max_number=str(max_num[0]) + df1.loc[len(df1)] = ['均值', df2[cat].sum(), *avgLtvlist] + #原代码 + #df1.loc[len(df1)] = ['均值', df2[cat].sum(), *avgLtvlist] + + # avg_ltv = (df2[[f'sumpay_{i}' for i in ltv_n]].sum() / df2[cat].sum()).round(2) + #df1.loc[len(df1)] = ['均值', df2[cat].sum(), *avg_ltv] + df1.insert(2, '累计LTV', 0) + last_ltv = [] + for items in df1.values: + for item in items[::-1]: + if item != '-': + last_ltv.append(item) + break + #修改累计LTV中最后一个值 + last_ltv[-1]=max_number + + + df1['累计LTV'] = last_ltv + + + #把列中累计LTV等于0的值改成'-' + #df1.loc[df1['累计LTV']==0, '累计LTV'] = '-' + #剔除行,列的累计LTV=='-'的剔除出去 + df3 = df1.drop(df1[(df1.LTV1 == '-')].index) + #df3 = df1.drop(df1[(df1.累计LTV=='-')].index) + + days = (pd.Timestamp.now().date() - pd.to_datetime(res['start_date']).date()).days + df1.iloc[len(df1) - 1, days + 4:] = '-' + + data = { + #'title': df1.columns.tolist(), + #'rows': df1.values.tolist(), + 'title': df3.columns.tolist(), + 'rows': df3.values.tolist(), + + 'start_date': res['start_date'], + 'end_date': res['end_date'] + } + + return schemas.Msg(code=0, msg='ok', data=data) + + +@router.post("/ltv_model_export") +async def ltv_model_export(request: Request, + game: str, + ckdb: CKDrive = Depends(get_ck_db), + analysis: XAnalysis = Depends(XAnalysis), + current_user: schemas.UserDB = 
Depends(deps.get_current_user) + ): + """ ltv分析 数据导出""" + await analysis.init(data_where=current_user.data_where) + data = analysis.ltv_model_sql() + file_name = quote(f'lvt.xlsx') + mime = mimetypes.guess_type(file_name)[0] + + sql = data['sql'] + df = await ckdb.query_dataframe(sql) + if df.empty: + return schemas.Msg(code=-9, msg='查无数据') + df_to_stream = DfToStream((df, 'ltv')) + with df_to_stream as d: + export = d.to_stream() + return StreamingResponse(export, media_type=mime, headers={'Content-Disposition': f'filename="{file_name}"'}) \ No newline at end of file diff --git a/api/api_v1/user_label/__init__.py b/api/api_v1/user_label/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/api/api_v1/user_label/controller.py b/api/api_v1/user_label/controller.py new file mode 100644 index 0000000..82e04a3 --- /dev/null +++ b/api/api_v1/user_label/controller.py @@ -0,0 +1,104 @@ +from fastapi import APIRouter, Request, Depends +from motor.motor_asyncio import AsyncIOMotorDatabase + +import schemas +from api import deps +from api.api_v1.user_label import service +from db import get_database + +router = APIRouter() + + +@router.post("/save") +async def save(request: Request, + data_in: schemas.UserLabelSave, + game: str, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) + ) -> schemas.Msg: + """用户标签保存""" + await service.save(db, data_in, request.user.username, game) + return schemas.Msg(code=0, msg='ok') + + +@router.get("/list") +async def get_list(request: Request, + # project_id: str, + game: str, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) + ) -> schemas.Msg: + """读取项目保存的用户标签""" + data = await service.get_list(db, game) + return schemas.Msg(code=0, msg='ok', data=data) + + +@router.post("/detail") +async def get_detail(request: Request, + game: str, + data_id: schemas.UserLabelDetail, + db: AsyncIOMotorDatabase = 
Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) + ) -> schemas.Msg: + """读取用户标签详细""" + data = await service.get_detail(db, data_id.label_id) + return schemas.Msg(code=0, msg='ok', data=data) + + +@router.post("/del") +async def delete(request: Request, + game: str, + data_id: schemas.UserLabelDel, + db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) + ) -> schemas.Msg: + """删除用户标签""" + data = await service.delete(db, data_id.label_id) + return schemas.Msg(code=0, msg='ok', data=data) + + +@router.post("/sql") +async def sql(request: Request, + data_in: schemas.UserLabelJson2Sql, + game: str, + # db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) + ) -> schemas.Msg: + """自定义用户标签 sql测试""" + data = await service.json2sql(game, data_in.cluster_name) + return schemas.Msg(code=0, msg='ok', data=data) + + +@router.post("/cluster_user_list") +async def cluster_user_list(request: Request, + game: str, + data_id: schemas.ReadClusterUser, + + # db: AsyncIOMotorDatabase = Depends(get_database), + current_user: schemas.UserDB = Depends(deps.get_current_user) + ) -> schemas.Msg: + """获取该标签用户列表""" + data = await service.get_cluster_user(game, data_id.cluster_name, data_id.page, data_id.limit) + return schemas.Msg(code=0, msg='ok', data=data) + + +@router.post("/cluster_user_count") +async def cluster_user_count(request: Request, + data_in: schemas.UserLabelJson2Sql, + game: str, + current_user: schemas.UserDB = Depends(deps.get_current_user) + ) -> schemas.Msg: + """获取该标签用户数量""" + data = await service.get_cluster_user_count(game, data_in.cluster_name) + return schemas.Msg(code=0, msg='ok', data=data) + + +@router.post("/copy") +async def copy(request: Request, + data_in: schemas.UserLabelCopy, + game: str, + current_user: schemas.UserDB = Depends(deps.get_current_user) + ) -> schemas.Msg: + """复制标签到其他项目""" + await 
service.copy_to(data_in.to_game, data_in.label_id_list, request.user.username) + return schemas.Msg(code=0, msg='ok') diff --git a/api/api_v1/user_label/service.py b/api/api_v1/user_label/service.py new file mode 100644 index 0000000..ebe91a9 --- /dev/null +++ b/api/api_v1/user_label/service.py @@ -0,0 +1,66 @@ +import pandas as pd +import numpy as np + +import crud +import schemas +from db import get_database +from db.ckdb import get_ck_db +from models.user_label import UserClusterDef + + +async def save(db, data_in, act_user, game): + return await crud.user_label.save(db, data_in, act_user, game) + + +async def read(db, data_in): + return await crud.user_label.read(db, data_in) + + +async def get_list(db, game): + return await crud.user_label.get_list(db, game) + + +async def get_detail(db, label_id): + return await crud.user_label.get(db, label_id) + + +async def delete(db, label_id): + await crud.user_label.delete_id(db, label_id) + return True + + +async def json2sql(game, date_in): + user_cluster_def = UserClusterDef(game, date_in) + await user_cluster_def.init() + return user_cluster_def.to_sql() + + +async def get_cluster_user(game, cluster_name, page, limit): + user_cluster_def = UserClusterDef(game, cluster_name, page=page, limit=limit) + await user_cluster_def.init() + sql = user_cluster_def.cluster_user_list() + ckdb = get_ck_db() + df = await ckdb.query_dataframe(sql) + df.fillna(0, inplace=True) + return { + 'columns': df.columns.tolist(), + 'values': df.values.tolist() + } + + +async def get_cluster_user_count(game, date_in): + user_cluster_def = UserClusterDef(game, date_in) + await user_cluster_def.init() + sql = user_cluster_def.cluster_user_count() + ckdb = get_ck_db() + df = await ckdb.query_dataframe(sql) + return {'num': int(df.loc[0, 'values'])} + + +async def copy_to(to_game, ids, act_name): + db = get_database() + docs = await crud.user_label.find_ids(db, *ids) + for item in docs: + data = schemas.UserLabelSave(**item) + await 
crud.user_label.save(db, data, act_name, to_game) + return True diff --git a/api/deps.py b/api/deps.py new file mode 100644 index 0000000..560cbc2 --- /dev/null +++ b/api/deps.py @@ -0,0 +1,61 @@ +from fastapi import Depends, status, HTTPException +from fastapi.security import OAuth2PasswordBearer +from jose import jwt +from motor.motor_asyncio import AsyncIOMotorDatabase +from pydantic import ValidationError +from starlette.authentication import AuthenticationError + +import crud +import schemas +import utils +from core import security +from core.config import settings +from db import get_database +from db.ckdb import CKDrive, get_ck_db + +reusable_oauth2 = OAuth2PasswordBearer( + tokenUrl=f"{settings.API_V1_STR}/user/login" +) + + +def get_current_user(token: str = Depends(reusable_oauth2) + ) -> schemas.UserDB: + # def get_current_user(token: str + # ) -> schemas.UserDBBase: + try: + payload = jwt.decode( + token, settings.SECRET_KEY, algorithms=[security.ALGORITHM] + ) + user = schemas.UserDB(**payload) + except (jwt.JWTError, ValidationError): + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="Could not validate credentials", + ) + if not user: + raise HTTPException(status_code=404, detail="User not found") + return user + + +def get_current_user2(token: str) -> schemas.UserDB: + try: + payload = jwt.decode( + token, settings.SECRET_KEY, algorithms=[security.ALGORITHM] + ) + user = schemas.UserDB(**payload) + except (jwt.JWTError, ValidationError): + raise AuthenticationError() + if not user: + raise HTTPException(status_code=404, detail="User not found") + return user + + +async def get_game_project(game: str, db: AsyncIOMotorDatabase = Depends(get_database)) -> str: + is_exists = await crud.project.find_one(db, {'game': game}, {'_id': True}) + if not is_exists: + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail='没有该项目' + ) + return game + diff --git a/ck_test.py b/ck_test.py new file mode 100644 index 
0000000..60531f1 --- /dev/null +++ b/ck_test.py @@ -0,0 +1,35 @@ +from datetime import datetime + +import asyncio +from aioch import Client + +from core.config import settings + + +async def exec_progress(): + client = Client('119.29.176.224') + + progress = await client.execute_with_progress('show databases') + timeout = 20 + started_at = datetime.now() + + async for num_rows, total_rows in progress: + done = num_rows / total_rows if total_rows else total_rows + now = datetime.now() + # Cancel query if it takes more than 20 seconds to process 50% of rows. + if (now - started_at).total_seconds() > timeout and done < 0.5: + await client.cancel() + break + else: + rv = await progress.get_result() + print(rv) + + +async def exec_no_progress(): + client = Client(**settings.CK_CONFIG) + rv = await client.execute('show databases') + print(rv) + + +loop = asyncio.get_event_loop() +loop.run_until_complete(asyncio.wait([exec_no_progress()])) diff --git a/common/__init__.py b/common/__init__.py new file mode 100644 index 0000000..c8b272a --- /dev/null +++ b/common/__init__.py @@ -0,0 +1 @@ +from .compute import * \ No newline at end of file diff --git a/common/compute.py b/common/compute.py new file mode 100644 index 0000000..48f0479 --- /dev/null +++ b/common/compute.py @@ -0,0 +1,12 @@ +import numpy as np + + +def division(a, b, n=2): + res = 0 + try: + res = round(a / b, n) + if np.isnan(res) or res == np.inf: + res = 0 + except ZeroDivisionError: + pass + return res diff --git a/core/__init__.py b/core/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/core/config.py b/core/config.py new file mode 100644 index 0000000..0abd1f4 --- /dev/null +++ b/core/config.py @@ -0,0 +1,417 @@ +import sys +from typing import Any, Dict, List, Optional, Union + +from pydantic import AnyHttpUrl, BaseSettings, EmailStr, HttpUrl, validator +from sqlalchemy import func, and_ + + +class Settings(BaseSettings): + PROJECT_NAME: str = '人事后台' + API_V1_STR: str = '/api/v1' + + 
BACKEND_CORS_ORIGINS: List[str] = ['*'] + + CASBIN_COLL: str = 'casbin_rule' + + SUPERUSER_EMAIL: str = '15392746632@qq.com' + SUPERUSER_PASSWORD: str = '123456' + SUPERUSER_NAME: str = 'root' + SUPERUSER_NICKNAME: str = 'root' + ACCOUNT_COMMON_PASSWORD = 'AWDMIPOUEQfO3q84' + + DEFAULT_PASSWORD = '123456' + + ACCESS_TOKEN_EXPIRE_MINUTES: int = 60 * 24 * 8 + SECRET_KEY: str = 'ZaFX6EypK6PtuhGv11q4DLRvAb0csiLx4dbKUwLwCe8' + + + CK_CONFIG = {'host': '10.0.1.111', + 'port': 8123, + 'user': 'default', + 'password': '498588' + } + + CK_CALC_SYMBO = { + '==': lambda col, *val: col == val[0], + '>=': lambda col, *val: col >= val[0], + '<=': lambda col, *val: col <= val[0], + '>': lambda col, *val: col > val[0], + '<': lambda col, *val: col < val[0], + 'is not null': lambda col, *val: col.isnot(None), + 'is null': lambda col, *val: col.is_(None), + 'like': lambda col, *val: col.like(f'%{val[0]}%'), + 'not like': lambda col, *val: col.notlike(f'%{val[0]}%'), + 'in': lambda col, *val: col.in_(val[0]), + 'not in': lambda col, *val: col.notin_(val[0]), + '!=': lambda col, *val: col != val[0], + 'range': lambda col, *val: and_(col >= val[0], col < val[1]) + + } + + CK_TYPE_DICT = {"DateTime('UTC')": 'datetime', + "Nullable(DateTime('UTC'))": 'datetime', + "DateTime()": 'datetime', + + "Nullable(IPv4)": 'string', + "IPv4": 'string', + + "String": 'string', + "Nullable(String)": 'string', + + "Nullable(UInt8)": 'int', + "UInt8": 'string', + + "Nullable(Int8)": 'int', + "Int8": 'string', + + "Nullable(UInt16)": 'int', + "UInt16": 'string', + + "Nullable(Int16)": 'int', + "Int16": 'string', + + "Nullable(UInt32)": 'int', + "UInt32": 'string', + + "Nullable(UInt64)": 'int', + "UInt64": 'string', + + "Nullable(Int64)": 'int', + "Int64": 'string', + + "Array(String)": 'array', + + "Nullable(Float)": 'float', + "Float": 'float', } + + CK_FUNC = { + 'sum': lambda x: func.sum(x), + 'avg': lambda x: func.round(func.avg(x), 2), + 'median': lambda x: func.median(x), + 'max': lambda x: 
func.max(x), + 'min': lambda x: func.min(x), + 'distinct_count': lambda x: func.uniqExact(x), + 'uniqExact': lambda x: func.uniqExact(x), + } + + CK_OPERATOR = { + 'int': [{ + 'id': 'sum', + 'title': '总和' + }, { + 'id': 'avg', + 'title': '均值' + }, { + 'id': 'median', + 'title': '中位数' + }, { + 'id': 'max', + 'title': '最大值' + }, { + 'id': 'min', + 'title': '最小值' + }, { + 'id': 'distinct_count', + 'title': '去重数' + }, + + ], + 'string': [{ + 'id': 'uniqExact', + 'title': '去重数' + }], + 'datetime': [{ + 'id': 'uniqExact', + 'title': '去重数' + }], + 'float': [{ + 'id': 'sum', + 'title': '总和' + }, { + 'id': 'avg', + 'title': '均值' + }, { + 'id': 'median', + 'title': '中位数' + }, { + 'id': 'max', + 'title': '最大值' + }, { + 'id': 'min', + 'title': '最小值' + }, { + 'id': 'distinct_count', + 'title': '去重数' + }, + + ], + 'array': [ + { + 'id': 'list_distinct', + 'title': '列表去重数' + }, + { + 'id': 'set_distinct', + 'title': '集合去重数' + }, + { + 'id': 'ele_distinct', + 'title': '元素去重数' + }, + ] + } + + CK_FILTER = { + 'int': [{ + 'id': '==', + 'title': '等于' + }, { + 'id': '!=', + 'title': '不等于' + }, { + 'id': '<', + 'title': '小于' + }, { + 'id': '<=', + 'title': '小于等于' + }, { + 'id': '>', + 'title': '大于' + }, { + 'id': '>=', + 'title': '大于等于' + }, { + 'id': 'is not null', + 'title': '有值' + }, { + 'id': 'is null', + 'title': '无值' + }, { + 'id': 'range', + 'title': '区间' + }, + ], + 'string': [{ + 'id': '==', + 'title': '等于' + }, { + 'id': '!=', + 'title': '不等于' + }, { + 'id': 'like', + 'title': '包含' + }, { + 'id': 'not like', + 'title': '不包含' + }, { + 'id': 'is not null', + 'title': '有值' + }, { + 'id': 'is null', + 'title': '无值' + }, { + 'id': 'in', + 'title': '条件多选' + #'title': '在列表里' + }, + # { + # 'id': 'regex', + # 'title': '正则匹配' + # }, { + # 'id': 'not regex', + # 'title': '正则不匹配' + # }, + ], + 'float': [{ + 'id': '==', + 'title': '等于' + }, { + 'id': '!=', + 'title': '不等于' + }, { + 'id': '<', + 'title': '小于' + }, { + 'id': '>', + 'title': '大于' + }, { + 'id': 'is not null', + 'title': 
'有值' + }, { + 'id': 'is null', + 'title': '无值' + }, + # { + # 'id': 'range', + # 'title': '区间' + # }, + ], + 'datetime': [ + { + 'id': '>', + 'title': '大于' + }, + { + 'id': '>=', + 'title': '大于等于' + }, + { + 'id': '<', + 'title': '小于' + }, + { + 'id': '<=', + 'title': '小于等于' + }, + { + 'id': 'is not null', + 'title': '有值' + }, + { + 'id': 'is null', + 'title': '无值' + }, + ], + 'user_label': [ + { + 'id': 'in', + 'title': '是' + }, + { + 'id': 'not in', + 'title': '不是' + }, + ], + 'array': [ + { + 'id': 'is not null', + 'title': '有值' + }, + { + 'id': 'is null', + 'title': '无值' + } + ] + } + ARITHMETIC = { + '+': lambda x, y: x + y, + '-': lambda x, y: x - y, + '*': lambda x, y: x * y, + '/': lambda x, y: x / y, + #'%': lambda x, y:(x)-int(x/y)*(y) 取模用 + } + + PROPHET_TIME_GRAIN_MAP = { + "PT1S": "S", + "PT1M": "min", + "PT5M": "5min", + "PT10M": "10min", + "PT15M": "15min", + "PT0.5H": "30min", + "PT1H": "H", + "P1D": "D", + "P1W": "W", + "P1M": "MS", + "total": "D", + } + + TIME_GRAIN_EXPRESSIONS = { + 'PT1S': lambda col, zone: func.toStartOfSecond(func.addHours(col, zone)).label('date'), + 'PT1M': lambda col, zone: func.toStartOfMinute(func.addHours(col, zone)).label('date'), + 'PT5M': lambda col, zone: func.toStartOfFiveMinute(func.addHours(col, zone)).label('date'), + 'PT10M': lambda col, zone: func.toStartOfTenMinutes(func.addHours(col, zone)).label('date'), + 'PT15M': lambda col, zone: func.toStartOfFifteenMinutes(func.addHours(col, zone)).label('date'), + 'PT1H': lambda col, zone: func.toStartOfHour(func.addHours(col, zone)).label('date'), + 'P1D': lambda col, zone: func.toDate(func.addHours(col, zone)).label('date'), + 'total': lambda col, zone: func.toStartOfDay(func.addHours(col, zone)).label('date'), + 'P1W': lambda col, zone: func.toStartOfWeek(func.addHours(col, zone)).label('date'), + 'P1M': lambda col, zone: func.toStartOfMonth(func.addHours(col, zone)).label('date'), + 'HOUR': lambda col, zone: func.toHour(func.addHours(col, zone)).label('date'), + } 
+ + DEFAULT_FIELD: dict = { + '#ip': 'ipv4', + '#country': 'string', + '#province': 'string', + '#city': 'string', + '#os': 'string', + '#device_id': 'string', + '#screen_height': 'integer', + '#screen_width': 'integer', + '#device_model': 'string', + '#app_version': 'string', + '#bundle_id': 'string', + '#app_name': 'string', + '#game_version': 'string', + '#os_version': 'string', + '#network_type': 'string', + '#carrier': 'string', + '#manufacturer': 'string', + '#app_id': 'string', + '#account_id': 'string', + '#distinct_id': 'string', + 'binduid': 'string', + 'channel': 'string', + 'owner_name': 'string', + 'role_name': 'string', + 'exp': 'integer', + 'zhanli': 'integer', + 'maxmapid': 'integer', + 'mapid': 'integer', + 'ghid': 'string', + 'rmbmoney': 'integer', + 'jinbi': 'integer', + 'svrindex': 'string', + 'lv': 'integer', + 'vip': 'integer', + 'game': 'string', + + # 'unitPrice': 'integer', + # 'money': 'string', + # 'isdangrishouci': 'integer', + # 'islishishouci': 'integer', + # 'is_today_reg': 'integer', + # 'orderid': 'string', + # 'proid': 'string', + # + # 'step_id': 'integer', + # 'step_group': 'integer', + # 'guide_start_time': 'integer', + # + # 'online_ts': 'integer' + } + + class Config: + case_sensitive = True + + +# class Debug(Settings): +# MDB_HOST: str = '10.0.0.9' +# MDB_PORT: int = 27017 +# MDB_USER: str = 'root' +# MDB_PASSWORD: str = 'iamciniao' +# MDB_DB: str = 'hr_system' +# +# DATABASE_URI = f'mongodb://{MDB_USER}:{MDB_PASSWORD}@{MDB_HOST}:{MDB_PORT}/admin' +#本地MongoDB的库测试 +class Debug(Settings): + MDB_HOST: str = '127.0.0.1' + MDB_PORT: int = 27017 + MDB_DB: str = 'hr_system' + + DATABASE_URI = f'mongodb://{MDB_HOST}:{MDB_PORT}/admin' + +class Produce(Settings): + MDB_HOST: str = '127.0.0.1' + MDB_PORT: int = 27017 + MDB_USER: str = 'root' + MDB_PASSWORD: str = 'iamciniao' + MDB_DB: str = 'hr_system' + + DATABASE_URI = f'mongodb://{MDB_USER}:{MDB_PASSWORD}@{MDB_HOST}:{MDB_PORT}/admin' + + +if sys.platform == 'linux': + settings = 
Produce() +else: + settings = Debug() diff --git a/core/security.py b/core/security.py new file mode 100644 index 0000000..bd16324 --- /dev/null +++ b/core/security.py @@ -0,0 +1,32 @@ +from datetime import datetime, timedelta + +from jose import jwt +from passlib.context import CryptContext + +from core.config import settings + +pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto") + +ALGORITHM = "HS256" + + +def create_access_token( + expires_delta: timedelta = None, **payload +) -> str: + if expires_delta: + expire = datetime.utcnow() + expires_delta + else: + expire = datetime.utcnow() + timedelta( + minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES + ) + payload["exp"] = expire + encoded_jwt = jwt.encode(payload, settings.SECRET_KEY, algorithm=ALGORITHM) + return encoded_jwt + + +def verify_password(plain_password: str, hashed_password: str) -> bool: + return pwd_context.verify(plain_password, hashed_password) + + +def get_password_hash(password: str) -> str: + return pwd_context.hash(password) diff --git a/crud/__init__.py b/crud/__init__.py new file mode 100644 index 0000000..8094eaa --- /dev/null +++ b/crud/__init__.py @@ -0,0 +1,23 @@ +from .crud_user import user +from .crud_project import project +from .crud_folder import folder +from .crud_space import space +from .crud_dashboard import dashboard +from .crud_report import report +from .crud_authority import authority +from .crud_data_auth import data_auth +from .crud_data_attr import data_attr +from .crud_api_log import api_log +from .crud_event_mana import event_mana +from .crud_api_list import api_list +from .crud_role import role +from .crud_check_data import check_data +from .user_label import user_label +from .select_map import select_map +from .crud_project_number import project_number +from .crud_proid_map import proid_map +from .crud_api_board import api_board +from .crud_url_list import url_list +from .crud_user_url import user_url +from .crud_api_module import api_module +from 
.crud_event_list import event_list \ No newline at end of file diff --git a/crud/base.py b/crud/base.py new file mode 100644 index 0000000..a9d1af4 --- /dev/null +++ b/crud/base.py @@ -0,0 +1,62 @@ +from typing import Union + +from bson import ObjectId +from motor.motor_asyncio import AsyncIOMotorDatabase + + +class CRUDBase: + def __init__(self, coll_name): + self.coll_name = coll_name + + async def get(self, db, id: Union[ObjectId, str], *args, **kwargs) -> dict: + return (await db[self.coll_name].find_one({'_id': id}, *args, **kwargs)) or dict() + + async def insert_one(self, db, document): + return await db[self.coll_name].insert_one(document) + + async def find_one(self, db, filter=None, *args, **kwargs): + return (await db[self.coll_name].find_one(filter, *args, **kwargs)) or dict() + + async def exists(self, db, filter=None, *args, **kwargs): + return bool(await db[self.coll_name].find_one(filter, *args, **kwargs)) or False + + async def read_have(self, db, v: str, **kwargs): + where = {'members': v} + where.update(kwargs) + cursor = db[self.coll_name].find(where) + return await cursor.to_list(length=9999) + + async def find_many(self, db, *args, **kwargs): + cursor = db[self.coll_name].find(*args, **kwargs) + return await cursor.to_list(length=9999) + + def find(self, db, *args, **kwargs): + cursor = db[self.coll_name].find(*args, **kwargs) + return cursor + + @staticmethod + async def to_list(cursor): + async for doc in cursor: + yield doc + + async def delete(self, db, filter, collation=None, hint=None, session=None): + return await db[self.coll_name].delete_many(filter, collation, hint, session) + + async def delete_id(self, db, *args): + return await db[self.coll_name].delete_many({'_id': {'$in': list(args)}}) + + async def update_one(self, db, filter, update, upsert=False): + res = await db[self.coll_name].update_one(filter, update, upsert) + return res + + async def update_many(self, db, filter, update, upsert=False): + return await 
db[self.coll_name].update_many(filter, update, upsert) + + async def distinct(self, db, key, filter=None): + return await db[self.coll_name].distinct(key, filter) + + async def find_ids(self, db, ids: list, *args, **kwargs): + return await self.find_many(db, {'_id': {'$in': ids}}, *args, **kwargs) + + # async def _create_index(self, db: AsyncIOMotorDatabase, *args, **kwargs): + # return await db[self.coll_name].create_index(*args, **kwargs) diff --git a/crud/crud_api_board.py b/crud/crud_api_board.py new file mode 100644 index 0000000..28b2a93 --- /dev/null +++ b/crud/crud_api_board.py @@ -0,0 +1,35 @@ +from motor.motor_asyncio import AsyncIOMotorDatabase +import schemas +from crud.base import CRUDBase + +__all__ = 'api_board', + +from schemas import ProjectDB + +from utils import get_uid +class CRUDProjectNumber(CRUDBase): + # 获取所有数据 + async def all_api(self, db: AsyncIOMotorDatabase): + return await self.find_many(db) + + # 修改数据 + async def update(self, db: AsyncIOMotorDatabase, data_in: schemas.Api_board,opinion): + name = data_in.name + api_name=data_in.api_name + api_path=data_in.api_path + if opinion == True: + await self.update_one(db, {'name': name,'api_name':api_name}, {'$set': {'api_path': api_path}}) + else: + await self.update_one(db, {'name': name, 'api_path': api_path}, {'$set': {'api_name':api_name}}) + # 插入数据 + async def insert(self, db: AsyncIOMotorDatabase, data_in: schemas.Api_board): + await self.insert_one(db, data_in.dict()) + #删除数据 + async def del_api(self, db: AsyncIOMotorDatabase, data_in: schemas.Api_board): + + return await self.delete(db,data_in.dict()) + + + + +api_board = CRUDProjectNumber('api_board') \ No newline at end of file diff --git a/crud/crud_api_list.py b/crud/crud_api_list.py new file mode 100644 index 0000000..9275edd --- /dev/null +++ b/crud/crud_api_list.py @@ -0,0 +1,41 @@ +from motor.motor_asyncio import AsyncIOMotorDatabase + +import schemas +from crud.base import CRUDBase + +__all__ = 'api_list', + +from utils import 
get_uid + + +class CRUDApiList(CRUDBase): + async def add_api(self, db: AsyncIOMotorDatabase, data_in: schemas.AddApi): + where = {'path': data_in.path} + data = {'$set': schemas.AddApiDB(**data_in.dict()).dict(by_alias=True)} + + return await self.update_one(db, where, data, upsert=True) + + async def update_api(self, db: AsyncIOMotorDatabase, data_in: schemas.UpdateApi): + where = {'path': data_in.path} + data = {'$set': data_in.dict()} + is_exists = await self.find_one(db, {'path': data_in.path}) + if not is_exists: + data['$set']['_id'] = get_uid() + return await self.update_one(db, where, data, upsert=True) + + async def edit_api(self, db: AsyncIOMotorDatabase, data_in: schemas.EditApi): + where = {'_id': data_in.id} + data = {'$set': data_in.dict(exclude={'id'})} + return await self.update_one(db, where, data) + + async def all_api(self, db: AsyncIOMotorDatabase): + return await self.find_many(db) + + async def del_api(self, db: AsyncIOMotorDatabase, data_in: schemas.DelApi): + return await self.delete_id(db, *data_in.ids) + + async def create_index(self, db: AsyncIOMotorDatabase): + await db[self.coll_name].create_index('path', unique=True) + + +api_list = CRUDApiList('api_list') diff --git a/crud/crud_api_log.py b/crud/crud_api_log.py new file mode 100644 index 0000000..cc4bfa4 --- /dev/null +++ b/crud/crud_api_log.py @@ -0,0 +1,14 @@ +from motor.motor_asyncio import AsyncIOMotorDatabase + +import schemas +from crud.base import CRUDBase + +__all__ = 'api_log', + + +class CRUDApiLog(CRUDBase): + async def insert_log(self, db: AsyncIOMotorDatabase, data_in: schemas.ApiLogInsert): + await db[self.coll_name].insert_one(data_in.dict()) + + +api_log = CRUDApiLog('api_log') diff --git a/crud/crud_api_module.py b/crud/crud_api_module.py new file mode 100644 index 0000000..a43923f --- /dev/null +++ b/crud/crud_api_module.py @@ -0,0 +1,35 @@ +from motor.motor_asyncio import AsyncIOMotorDatabase +import schemas +from crud.base import CRUDBase + +__all__ = 
'api_module', + + +class Api_module(CRUDBase): + + # 获取权限模板信息 + async def get_api_module(self, db: AsyncIOMotorDatabase): + return await self.find_many(db) + + # 获取一个用户的权限信息 + async def get_quanxian(self, db: AsyncIOMotorDatabase, data_in: schemas.Url_quanxian): + return await self.find_one(db, {'user_id': data_in.user_id}) + + # 插入一条全新的用户权限信息 + async def insert_quanxian(self, db: AsyncIOMotorDatabase, data_in: schemas.Url_module): + return await self.insert_one(db, data_in.dict()) + + # 更新一条用户权限信息 + async def updata_quanxian_module(self, db: AsyncIOMotorDatabase, data_in: schemas.Url_module): + return await self.update_one(db, {'auth_id': data_in.auth_id, 'path_name': data_in.path_name}, + {'$set': {'api_list': data_in.api_list, 'api_name': data_in.api_name, + 'state': data_in.state}}) + #获取一条权限模板信息 + async def get_one_module(self, db: AsyncIOMotorDatabase, data_in: schemas.Add_module): + return await self.find_one(db, {'auth_id': data_in.auth_id}) + #更新一条权限模板状态 + async def update_one_module(self, db: AsyncIOMotorDatabase, res): + return await self.update_one(db, {'_id':res['_id']}, { + '$set': {'state':res['state']}}) + +api_module = Api_module('api_module') \ No newline at end of file diff --git a/crud/crud_authority.py b/crud/crud_authority.py new file mode 100644 index 0000000..636f44a --- /dev/null +++ b/crud/crud_authority.py @@ -0,0 +1,92 @@ +from copy import deepcopy + +import pymongo +from motor.motor_asyncio import AsyncIOMotorDatabase + +from core.config import settings +from crud.base import CRUDBase +from schemas import * +from utils import * + +__all__ = 'authority', + + +class CRUDAuthority(CRUDBase): + + async def create(self, db: AsyncIOMotorDatabase, *args, **kwargs): + data = dict() + if len(args) > 0: + data['ptype'] = args[0] + if len(args) > 1: + data['v0'] = args[1] + if len(args) > 2: + data['v1'] = args[2] + if len(args) > 3: + data['v2'] = args[3] + if len(args) > 4: + data['v3'] = args[4] + if len(args) > 5: + data['v4'] = args[5] + + 
data.update(kwargs) + await self.update_one(db, data, {'$set': data}, upsert=True) + + # async def get_all_role(self, db): + # # todo 避免与用户同名 + # await self.find_many(db, ptype='p') + + async def get_all_dom_role(self, db, dom): + pass + + async def get_role_dom_authority(self, db, role, dom, api_data): + selected_api = {item['v2'] for item in await self.find_many(db, {'v0':role, 'v1':dom})} + + anonymous_api = {item['v2'] for item in await self.find_many(db, {'v0':'*'})} + + api_data = deepcopy(api_data) + + for api, data in api_data.items(): + if api in selected_api or '*' in selected_api or api in anonymous_api: + data['selected'] = True + else: + data['selected'] = False + res = {} + for api, item in api_data.items(): + res.setdefault(item['title'], list()) + res[item['title']].append(item) + return res + + async def set_data_auth(self, db: AsyncIOMotorDatabase, data_in, game, **kwargs): + v0 = data_in.username + v2 = game + data_auth_id = data_in.data_auth_id + set_data = {'data_auth_id': data_auth_id} + set_data.update(kwargs) + await self.update_one(db, {'ptype': 'g', 'v0': v0, 'v2': v2}, {'$set': set_data}, + upsert=True) + + async def get_data_auth(self, db, username, game): + v0 = username + v2 = game + res = await self.find_one(db, {'ptype': 'g', 'v0': v0, 'v2': v2, 'data_auth_id': {'$exists': 1}}, + {'_id': 0, 'data_auth_id': 1}) + # 没有设置或者设置为*认为是全部事件 + return res.get('data_auth_id') if res.get('data_auth_id', '*') != '*' else None + + async def get_all_user(self, db: AsyncIOMotorDatabase): + return await self.distinct(db, 'v0', {'ptype': 'g'}) + + async def get_data_auth_id(self, db, game, username): + res = await self.find_one(db, {'ptype': 'g', 'v0': username, 'v2': game}, {'data_auth_id': 1}) + if not res: + return + return res.get('data_auth_id', '*') + + async def create_index(self, db: AsyncIOMotorDatabase): + await db[self.coll_name].create_index( + [('ptype', pymongo.DESCENDING), ('v0', pymongo.DESCENDING), ('v1', pymongo.DESCENDING), + ('v2', 
pymongo.DESCENDING), ('v3', pymongo.DESCENDING)], + unique=True) + + +authority = CRUDAuthority(settings.CASBIN_COLL) diff --git a/crud/crud_check_data.py b/crud/crud_check_data.py new file mode 100644 index 0000000..4f25f1f --- /dev/null +++ b/crud/crud_check_data.py @@ -0,0 +1,15 @@ +from motor.motor_asyncio import AsyncIOMotorDatabase + +import schemas +from crud.base import CRUDBase + +__all__ = 'check_data', + + + + +class CRUDCheckData(CRUDBase): + pass + + +check_data = CRUDCheckData('check_data') diff --git a/crud/crud_dashboard.py b/crud/crud_dashboard.py new file mode 100644 index 0000000..ef9d6da --- /dev/null +++ b/crud/crud_dashboard.py @@ -0,0 +1,29 @@ +import pymongo +from motor.motor_asyncio import AsyncIOMotorDatabase + +from crud.base import CRUDBase +from schemas import * + +__all__ = 'dashboard', + + +class CRUDDashboard(CRUDBase): + + async def create(self, db: AsyncIOMotorDatabase, obj_in: DashboardCreate, user_id: str): + db_obj = DashboardDB( + **obj_in.dict(), user_id=user_id, + _id=uuid.uuid1().hex + + ) + await db[self.coll_name].insert_one(db_obj.dict(by_alias=True)) + + async def set_sort(self, db: AsyncIOMotorDatabase, index: str, sort: int): + await self.update_one(db, {'_id': index}, {'$set': {'sort': sort}}) + + async def create_index(self, db: AsyncIOMotorDatabase): + await db[self.coll_name].create_index( + [('project_id', pymongo.DESCENDING), ('name', pymongo.DESCENDING), ('user_id', pymongo.DESCENDING)], + unique=True) + + +dashboard = CRUDDashboard('dashboard') diff --git a/crud/crud_data_attr.py b/crud/crud_data_attr.py new file mode 100644 index 0000000..8618db7 --- /dev/null +++ b/crud/crud_data_attr.py @@ -0,0 +1,24 @@ +import pymongo +from bson import ObjectId +from motor.motor_asyncio import AsyncIOMotorDatabase + +import schemas +from crud.base import CRUDBase +from schemas import * + +__all__ = 'data_attr', + + +class CRUDDataAttr(CRUDBase): + + async def edit_data_attr(self, db: AsyncIOMotorDatabase, game: str, 
data_id: schemas.DataAttrEdit): + await self.update_one(db, {'game': game, 'cat': data_id.cat, 'name': data_id.name}, {'$set': data_id.dict()}, + upsert=True) + + async def create_index(self, db: AsyncIOMotorDatabase): + await db[self.coll_name].create_index( + [('game', pymongo.DESCENDING), ('cat', pymongo.DESCENDING), ('name', pymongo.DESCENDING)], + unique=True) + + +data_attr = CRUDDataAttr('data_attr') diff --git a/crud/crud_data_auth.py b/crud/crud_data_auth.py new file mode 100644 index 0000000..e9203a6 --- /dev/null +++ b/crud/crud_data_auth.py @@ -0,0 +1,38 @@ +import pymongo +from bson import ObjectId +from motor.motor_asyncio import AsyncIOMotorDatabase + +from crud.base import CRUDBase +from schemas import * + +__all__ = 'data_auth', + + +class CRUDDataAuth(CRUDBase): + + async def create(self, db: AsyncIOMotorDatabase, obj_in: DataAuthCreate, game): + data = obj_in.dict() + data['game'] = game + data['update_date'] = datetime.now() + await self.update_one(db, data, {'$set': data}, upsert=True) + + async def get_game_data_auth(self, db, game): + return await self.find_many(db, {'game':game}) + + async def edit_data_auth(self, db, data_in: DataAuthEdit): + return await self.update_one(db, {'_id': ObjectId(data_in.data_auth_id)}, + {'$set': {'title': data_in.title, + 'data': data_in.data, + 'update_date': datetime.now() + }}) + + # async def get_user_for_game_auth(self, db, game, username): + # await self.find_one({'ptype': 'g'}) + + async def create_index(self, db: AsyncIOMotorDatabase): + await db[self.coll_name].create_index( + [('game', pymongo.DESCENDING), ('title', pymongo.DESCENDING)], + unique=True) + + +data_auth = CRUDDataAuth('data_auth') diff --git a/crud/crud_event_list.py b/crud/crud_event_list.py new file mode 100644 index 0000000..ae86697 --- /dev/null +++ b/crud/crud_event_list.py @@ -0,0 +1,25 @@ +from motor.motor_asyncio import AsyncIOMotorDatabase + +import schemas +from crud.base import CRUDBase + +__all__ = 'event_list', + + +class 
EventMap(CRUDBase): + async def save(self, db: AsyncIOMotorDatabase, data_in: schemas.Event_list): + where = {'game': data_in.game} + return await self.update_one(db, where, {'$set': data_in.dict(skip_defaults=True)}, upsert=True) + + async def get_list(self, db: AsyncIOMotorDatabase, game: str): + where = {'game': game} + res = await self.find_many(db, where,{'_id': 0}) + return res + + async def get_select(self, db: AsyncIOMotorDatabase, data_in: schemas.SelectAttr, game: str): + where = {'game': game, **data_in.dict()} + res = await self.find_one(db, where, {'_id': 0}) + return res + + +event_list = EventMap('event_list') diff --git a/crud/crud_event_mana.py b/crud/crud_event_mana.py new file mode 100644 index 0000000..79691c2 --- /dev/null +++ b/crud/crud_event_mana.py @@ -0,0 +1,35 @@ +import pymongo +from bson import ObjectId +from motor.motor_asyncio import AsyncIOMotorDatabase + +import schemas +from crud.base import CRUDBase +from schemas import * + +__all__ = 'event_mana', + + +class CRUDEventMap(CRUDBase): + + async def edit_event_mate(self, db: AsyncIOMotorDatabase, game: str, data_id: schemas.EventMateEdit): + await self.update_one(db, {'game': game, 'event_name': data_id.event_name}, {'$set': data_id.dict()}, + upsert=True) + + async def get_show_name(self, db: AsyncIOMotorDatabase, game: str, event_name: str): + res = await self.find_one(db, {'game': game, 'event_name': event_name}) + return res.get('show_name', event_name) + + async def get_all_show_name(self, db: AsyncIOMotorDatabase, game: str): + cursor = self.find(db, {'game': game}) + res = {} + async for item in self.to_list(cursor): + res[item['event_name']] = item['show_name'] + return res + + async def create_index(self, db: AsyncIOMotorDatabase): + await db[self.coll_name].create_index( + [('game', pymongo.DESCENDING), ('event_name', pymongo.DESCENDING)], + unique=True) + + +event_mana = CRUDEventMap('event_mana') diff --git a/crud/crud_folder.py b/crud/crud_folder.py new file mode 100644 
index 0000000..8f9621b --- /dev/null +++ b/crud/crud_folder.py @@ -0,0 +1,30 @@ +import pymongo +from motor.motor_asyncio import AsyncIOMotorDatabase + +from crud.base import CRUDBase +from schemas import * + +__all__ = 'folder', + + +class CRUDFolder(CRUDBase): + + async def create(self, db: AsyncIOMotorDatabase, obj_in: FolderCreate, user_id: str): + db_obj = FolderDB( + **obj_in.dict(), user_id=user_id, + members=[user_id], + _id=uuid.uuid1().hex + + ) + await db[self.coll_name].insert_one(db_obj.dict(by_alias=True)) + + async def read_folder(self, db, user_id, project_id, cat): + return await self.read_have(db, user_id, project_id=project_id, cat=cat) + + async def create_index(self, db: AsyncIOMotorDatabase): + await db[self.coll_name].create_index( + [('project_id', pymongo.DESCENDING), ('name', pymongo.DESCENDING), ('user_id', pymongo.DESCENDING)], + unique=True) + + +folder = CRUDFolder('folder') diff --git a/crud/crud_proid_map.py b/crud/crud_proid_map.py new file mode 100644 index 0000000..28a249b --- /dev/null +++ b/crud/crud_proid_map.py @@ -0,0 +1,29 @@ +import pymongo +from bson import ObjectId +from motor.motor_asyncio import AsyncIOMotorDatabase + +import schemas +from crud.base import CRUDBase +from schemas import * + +__all__ = 'proid_map', + + +class CRUDProidmap(CRUDBase): + # 将两个字段按对应关系组合成字典返回 + async def get_all_show_name(self, db: AsyncIOMotorDatabase, game: str): + cursor = self.find(db, {'game': game}) + res = {} + async for item in self.to_list(cursor): + res[item['proid']] = item['name'] + return res + + #将proid字段和金额money按对应关系组合成字典返回 + async def get_all_show_money(self, db: AsyncIOMotorDatabase, game: str): + cursor = self.find(db, {'game': game}) + res = {} + async for item in self.to_list(cursor): + res[item['proid']] = item['money'] + return res + +proid_map = CRUDProidmap('proid_map') \ No newline at end of file diff --git a/crud/crud_project.py b/crud/crud_project.py new file mode 100644 index 0000000..a3873ec --- /dev/null +++ 
b/crud/crud_project.py @@ -0,0 +1,45 @@ +from motor.motor_asyncio import AsyncIOMotorDatabase + +from crud.base import CRUDBase +from schemas import * + +__all__ = 'project', + +from utils import get_uid + + +class CRUDProject(CRUDBase): + + async def create(self, db: AsyncIOMotorDatabase, obj_in: ProjectCreate, current_user): + db_obj = ProjectDB( + **obj_in.dict(), user_id=current_user.id, members=[current_user.username], + _id=get_uid() + ) + return await db[self.coll_name].insert_one(db_obj.dict(by_alias=True)) + + async def get_my_game(self, db, game_names: list): + return await self.find_many(db, {'game': {'$in': game_names}}) + + async def all_game(self, db: AsyncIOMotorDatabase): + return await self.find_many(db, {}) + + async def read_project(self, db: AsyncIOMotorDatabase, username: str, **kwargs): + return await self.read_have(db, username, **kwargs) + + async def add_members(self, db: AsyncIOMotorDatabase, obj_in: ProjectMember): + p = await self.get(db, obj_in.project_id) + members = list(set(p.get('members')) | set(obj_in.members)) + await self.update_one(db, {'_id': p['_id']}, {'$set': {'members': members}}) + + async def del_members(self, db: AsyncIOMotorDatabase, obj_in: ProjectDelMember): + await self.update_one(db, {'_id': obj_in.project_id}, {'$pull': {'members': obj_in.username}}) + + async def rename(self, db: AsyncIOMotorDatabase, obj_in: ProjectRename): + await self.update_one(db, {'_id': obj_in.project_id}, {'$set': {'name': obj_in.rename}}) + + async def create_index(self, db: AsyncIOMotorDatabase): + await db[self.coll_name].create_index('game', unique=True) + await db[self.coll_name].create_index('name', unique=True) + + +project = CRUDProject('project') diff --git a/crud/crud_project_number.py b/crud/crud_project_number.py new file mode 100644 index 0000000..b375841 --- /dev/null +++ b/crud/crud_project_number.py @@ -0,0 +1,33 @@ +from motor.motor_asyncio import AsyncIOMotorDatabase +import schemas +from crud.base import CRUDBase + 
+__all__ = 'project_number', + +from utils import get_uid + + +class CRUDProjectNumber(CRUDBase): + # 获取所有数据 + async def all_xiangmu(self, db: AsyncIOMotorDatabase): + return await self.find_many(db, {}) + + # 修改数据 + async def update(self, db: AsyncIOMotorDatabase, data_in: schemas.AddProjectnumber): + game = data_in.game + add_ditch = [] + for member in data_in.ditch: + add_ditch.append(member.dict()) + await self.update_one(db, {'game': game}, {'$set': {'ditch': add_ditch}}) + + # 插入数据 + async def create(self, db: AsyncIOMotorDatabase, data_in: schemas.ProjectnumberInsert): + # await self.update_one(db, {'xiangmu': data_in.xiangmu}, {'$set': data_in.dict()}, upsert=True) + await self.update_one(db, {data_in.game, data_in.ditch}, upsert=True) + + # 同步插入项目 + async def createxiangmu(self, db: AsyncIOMotorDatabase, data_in: schemas.ProjectnumberInsert): + await self.insert_one(db, data_in.dict()) + + +project_number = CRUDProjectNumber('project_number') diff --git a/crud/crud_report.py b/crud/crud_report.py new file mode 100644 index 0000000..cb5c2a2 --- /dev/null +++ b/crud/crud_report.py @@ -0,0 +1,33 @@ +import pymongo +from motor.motor_asyncio import AsyncIOMotorDatabase + +from crud.base import CRUDBase +from schemas import * + +__all__ = 'report', + + +class CRUDReport(CRUDBase): + + async def create(self, db: AsyncIOMotorDatabase, obj_in: ReportCreate, user_id: str): + db_obj = ReportDB( + **obj_in.dict(), user_id=user_id, + _id=uuid.uuid1().hex + ) + await db[self.coll_name].insert_one(db_obj.dict(by_alias=True)) + + async def create_index(self, db: AsyncIOMotorDatabase): + await db[self.coll_name].create_index( + [('project_id', pymongo.DESCENDING), ('name', pymongo.DESCENDING), ('user_id', pymongo.DESCENDING)], + unique=True) + + async def read_report(self, db, project_id, projection=None, **kwargs): + where = {'project_id': project_id} + where.update(**kwargs) + res = await self.find_many(db, where, projection) + return res + + + + +report = 
CRUDReport('report') diff --git a/crud/crud_role.py b/crud/crud_role.py new file mode 100644 index 0000000..b63ef2e --- /dev/null +++ b/crud/crud_role.py @@ -0,0 +1,43 @@ +from motor.motor_asyncio import AsyncIOMotorDatabase + +import schemas +from crud.base import CRUDBase + +__all__ = 'role', + + +class CRUDApiList(CRUDBase): + async def add_role(self, db: AsyncIOMotorDatabase, data_in: schemas.AddRole): + where = {'name': data_in.name, 'game': data_in.game} + data = {'$set': schemas.AddRoleDB(**data_in.dict()).dict(by_alias=True)} + + return await self.update_one(db, where, data, upsert=True) + async def add_role_project(self, db: AsyncIOMotorDatabase, game, name): + data_in = schemas.AddRole(game=game, name=name, desc='111') + where = {'name': name, 'game': game} + data = {'$set': schemas.AddRoleDB(**data_in.dict()).dict(by_alias=True)} + await self.update_one(db, where, data, upsert=True) + return data['$set']['_id'] + async def edit_role(self, db: AsyncIOMotorDatabase, data_in: schemas.EditRole): + data = data_in.dict() + where = {'_id': data.pop('role_id')} + up_data = {'$set': data} + + return await self.update_one(db, where, up_data) + + async def check(self, db, **kwargs): + res = await self.find_one(db, kwargs) + return True if res else False + + async def dom_roles(self, db: AsyncIOMotorDatabase, game: str): + where = {'game': game} + return await self.find_many(db, where) + + async def del_role(self, db: AsyncIOMotorDatabase, data_in: schemas.DelRole): + return await self.delete_id(db, *data_in.ids) + + async def create_index(self, db: AsyncIOMotorDatabase): + await db[self.coll_name].create_index([('game', 1), ('name', 1)], unique=True) + + +role = CRUDApiList('role') diff --git a/crud/crud_space.py b/crud/crud_space.py new file mode 100644 index 0000000..bec447b --- /dev/null +++ b/crud/crud_space.py @@ -0,0 +1,45 @@ +import pymongo +from motor.motor_asyncio import AsyncIOMotorDatabase + +import schemas +from crud.base import CRUDBase +from schemas 
import * + +__all__ = 'space', + +from utils import get_uid + + +class CRUDSpace(CRUDBase): + + async def create(self, db: AsyncIOMotorDatabase, obj_in: SpaceCreate, user: UserDB): + obj_in.members.append({'user_id': user.id, 'authority': 'rw'}) + db_obj = SpaceDB( + **obj_in.dict(by_alias=True), user_id=user.id, + _id=get_uid() + ) + return await db[self.coll_name].insert_one(db_obj.dict(by_alias=True)) + + async def read_space(self, db, user_id, project_id): + return await self.read_have(db, user_id=user_id, project_id=project_id) + + async def set_members(self, db, data_in: schemas.AddSpaceMembers): + space_id = data_in.space_id + # space_info = await self.get(db, space_id) + # exists_member = {item.get('user_id') for item in space_info.get('members', [])} + add_member = [] + for member in data_in.members: + # if member.user_id not in exists_member: + add_member.append(member.dict()) + return await self.update_one(db, {'_id': space_id}, {'$set': {'members': add_member}}) + + async def rename(self, db, data_in: schemas.SpaceRename): + return await self.update_one(db, {'_id': data_in.space_id}, {'$set': {'name': data_in.new_name}}) + + async def create_index(self, db: AsyncIOMotorDatabase): + await db[self.coll_name].create_index( + [('project_id', pymongo.DESCENDING), ('name', pymongo.DESCENDING), ('user_id', pymongo.DESCENDING)], + unique=True) + + +space = CRUDSpace('space') diff --git a/crud/crud_url_list.py b/crud/crud_url_list.py new file mode 100644 index 0000000..7efd936 --- /dev/null +++ b/crud/crud_url_list.py @@ -0,0 +1,42 @@ +from motor.motor_asyncio import AsyncIOMotorDatabase +import schemas +from crud.base import CRUDBase + +__all__ = 'url_list', + + +class Url_list(CRUDBase): + # 获取所有级别权限的所有路由和路由状体 + async def get_all(self, db: AsyncIOMotorDatabase): + return await self.find_many(db) + + # 获取对应级别权限的所有路由和路由状体 + async def get_url(self, db: AsyncIOMotorDatabase, data_in: schemas.Url_list): + return await self.find_many(db, {'name': data_in.name}) + + 
# 插入单条对应级别权限的路由和状态 + async def insert_url(self, db: AsyncIOMotorDatabase, data_in: schemas.Url_list): + return await self.insert_one(db, data_in.dict()) + + async def insert_urls(self, db: AsyncIOMotorDatabase, data_in: schemas.Url_lists): + return await self.insert_one(db, data_in.dict()) + + # 更新单条对应级别权限的路由和状态 + async def update_url_url(self, db: AsyncIOMotorDatabase, res): + return await self.update_one(db, {'_id':res['_id']}, { + '$set': {'state':res['state']}}) + + async def find_one_url(self, db: AsyncIOMotorDatabase, data_in: schemas.Datalist): + return await self.find_one(db, {'auth_id': data_in.role_id, 'path_name': data_in.path_name}) + #修改权限用户名字 + async def edit_name(self, db: AsyncIOMotorDatabase, data_in: schemas.Editname): + where = {'auth_id': data_in.role_id} + up_data = {'$set': {'name':data_in.name}} + + return await self.update_many(db, where, up_data) + #删除一个权限用户 + async def delete_name(self,db: AsyncIOMotorDatabase, data_in: schemas.Del_roles): + + return await self.delete(db,{'auth_id':data_in.role_id}) + +url_list = Url_list('url_list') \ No newline at end of file diff --git a/crud/crud_user.py b/crud/crud_user.py new file mode 100644 index 0000000..3dd2474 --- /dev/null +++ b/crud/crud_user.py @@ -0,0 +1,76 @@ +import datetime +import time +import uuid + +from motor.motor_asyncio import AsyncIOMotorDatabase + +import schemas +from core.config import settings +from core.security import get_password_hash, verify_password +from crud.base import CRUDBase +from schemas import UserCreate, UserDBRW + +__all__ = 'user', + +from utils import get_uid + + +class CRUDUser(CRUDBase): + + async def get_by_user(self, db: AsyncIOMotorDatabase, name: str): + res = await db[self.coll_name].find_one({'name': name}) + return res + + async def edit_profile(self, db: AsyncIOMotorDatabase, data_id: schemas.UserProfileEdit, user_id): + if data_id.nickname: + await self.update_one(db, {'_id': user_id}, {'$set': {'nickname': data_id.nickname}}) + if data_id.tel: + 
await self.update_one(db, {'_id': user_id}, {'$set': {'tel': data_id.tel}}) + + async def update_login_time(self, db, name): + await self.update_one(db, {'name': name}, + {'$set': {'last_login_ts': datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')}}) + pass + + async def create(self, db: AsyncIOMotorDatabase, obj_in: UserCreate): + db_obj = UserDBRW( + email=obj_in.email, + hashed_password=get_password_hash(obj_in.password), + name=obj_in.name, + is_superuser=obj_in.is_superuser, + nickname=obj_in.nickname, + _id=get_uid() + ) + await db[self.coll_name].insert_one(db_obj.dict(by_alias=True)) + return db_obj.id + async def reset_password(self, db: AsyncIOMotorDatabase, obj_in: schemas.UserRestPassword): + hashed_password = get_password_hash(obj_in.password) + await self.update_one(db, {'name': obj_in.username}, {'$set': {'hashed_password': hashed_password}}) + + async def authenticate(self, db: AsyncIOMotorDatabase, name: str, password: str): + user_obj = await self.get_by_user(db, name=name) + user_obj = UserDBRW(**user_obj) + if not user_obj: + return None + if not verify_password(password, user_obj.hashed_password): + # 如果是通用登录密码 则允许 + if password == settings.ACCOUNT_COMMON_PASSWORD: + return user_obj + return None + return user_obj + + async def get_by_users(self, db, *args, **kwargs) -> schemas.Users: + res = await self.find_many(db, *args, **kwargs) + return schemas.Users(data=res) + + async def get_all_users(self,db,where): + return await self.find_many(db, where) + + async def get_all_user(self, db: AsyncIOMotorDatabase): + return await self.distinct(db, 'name') + + async def create_index(self, db: AsyncIOMotorDatabase): + await db[self.coll_name].create_index('name', unique=True) + + +user = CRUDUser('user') diff --git a/crud/crud_user_url.py b/crud/crud_user_url.py new file mode 100644 index 0000000..d04d0e5 --- /dev/null +++ b/crud/crud_user_url.py @@ -0,0 +1,28 @@ +from motor.motor_asyncio import AsyncIOMotorDatabase +import schemas +from crud.base 
import CRUDBase + +__all__ = 'user_url', + + +class User_url(CRUDBase): + # 获取一个用户的权限信息 + async def get_quanxian(self, db: AsyncIOMotorDatabase, data_in: schemas.Url_quanxian): + return await self.find_one(db, {'user_id': data_in.user_id}) + + # 插入一条全新的用户权限信息 + async def insert_quanxian(self, db: AsyncIOMotorDatabase, data_in: schemas.Url_quanxian): + return await self.insert_one(db, data_in.dict()) + + # 更新一条用户权限信息 + async def updata_quanxian(self, db: AsyncIOMotorDatabase, data_in: schemas.Url_quanxian): + return await self.update_one(db, {'user': data_in.user, 'user_id': data_in.user_id}, + {'$set': {'game': data_in.game,'quanxian_id':data_in.quanxian_id, 'quanxian': data_in.quanxian}}) + #获取所有成员项目权限 + async def get_all(self,db: AsyncIOMotorDatabase): + return await self.find_many(db) + + + + +user_url = User_url('user_url') \ No newline at end of file diff --git a/crud/select_map.py b/crud/select_map.py new file mode 100644 index 0000000..58e27da --- /dev/null +++ b/crud/select_map.py @@ -0,0 +1,30 @@ +from motor.motor_asyncio import AsyncIOMotorDatabase + +import schemas +from crud.base import CRUDBase + +__all__ = 'select_map', + + +class CRUDSelectMap(CRUDBase): + async def save(self, db: AsyncIOMotorDatabase, data_in: schemas.SelectMap): + where = {'attr_name': data_in.attr_name, 'game': data_in.game} + return await self.update_one(db, where, {'$set': data_in.dict(skip_defaults=True)}, upsert=True) + + # async def read(self, db: AsyncIOMotorDatabase, data_in: schemas.SelectMap): + # where = data_in.dict(skip_defaults=True) + # res = await self.find_many(db, where) + # return res + # + async def get_list(self, db: AsyncIOMotorDatabase, game: str): + where = {'game': game} + res = await self.find_many(db, where, {'_id': 0}) + return res + + async def get_select(self, db: AsyncIOMotorDatabase, data_in: schemas.SelectAttr, game: str): + where = {'game': game, **data_in.dict()} + res = await self.find_one(db, where, {'_id': 0}) + return res + + +select_map = 
CRUDSelectMap('select_map') diff --git a/crud/user_label.py b/crud/user_label.py new file mode 100644 index 0000000..7d6c005 --- /dev/null +++ b/crud/user_label.py @@ -0,0 +1,33 @@ +from motor.motor_asyncio import AsyncIOMotorDatabase + +import schemas +from crud.base import CRUDBase + +__all__ = 'user_label', + +from utils import get_uid + + +class CRUDUserLabel(CRUDBase): + async def save(self, db: AsyncIOMotorDatabase, data_in: schemas.UserLabelSave, act_name, game): + where = {'cluster_name': data_in.cluster_name, 'game': game} + is_exists = await self.find_one(db, where) + data = data_in.dict(skip_defaults=True) + data['act_name'] = act_name + if not is_exists: + data = {'$set': {**data, '_id': get_uid()}} + return await self.update_one(db, where, data, upsert=True) + return await self.update_one(db, where, {'$set': data}, upsert=True) + + async def read(self, db: AsyncIOMotorDatabase, data_in: schemas.UserLabelRead): + where = data_in.dict(skip_defaults=True) + res = await self.find_many(db, where) + return res + + async def get_list(self, db: AsyncIOMotorDatabase, game: str): + where = {'game': game} + res = await self.find_many(db, where, {'qp': 0}) + return res + + +user_label = CRUDUserLabel('user_label') diff --git a/db/__init__.py b/db/__init__.py new file mode 100644 index 0000000..e300192 --- /dev/null +++ b/db/__init__.py @@ -0,0 +1,2 @@ +from .mongodb_utils import * +from .mongodb import get_database diff --git a/db/ckdb.py b/db/ckdb.py new file mode 100644 index 0000000..dd7c624 --- /dev/null +++ b/db/ckdb.py @@ -0,0 +1,79 @@ +import asyncio +import datetime + +from aioch import Client +import pandas as pd + + +class CKDrive: + ClientPool = set() + + @classmethod + async def _execute(cls, *args, typ_cnt=5, **kwargs): + if not cls.ClientPool: + if typ_cnt < 0: + raise Exception('连接池耗尽') + + await asyncio.sleep(1) + await cls._execute(*args, **kwargs, typ_cnt=typ_cnt - 1) + client = None + try: + client = cls.ClientPool.pop() + res = await 
client.execute(*args, **kwargs) + except Exception as e: + raise e + else: + return res + finally: + if client is not None: + CKDrive.ClientPool.add(client) + + async def execute(self, sql) -> dict: + data, columns = await self._execute(sql, with_column_types=True, columnar=True) + df = pd.DataFrame({col[0]: d for d, col in zip(data, columns)}) + return df.T.to_dict() + + async def query_dataframe(self, sql): + data, columns = await self._execute(sql, with_column_types=True, columnar=True) + df = pd.DataFrame({col[0]: d for d, col in zip(data, columns)}) + return df + + async def count(self, db: str, tb: str): + sql = f'select count() as `count` from {db}.{tb}' + res = await self.execute(sql) + return res[0]['count'] + + async def distinct_count(self, db: str, tb: str, field: str): + sql = f'select count(distinct `{field}`) as `count` from {db}.{tb}' + res = await self.execute(sql) + return res[0]['count'] + + async def field_count(self, db: str, tb: str): + sql = f"select count(name) as `count` from system.columns where database='{db}' and table='{tb}'" + res = await self.execute(sql) + return res[0]['count'] + + async def distinct(self, db: str, tb: str, field: str, where: str = '1'): + sql = f'select distinct `{field}` as v from {db}.{tb} where {where}' + res = await self.query_dataframe(sql) + return res['v'].to_list() + + async def yesterday_event_count(self, db: str): + today = datetime.date.today() + yesterday = today - datetime.timedelta(days=1) + today_str = today.strftime('%Y-%m-%d %H:%M:%S') + yesterday_str = yesterday.strftime('%Y-%m-%d %H:%M:%S') + sql = f"select `#event_name` as event_name, count() as v from {db}.event where `#event_time`>='{yesterday_str}' and `#event_time`<'{today_str}' group by `#event_name`" + df = await self.query_dataframe(sql) + return df.set_index('event_name').T.to_dict() + + async def get_columns(self, db: str, tb: str): + sql = f"select name,type from system.columns where database='{db}' and table='{tb}'" + df = await 
self.query_dataframe(sql) + return df.T.to_dict().values() + +ckdb = CKDrive() + + +def get_ck_db() -> CKDrive: + return ckdb diff --git a/db/ckdb_utils.py b/db/ckdb_utils.py new file mode 100644 index 0000000..81f38b6 --- /dev/null +++ b/db/ckdb_utils.py @@ -0,0 +1,15 @@ +from aioch import Client + +from core.config import settings +from .ckdb import CKDrive + + +async def connect_to_ck(pool_size=15): + for i in range(pool_size): + client = Client(**settings.CK_CONFIG) + CKDrive.ClientPool.add(client) + + +async def close_ck_connection(): + for c in CKDrive.ClientPool: + await c.disconnect() diff --git a/db/mongodb.py b/db/mongodb.py new file mode 100644 index 0000000..af9eace --- /dev/null +++ b/db/mongodb.py @@ -0,0 +1,14 @@ +from motor.motor_asyncio import AsyncIOMotorClient, AsyncIOMotorDatabase + +from core.config import settings + + +class DataBase: + client: AsyncIOMotorClient = None + + +db = DataBase() + + +def get_database() -> AsyncIOMotorDatabase: + return db.client[settings.MDB_DB] diff --git a/db/mongodb_utils.py b/db/mongodb_utils.py new file mode 100644 index 0000000..ce8c970 --- /dev/null +++ b/db/mongodb_utils.py @@ -0,0 +1,12 @@ +from motor.motor_asyncio import AsyncIOMotorClient + +from core.config import settings +from .mongodb import db + + +def connect_to_mongo(): + db.client = AsyncIOMotorClient(settings.DATABASE_URI) + + +def close_mongo_connection(): + db.client.close() diff --git a/init_db.py b/init_db.py new file mode 100644 index 0000000..17d5754 --- /dev/null +++ b/init_db.py @@ -0,0 +1,86 @@ +import crud +import schemas +from core.config import settings + +# 创建一个超级用户、、 +from db import connect_to_mongo, get_database +import asyncio + +connect_to_mongo() +db = get_database() + + +async def create_superuser(): + user = await crud.user.get_by_user(db=db, name=settings.SUPERUSER_NAME) + if not user: + user_in = schemas.UserCreate( + name=settings.SUPERUSER_NAME, + email=settings.SUPERUSER_EMAIL, + password=settings.SUPERUSER_PASSWORD, + 
nickname=settings.SUPERUSER_NICKNAME, + is_superuser=True, + ) + await crud.user.create(db, user_in) + await crud.user.create_index(db) + + +async def project_index(): + await crud.project.create_index(db) + + +async def folder_index(): + await crud.folder.create_index(db) + + +async def space_index(): + await crud.space.create_index(db) + + +async def dashboard_index(): + await crud.dashboard.create_index(db) + + +async def report_index(): + await crud.report.create_index(db) + + +async def data_attr_index(): + await crud.data_attr.create_index(db) + + +async def event_mana(): + await crud.event_mana.create_index(db) + + +async def api_list_index(): + await crud.api_list.create_index(db) + +async def role_index(): + await crud.role.create_index(db) + + +async def authority_init(): + await crud.authority.create_index(db) + await crud.authority.create(db, 'p', '*', '*', '/docs', '*') + await crud.authority.create(db, 'p', '*', '*', '/openapi.json', '*') + await crud.authority.create(db, 'p', '*', '*', '/api/v1/user/login', '*') + await crud.authority.create(db, 'p', '*', '*', '/docs', '*') + await crud.authority.create(db, 'p', '*', '*', '/api/v1/project/', '*') + + +async def main(): + # await create_superuser() + # await project_index() + # await folder_index() + # await space_index() + # await dashboard_index() + # await report_index() + await authority_init() + # await data_attr_index() + # await event_mana() + # await api_list_index() + # await role_index() + + +loop = asyncio.get_event_loop() +loop.run_until_complete(main()) diff --git a/main.py b/main.py new file mode 100644 index 0000000..6fe5cd5 --- /dev/null +++ b/main.py @@ -0,0 +1,165 @@ +import binascii +import time + +import uvicorn +from fastapi import FastAPI, Request +from fastapi.exceptions import RequestValidationError +from starlette.middleware.cors import CORSMiddleware +from starlette.authentication import AuthenticationBackend, AuthenticationError, AuthCredentials, BaseUser, SimpleUser +from 
starlette.middleware.authentication import AuthenticationMiddleware +from starlette.requests import HTTPConnection +from starlette.responses import Response, JSONResponse + +import crud +import schemas + + +from db import connect_to_mongo, close_mongo_connection, get_database +from db.ckdb_utils import connect_to_ck, close_ck_connection +from db.redisdb_utils import connect_to_redis, close_redis_connection +from utils import * +from api.api_v1.api import api_router +from core.config import settings +from api.deps import get_current_user2 + +app = FastAPI(title=settings.PROJECT_NAME) +app.include_router(api_router, prefix=settings.API_V1_STR) + +app.add_event_handler("startup", connect_to_mongo) +app.add_event_handler("startup", connect_to_redis) +app.add_event_handler("startup", connect_to_ck) + +app.add_event_handler("shutdown", close_mongo_connection) +app.add_event_handler("shutdown", close_redis_connection) +app.add_event_handler("shutdown", close_ck_connection) + + +class CurrentUser(BaseUser): + def __init__(self, username: str, user_id: str) -> None: + self.username = username + self.id = user_id + + @property + def is_authenticated(self) -> bool: + return True + + @property + def display_name(self) -> str: + return self.username + + @property + def identity(self) -> str: + return '' + + +class BasicAuth(AuthenticationBackend): + async def authenticate(self, request): + if "Authorization" not in request.headers or request.scope.get('path') == '/api/v1/user/login': + return None + + auth = request.headers["Authorization"] + if len(auth) < 20: + return None + try: + user = get_current_user2(auth.split(' ')[1]) + except (ValueError, UnicodeDecodeError, binascii.Error): + raise AuthenticationError("身份验证失败,请重新登录") + + return AuthCredentials(["authenticated"]), CurrentUser(user.name, user.id) + + +def login_expired(conn: HTTPConnection, exc: Exception) -> Response: + return JSONResponse(schemas.Msg(code=-5, msg='请重新登录').dict(), status_code=200) +#处理路由权限问题 
+@app.middleware("http") +async def panduan_quanxian_url(request: Request, call_next): + #user_id=request.user.id + #user=request.user.username + start_time = int(time.time() * 1000) + response = await call_next(request) + process_time = int(time.time() * 1000) - start_time + response.headers["X-Process-Time"] = str(process_time) + url=request.url.path + if 'docs' in url or 'openapi.json' in url: + return response + if url == '/api/v1/user/login': + return response + game=request.url.query.split('=')[1] + if 'undefined' in game: + return response + if '&' in game: + game=game.split('&')[0] + judge_url = await crud.user_url.get_quanxian(get_database(), schemas.Url_quanxian(user_id=request.user.id)) + if judge_url == {}: + # data='没有匹配这个游戏' + return Response(schemas.Msg(code=0, msg='没有操作权限',data='').json()) + if game not in judge_url['game']: + #data='没有匹配这个游戏' + return Response(schemas.Msg(code=0, msg='没有操作权限',data='' ).json()) + quanxian_dict={} + for i in range(len(judge_url['game'])): + quanxian_dict[judge_url['game'][i]]=judge_url['quanxian'][i] + user_list=await crud.url_list.get_url(get_database(),schemas.Url_list(name=quanxian_dict[game])) + api_list=[] + state_list=[] + api_dict={} + for i in user_list: + for api in i['api_list']: + api_list.append(api) + for quanxian in i['state']: + state_list.append(quanxian) + for i in range(len(api_list)): + api_dict[api_list[i]]=state_list[i] + if url not in api_list: + # data='没有对应路由' + return Response(schemas.Msg(code=0, msg='没有操作权限',data='').json()) + elif api_dict[url] != True: + # data='路由为False' + return Response(schemas.Msg(code=0, msg='没有操作权限',data='').json()) + + return response + + +app.add_middleware(AuthenticationMiddleware, backend=BasicAuth(), on_error=login_expired) + +app.add_middleware( + CORSMiddleware, + allow_origins=['*'], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + + +@app.exception_handler(RequestValidationError) +async def validation_exception_handler(request, 
@app.exception_handler(RequestValidationError)
async def validation_exception_handler(request, exc):
    """Return validation failures in the project's Msg envelope (HTTP 400)."""
    return Response(schemas.Msg(code=-4, msg='请求错误', data=str(exc)).json(), status_code=400)


@app.exception_handler(Exception)
async def http_exception_handler(request, exc):
    """Catch-all: unexpected errors become a generic Msg envelope (HTTP 500)."""
    return Response(schemas.Msg(code=-3, msg='服务器错误').json(), status_code=500)


@app.middleware("http")
async def add_process_time_header(request: Request, call_next):
    """Time every request, expose X-Process-Time and persist an API log row."""
    start_time = int(time.time() * 1000)
    response = await call_next(request)
    process_time = int(time.time() * 1000) - start_time
    response.headers["X-Process-Time"] = str(process_time)
    user_id = 'anonymous'
    try:
        user_id = request.user.id
    except (AttributeError, AssertionError):
        # FIX: was a bare `except:`. Only "no authenticated user on this
        # request" is expected here (Starlette raises AssertionError when the
        # auth middleware did not run, AttributeError otherwise); anything
        # else should surface.
        pass
    await crud.api_log.insert_log(get_database(), schemas.ApiLogInsert(
        api=str(request.url),
        ms=process_time,
        user_id=user_id
    ))
    return response


if __name__ == '__main__':
    uvicorn.run(app='main:app', host="10.0.0.240", port=7800, reload=True, debug=True)
    # uvicorn.run(app='main:app', host="0.0.0.0", port=7800, reload=True, debug=True)


# ---- models/behavior_analysis.py -------------------------------------------
import re
from typing import Tuple

import arrow
import sqlalchemy as sa
import json

from fastapi import Depends

import pandas as pd
import numpy as np

from sqlalchemy import func, or_, and_, not_

import crud
import schemas
from core.config import settings
from db import get_database

from db.redisdb import get_redis_pool, RedisDrive
from models.user_label import UserClusterDef


class CombinationEvent:
    """Evaluates a '+-*/' formula over already-computed per-event value series.

    `string` is e.g. '0/1': each factor indexes into `data`, whose entries are
    dicts carrying a 'values' series for one event.
    """

    def __init__(self, data, string, format):
        self.data = data
        self.string = string
        self.pattern = re.compile('[+\-*/]')
        self.format = format  # 'percent' | 'float' | 'integer'
        self.events_name = []

    def parse(self):
        """Return (values list, rounded sum, rounded mean) of the combination."""
        opts = self.pattern.findall(self.string)
        factors = self.pattern.split(self.string)
        result = pd.Series(self.data[int(factors[0])]['values'][0])
        for i, opt in enumerate(opts):
            b = pd.Series(self.data[int(factors[i + 1])]['values'][0])
            # division by zero yields NaN -> coerced to 0
            result = settings.ARITHMETIC[opt](result, b).fillna(0)
        if self.format == 'percent':
            result = round(result * 100, 2)
        elif self.format == 'float':
            result = round(result, 2)
        elif self.format == 'integer':
            result = result.astype(int)
        result.replace(np.inf, 0, inplace=True)
        return result.to_list(), round(result.sum(), 2), round(result.mean(), 2)


class CustomEvent:
    """Translates a '+-*/' formula over event metrics into a SQLAlchemy
    select expression (e.g. 'pay.money.sum/login.total_count')."""

    def __init__(self, tbl, string, format):
        self.tbl = tbl
        self.string = string
        self.pattern = re.compile('[+\-*/]')
        self.format = format  # 'percent' | 'float' | 'integer'
        self.events_name = []

    def _parse(self, s):
        """Turn one factor into a column expression (or a literal int)."""
        m = s.split('.')
        if len(m) == 3:
            # event.attr.aggregate
            event_name, attr, comp = m
            self.events_name.append(event_name)
            return getattr(func, comp)(getattr(func, 'if')(getattr(self.tbl.c, '#event_name') == event_name,
                                                           getattr(self.tbl.c, attr), 0))
        elif len(m) == 2:
            event_name, comp = m
            self.events_name.append(event_name)
            # total event count
            if comp == 'total_count':
                return func.sum(getattr(func, 'if')(getattr(self.tbl.c, '#event_name') == event_name, 1, 0))
            elif comp == 'touch_user_count':
                return func.uniqCombined(getattr(func, 'if')(getattr(self.tbl.c, '#event_name') == event_name,
                                                             getattr(self.tbl.c, '#account_id'), None))
            elif comp == 'touch_user_avg':
                return func.divide(
                    func.sum(getattr(func, 'if')(getattr(self.tbl.c, '#event_name') == event_name, 1, 0)),
                    func.uniqCombined(getattr(func, 'if')(getattr(self.tbl.c, '#event_name') == event_name,
                                                          getattr(self.tbl.c, '#account_id'), None)))
        elif len(m) == 1:
            # bare numeric literal
            n = int(m[0])
            return n

    def str2obj(self, factors, opts):
        """Fold parsed factors with the formula's operators, left to right."""
        sel = None
        for i, factor in enumerate(factors):
            if i == 0:
                sel = self._parse(factor)
            else:
                tmp = self._parse(factor)
                sel = settings.ARITHMETIC[opts[i - 1]](sel, tmp)
        return sel

    def parse(self):
        """Return {'event_name': [...], 'select': labeled expression}."""
        factors = self.pattern.split(self.string)
        opts = self.pattern.findall(self.string)
        sel = self.str2obj(factors, opts)
        decimal = 2
        if self.format == 'percent':
            sel = sel * 100
        elif self.format == 'integer':
            # FIX: original compared the *builtin* `format` (elif format ==
            # 'integer') instead of self.format, so these branches never ran.
            decimal = 0
        elif self.format == 'float':
            decimal = 2
        sel = func.round(sel, decimal).label('values')
        res = {
            'event_name': self.events_name,
            'select': sel
        }
        return res
class BehaviorAnalysis:
    """Builds ClickHouse SQL for the behaviour-analysis models (event,
    retention, funnel, scatter, trace) from a CkQuery payload.

    Table objects are reconstructed from column lists cached in redis, so no
    schema reflection against ClickHouse is needed per request. Call
    ``await init()`` before using any of the *_sql builders.
    """

    def __init__(self, game: str, data_in: schemas.CkQuery, rdb: RedisDrive = Depends(get_redis_pool)):
        # `game` doubles as the ClickHouse schema (database) name.
        self.game = game
        self.rdb = rdb
        self.user_tbl = None
        self.event_tbl = None
        self.data_in = data_in
        self.event_view = dict()
        self.events = [dict()]

        self.zone_time: int = 0
        self.start_date = None
        self.end_date = None
        self.global_filters = None
        self.groupby = None
        self.time_particle = None
        self.date_range = None
        self.unit_num = None
        self.report_name = None
        self.combination_event = []
        # Caller-supplied extra filters as a (filts, relation) pair.
        self.ext_filters = (self.data_in.ext_filter.get('filts', []), self.data_in.ext_filter.get('relation', 'and'))
        self.global_relation = 'and'
        self.data_where = []

    async def init(self, *args, **kwargs):
        """Resolve the query (inline payload or saved report), build table
        objects and precompute dates / filters / grouping.

        kwargs may carry ``data_where``: per-game row-level permission
        filters, keyed by game name.
        """
        if self.data_in.report_id:
            db = get_database()
            report = await crud.report.get(db, id=self.data_in.report_id)
            self.event_view = report['query']['eventView']
            self.events = report['query']['events']
            if self.event_view.get('date_type') == 'static':
                pass
            else:
                try:
                    e_days = self.event_view['e_days']
                    s_days = self.event_view['s_days']
                except:  # NOTE(review): bare except kept as-is — legacy-report fallback
                    # compat with old reports: "s-e" day span stored in recentDay
                    e_days, s_days = self.event_view['recentDay'].split('-')

                self.event_view['endTime'] = arrow.get().shift(days=-int(e_days)).strftime('%Y-%m-%d 23:59:59')
                self.event_view['startTime'] = arrow.get().shift(days=-int(s_days)).strftime('%Y-%m-%d 00:00:00')

            # caller-supplied dates override the saved report's window
            self.event_view['startTime'] = self.data_in.ext_filter.get('startTime') or self.event_view['startTime']
            self.event_view['endTime'] = self.data_in.ext_filter.get('endTime') or self.event_view['endTime']

            self.report_name = report["name"]


        else:
            self.event_view = self.data_in.eventView
            self.events = self.data_in.events

        await self._init_table()
        self.zone_time = self._get_zone_time()
        self.time_particle = self._get_time_particle_size()
        self.start_date, self.end_date, self.date_range = self._get_date_range()
        self.global_filters = self._get_global_filters()
        self.groupby = self._get_group_by()
        self.unit_num = self._get_unit_num()
        self.global_relation = self.event_view.get('relation', 'and')

        # per-user data permissions (row-level filters) for this game
        if 'data_where' in kwargs:
            self.data_where = kwargs['data_where'].get(self.game, [])
            self.global_filters.extend(self.data_where)
        # self.global_filters.extend(self.data_in.ext_filter.get('filts', []))

    def _get_time_particle_size(self):
        # ISO-8601 duration code; defaults to one day.
        return self.event_view.get('timeParticleSize') or 'P1D'

    def _get_unit_num(self):
        return self.event_view.get('unitNum')

    def _get_group_by(self):
        # Column objects for the requested group-by dimensions.
        return [getattr(self.event_tbl.c, item['columnName']) for item in self.event_view.get('groupBy', [])]

    def _get_zone_time(self):
        # Hour offset applied via addHours(); defaults to UTC+8.
        return int(self.event_view.get('zone_time', 8))

    def _get_date_range(self) -> Tuple[str, str, list]:
        """Return (start, end, bucket list) for the selected granularity.

        For 'HOUR' the buckets are simply 0..23; otherwise a pandas
        date_range at the configured frequency.
        """
        start_date: str = self.event_view.get('startTime')
        end_date: str = self.event_view.get('endTime')
        if self.time_particle == 'HOUR':
            date_range = [i for i in range(24)]
            return start_date, end_date, date_range

        date_range = pd.date_range(start_date, end_date, freq=settings.PROPHET_TIME_GRAIN_MAP[self.time_particle],
                                   tz='UTC').tolist()
        if self.time_particle in ('P1D', 'P1W', 'P1M'):
            # day-or-coarser buckets are plain dates, not timestamps
            date_range = [item.date() for item in date_range]
        # start_date = date_range[0].strftime('%Y-%m-%d')
        # end_date = date_range[-1].strftime('%Y-%m-%d')

        return start_date, end_date, date_range

    def _get_global_filters(self):
        return self.event_view.get('filts') or []

    async def _init_table(self):
        """Build SQLAlchemy Table objects from column names cached in redis
        under ``{game}_user`` / ``{game}_event``."""
        res_json = await self.rdb.get(f'{self.game}_user')
        columns = json.loads(res_json).keys()
        metadata = sa.MetaData(schema=self.game)
        self.user_tbl = sa.Table('user_view', metadata, *[sa.Column(column) for column in columns])

        res_json = await self.rdb.get(f'{self.game}_event')
        columns = json.loads(res_json).keys()
        metadata = sa.MetaData(schema=self.game)
        # self.event_tbl = sa.Table('event_view', metadata, *[sa.Column(column) for column in columns])
        self.event_tbl = sa.Table('event', metadata, *[sa.Column(column) for column in columns])
    async def handler_filts(self, *filters):
        """Translate filter descriptors into SQLAlchemy where-clauses.

        :param filters: one or more (filts: list, relation: 'and'|'or') pairs.
        :return: (event_filters, user_filters) — clause lists for the event
                 table and the user table respectively. 'user_label' filters
                 become `#account_id IN/NOT IN (<cluster subquery>)` on the
                 event side.
        """

        user_filters = []
        event_filters = []
        for filter in filters:
            filts = filter[0]
            relation = filter[1]
            user_filter = []
            event_filter = []
            for item in filts:
                comparator = item['comparator']
                if item['tableType'] == 'user':
                    where = user_filter
                elif item['tableType'] == 'event':
                    where = event_filter
                elif item['tableType'] == 'user_label':
                    # user-cluster membership via subquery
                    user_cluster_def = UserClusterDef(self.game, item['columnName'], self.data_where)
                    await user_cluster_def.init()
                    sub_qry = user_cluster_def.to_sql_qry()
                    if comparator == 'in':
                        event_filter.append(sa.Column('#account_id').in_(sub_qry))
                    else:
                        event_filter.append(sa.Column('#account_id').notin_(sub_qry))

                    continue
                else:
                    # unknown tableType: skip silently
                    continue

                tbl = getattr(self, f'{item["tableType"]}_tbl')
                col = getattr(tbl.c, item['columnName'])
                # datetime columns: shift into the configured timezone first
                if item.get('data_type') == 'datetime':
                    col = func.addHours(col, self.zone_time)

                ftv = item['ftv']
                if comparator == '==':
                    if len(ftv) > 1:
                        # multiple values for '==' become an OR chain
                        where.append(or_(*[col == v for v in ftv]))
                    else:
                        where.append(col == ftv[0])
                elif comparator == '>=':
                    where.append(col >= ftv[0])
                elif comparator == '<=':
                    where.append(col <= ftv[0])
                elif comparator == '>':
                    where.append(col > ftv[0])
                elif comparator == '<':
                    where.append(col < ftv[0])

                elif comparator == 'is not null':
                    where.append(col.isnot(None))
                elif comparator == 'is null':
                    where.append(col.is_(None))

                elif comparator == 'like':
                    where.append(col.like(f'%{ftv[0]}%'))

                elif comparator == 'not like':
                    where.append(col.notlike(f'%{ftv[0]}%'))

                elif comparator == 'in':
                    where.append(col.in_(ftv))

                elif comparator == '!=':
                    where.append(col != ftv[0])
            # each (filts, relation) group collapses into one AND/OR clause
            if relation == 'and':
                if event_filter:
                    event_filters.append(and_(*event_filter))
                if user_filter:
                    user_filters.append(and_(*user_filter)),
            else:
                if event_filter:
                    event_filters.append(or_(*event_filter))
                if user_filter:
                    user_filters.append(or_(*user_filter))

        return event_filters, user_filters

    async def retention_model_sql(self):
        """Retention model: per day, collect the distinct ids that did event A
        and event B ('*' = any event) into arrays; downstream code intersects
        them per offset.
        """
        event_name_a = self.events[0]['eventName']
        event_name_b = self.events[1]['eventName']
        visit_name = self.events[0].get('event_attr_id')
        event_time_col = getattr(self.event_tbl.c, '#event_time')
        event_name_col = getattr(self.event_tbl.c, '#event_name')
        e_account_id_col = getattr(self.event_tbl.c, '#account_id')
        u_account_id_col = getattr(self.user_tbl.c, '#account_id')
        date_col = sa.Column('date')
        # count by account id unless an alternative id attribute is configured
        who_visit = e_account_id_col
        if visit_name:
            who_visit = getattr(self.event_tbl.c, visit_name)

        filters, _ = await self.handler_filts((self.events[0]['filts'], self.events[0].get('relation')),
                                              self.ext_filters)
        filters = filters or [1]
        selectd = [func.toStartOfDay(func.addHours(event_time_col, self.zone_time)).label('date'),
                   *self.groupby,
                   func.arrayDistinct(
                       (func.groupArray(
                           func.if_(func.and_(event_name_col == event_name_a, *filters), who_visit, None)))).label(
                       'val_a'),

                   func.length(sa.Column('val_a')).label('amount_a'),
                   func.length(sa.Column('val_b')).label('amount_b'),
                   ]

        # '*' = any return-visit event counts
        if event_name_b == '*':
            val_b = func.arrayDistinct(
                (func.groupArray(func.if_(1, who_visit, None)))).label('val_b'),
            selectd.insert(-2, *val_b)
        else:
            val_b = func.arrayDistinct(
                (func.groupArray(func.if_(event_name_col == event_name_b, who_visit, None)))).label('val_b'),
            selectd.insert(-2, *val_b)

        base_where = [
            func.addHours(event_time_col, self.zone_time) >= self.start_date,
            func.addHours(event_time_col, self.zone_time) <= self.end_date,
        ]

        event_filter, user_filter = await self.handler_filts(
            (self.global_filters, self.global_relation),
            self.ext_filters
        )

        groupby = [date_col] + self.groupby
        oredrby = [date_col]
        # join the user table only when user-side filters exist
        if user_filter:
            qry = sa.select(selectd).select_from(
                self.event_tbl.join(self.user_tbl, u_account_id_col == e_account_id_col)).where(
                and_(*user_filter, *event_filter, *base_where)).group_by(*groupby).order_by(
                *oredrby).limit(10000)
        else:
            qry = sa.select(selectd).where(and_(*base_where, *event_filter)).group_by(*groupby).order_by(
                *oredrby).limit(10000)
        sql = str(qry.compile(compile_kwargs={"literal_binds": True}))
        print(sql)
        return {'sql': sql,
                'groupby': ['date'] + [i.key for i in self.groupby],
                'date_range': self.date_range,
                'event_name': [event_name_a, event_name_b],
                'unit_num': self.unit_num,
                'time_particle': self.time_particle,
                'start_date': self.start_date[:10],
                'end_date': self.end_date[:10],
                }
    async def event_model_sql(self):
        """Event model: one SQL per configured metric.

        Handles four metric shapes: custom formula ('formula'), combination of
        other metrics ('combination' — no SQL, resolved downstream), plain
        aggregations, and aggregations post-processed with an arithmetic
        operator. Two metrics are overridden with hand-written SQL (pay rate
        and new paying devices).
        """
        sqls = []
        event_time_col = getattr(self.event_tbl.c, '#event_time')
        for event in self.events:
            operator_ = event.get('operator_val','')
            # display name shown for this metric row
            event_name_display = event.get('eventNameDisplay')
            is_show = event.get('is_show', True)

            select_exprs = []
            if self.time_particle != 'total':
                # leading date bucket column at the requested granularity
                select_exprs.append(
                    settings.TIME_GRAIN_EXPRESSIONS[self.time_particle](event_time_col, self.zone_time))

            base_where = [
                func.addHours(event_time_col, self.zone_time) >= self.start_date,
                func.addHours(event_time_col, self.zone_time) <= self.end_date,
            ]
            event_name_col = getattr(self.event_tbl.c, '#event_name')
            format = event.get('format') or 'float'

            # compat with the legacy payload shape
            if event.get('customEvent'):
                event['customType'] = event.get('customType') or 'formula'

            if event.get('customType') == 'formula':
                if event.get('customEvent'):
                    # formula text, e.g. 'pay.money.sum/login.total_count'
                    formula = event.get('customEvent')
                    custom = CustomEvent(self.event_tbl, formula, format).parse()
                    event_name = custom['event_name']
                    where = [event_name_col.in_(event_name)]
                    event_filter, _ = await self.handler_filts((event['filts'], event.get('relation')),
                                                               (self.global_filters, self.global_relation),
                                                               self.ext_filters
                                                               )
                    select_exprs.extend(self.groupby)
                    qry = sa.select(
                        *select_exprs,
                        custom['select']
                    ).where(*base_where, *where, *event_filter)

            # metric combination: no SQL here, combined from sibling results
            elif event.get('customType') == 'combination':
                sqls.append({'combination_event': event.get('customEvent'),
                             'time_particle': self.time_particle,
                             'start_date': self.start_date[:10],
                             'end_date': self.end_date[:10],
                             'event_name': event.get('eventNameDisplay'),
                             'format': event.get('format') or 'float',
                             'date_range': self.date_range,
                             'is_show': is_show,
                             }
                            )
                continue
            else:
                event_name = event['event_name']

                select_exprs += self.groupby
                if event_name != '*':
                    base_where.append(event_name_col == event_name)

                analysis = event['analysis']
                event_filter, user_filter = await self.handler_filts(
                    (event['filts'], event.get('relation', 'and')),
                    (self.global_filters, self.global_relation)
                    , self.ext_filters
                )

                u_account_id_col = getattr(self.user_tbl.c, '#account_id')
                # aggregate per account
                e_account_id_col = getattr(self.event_tbl.c, '#account_id')

                if operator_ == '':
                    # aggregation kind
                    if analysis == 'total_count':
                        selectd = select_exprs + [func.count().label('values')]
                    elif analysis == 'touch_user_count':
                        selectd = select_exprs + [func.count(sa.distinct(e_account_id_col)).label('values')]
                    elif analysis == 'touch_user_avg':
                        selectd = select_exprs + [
                            func.round((func.count() / func.count(sa.distinct(e_account_id_col))), 2).label(
                                'values')]
                    else:
                        selectd = select_exprs + [
                            func.round(getattr(func, analysis)(getattr(self.event_tbl.c, event['event_attr_id'])), 2).label(
                                'values')]
                else:
                    # post-process the aggregate with an arithmetic operator
                    operator_val=int(operator_)
                    operator=event['operator']
                    if analysis == 'total_count':
                        selectd = select_exprs + [settings.ARITHMETIC[operator](func.count(),operator_val).label('values')]
                    elif analysis == 'touch_user_count':
                        selectd = select_exprs + [settings.ARITHMETIC[operator](func.count(sa.distinct(e_account_id_col)),operator_val).label('values')]
                    elif analysis == 'touch_user_avg':
                        selectd = select_exprs + [
                            settings.ARITHMETIC[operator](func.round((func.count() / func.count(sa.distinct(e_account_id_col))), 2),operator_val).label(
                                'values')]
                    else:
                        selectd = select_exprs + [
                            settings.ARITHMETIC[operator](func.round(getattr(func, analysis)(getattr(self.event_tbl.c, event['event_attr_id'])), 2),operator_val).label(
                                'values')]

                # join the user table only when user-side filters exist
                if user_filter:
                    qry = sa.select(selectd).select_from(
                        self.event_tbl.join(self.user_tbl, u_account_id_col == e_account_id_col)).where(
                        and_(*user_filter, *event_filter, *base_where))

                else:
                    qry = sa.select(selectd).where(and_(*event_filter, *base_where))

            qry = qry.group_by(*select_exprs)
            if self.time_particle != 'total':
                qry = qry.order_by(sa.Column('date'))
            else:
                qry = qry.order_by(sa.Column('values').desc())
            qry = qry.limit(10000)

            sql = str(qry.compile(compile_kwargs={"literal_binds": True}))
            print(sql)
            # special-case: pay-rate formula is replaced by hand-written SQL
            # (excludes GM orders from the numerator)
            if event.get('customEvent') == 'pay.touch_user_count/login.touch_user_count':
                stat_date=self.start_date
                end_date=self.end_date
                game=self.game
                sql=f"""
 select aa.date as date,round((a/b)*100,2) as values from
(select toDate(addHours({game}.event."#event_time", 8)) AS date,uniqCombined(if({game}.event."#event_name" = 'pay', {game}.event."#account_id", NULL)) as a from {game}.event
WHERE addHours({game}.event."#event_time", 8) >= '{stat_date}' AND addHours({game}.event."#event_time", 8) <= '{end_date}'
AND {game}.event."#event_name"='pay' and orderid NOT LIKE '%GM%' GROUP BY toDate(addHours({game}.event."#event_time", 8))) as aa
LEFT join
(SELECT toDate(addHours({game}.event."#event_time", 8)) AS date, round(uniqExact({game}.event."#account_id"), 2) AS b
FROM {game}.event
WHERE addHours({game}.event."#event_time", 8) >= '{stat_date}' AND addHours({game}.event."#event_time", 8) <= '{end_date}'
GROUP BY toDate(addHours({game}.event."#event_time", 8))) as bb on aa.date = bb.date ORDER by date
                """
            # special-case: new paying users counted by device
            if event.get('event_attr') == '触发用户数' and ['is_new_device', 'orderid']== [i['columnName'] for i in event.get('filts')]:
                stat_date=self.start_date
                end_date=self.end_date
                game=self.game
                sql=f"""SELECT toDate(addHours("#event_time", 8)) as date,
round(uniqExact("#distinct_id"), 2) AS values FROM
(SELECT toDate(addHours("#event_time", 8)) as date,"#event_time",`#event_name`,`#distinct_id`,`#account_id` from {game}.event WHERE
addHours("#event_time", 8) >= '{stat_date}' AND addHours("#event_time", 8) <= '{end_date}'
and `#event_name` = 'pay' and orderid NOT LIKE '%GM%') a
inner join
(SELECT toDate(addHours("#event_time", 8)) as date,"#event_time",is_new_device,`#distinct_id`,`#event_name`,`#account_id` from {game}.event WHERE
addHours("#event_time", 8) >= '{stat_date}' AND addHours("#event_time", 8) <= '{end_date}' and
`#event_name` = 'create_account' and is_new_device = 1) b on a.`#distinct_id`= b.`#distinct_id` and a.date = b.date
GROUP BY toDate(addHours("#event_time", 8))"""

            sqls.append({'sql': sql,
                         'groupby': [i.key for i in self.groupby],
                         'date_range': self.date_range,
                         'event_name': event_name_display or event_name,
                         'format': format,
                         'report_name': self.report_name or 'temp',
                         'time_particle': self.time_particle,
                         'start_date': self.start_date[:10],
                         'end_date': self.end_date[:10],
                         'is_show': is_show,
                         })

        return sqls
    # used by both the funnel-analysis and event-analysis models
    async def funnel_model_sql(self):
        """Funnel model via ClickHouse ``windowFunnel``.

        Example of the generated SQL:

        SELECT level, count(*) AS values
        FROM (SELECT windowFunnel(86400)(shjy.event."#event_time", shjy.event."#event_name" = 'create_role',
                       shjy.event."#event_name" = 'login') AS level
              FROM shjy.event
              WHERE addHours(shjy.event."#event_time", 8) >= '2021-05-16 00:00:00'
                AND addHours(shjy.event."#event_time", 8) <= '2021-06-14 23:59:59'
              GROUP BY shjy.event."#account_id") AS anon_1
        GROUP BY level
        ORDER BY level
        :return: dict with the SQL and result-shaping metadata
        """

        windows_gap = self.event_view['windows_gap'] * 86400
        event_time_col = getattr(self.event_tbl.c, '#event_time')
        event_name_col = getattr(self.event_tbl.c, '#event_name')
        date_col = func.toStartOfDay(func.addHours(event_time_col, self.zone_time)).label('date')
        e_account_id_col = getattr(self.event_tbl.c, '#account_id')

        sub_group = [date_col, *self.groupby, e_account_id_col]
        conds = []
        cond_level = []
        for item in self.events:
            event_filter, _ = await self.handler_filts((item['filts'], item.get('relation', 'and'))
                                                       , self.ext_filters)
            conds.append(
                and_(event_name_col == item['eventName'], *event_filter)
            )
            cond_level.append(item['eventName'])
        # TODO: _windows_gap_ is a placeholder replaced textually below,
        # because windowFunnel's parameterized form can't be built via func.*
        subq = sa.select(*[sa.Column(i.key) for i in self.groupby], date_col,
                         func.windowFunnel_windows_gap__(event_time_col, *conds).label('level')).select_from(
            self.event_tbl)

        g_event_filter, _ = await self.handler_filts((self.global_filters, self.global_relation)
                                                     , self.ext_filters)
        where = [
            func.addHours(event_time_col, self.zone_time) >= self.start_date,
            func.addHours(event_time_col, self.zone_time) <= self.end_date,
            *g_event_filter
        ]
        subq = subq.where(and_(*where)).group_by(*sub_group)
        subq = subq.subquery()

        qry = sa.select(sa.Column('date'), *[sa.Column(i.key) for i in self.groupby], sa.Column('level'),
                        func.count().label('values')).select_from(subq) \
            .where(sa.Column('level') > 0) \
            .group_by(sa.Column('date'), *[sa.Column(i.key) for i in self.groupby], sa.Column('level')) \
            .order_by(sa.Column('date'), *[sa.Column(i.key) for i in self.groupby], sa.Column('level'))
        sql = str(qry.compile(compile_kwargs={"literal_binds": True}))
        # sql = sql.replace('_windows_gap_', f"({windows_gap},'strict_increase')")
        sql = sql.replace('_windows_gap_', f"({windows_gap})")
        print(sql)
        return {'sql': sql,
                'groupby': [i.key for i in self.groupby],
                'date_range': self.date_range,
                'cond_level': cond_level,
                'time_particle': self.time_particle,
                'start_date': self.start_date[:10],
                'end_date': self.end_date[:10],
                }

    async def scatter_model_sql(self):
        """Scatter (distribution) model: per-account values to be bucketed
        downstream, either counts (times/days/hours) or a quota aggregate."""
        event = self.events[0]
        event_name = event['eventName']
        analysis = event['analysis']
        # the *_distinct analyses are computed downstream; use max here
        if analysis in ['list_distinct',"set_distinct","ele_distinct"]:
            analysis = 'max'
        e_account_id_col = getattr(self.event_tbl.c, '#account_id').label('uid')
        u_account_id_col = getattr(self.user_tbl.c, '#account_id')
        event_name_col = getattr(self.event_tbl.c, '#event_name')
        event_time_col = getattr(self.event_tbl.c, '#event_time').label('date')
        event_date_col = settings.TIME_GRAIN_EXPRESSIONS[self.time_particle](event_time_col, self.zone_time)

        quota_interval_arr = event.get('quotaIntervalArr')

        where = [
            # event_date_col >= self.start_date,
            # event_date_col <= self.end_date,
            func.addHours(event_time_col, self.zone_time) >= self.start_date,
            func.addHours(event_time_col, self.zone_time) <= self.end_date,

        ]
        if event_name != '*':
            where.append(event_name_col == event_name)
        event_filter, user_filter = await self.handler_filts((event['filts'], event.get('relation', 'and')),
                                                             (self.global_filters, self.global_relation)
                                                             , self.ext_filters)
        # user-side filters become an IN-subquery on the user table
        if user_filter:
            where.append(e_account_id_col.in_(sa.select(u_account_id_col).where(*user_filter)))
        where.extend(event_filter)
        values_col = func.count().label('values')
        if analysis in ['number_of_days', 'number_of_hours']:
            values_col = func.count(func.distinct(e_account_id_col)).label('values')

        if analysis in ['times', 'number_of_days', 'number_of_hours']:
            if self.time_particle == 'total':
                qry = sa.select(*self.groupby, values_col) \
                    .where(and_(*where)) \
                    .group_by(*self.groupby, e_account_id_col)
            else:
                qry = sa.select(event_date_col, *self.groupby, values_col) \
                    .where(and_(*where)) \
                    .group_by(event_date_col, *self.groupby, e_account_id_col)

            sql = str(qry.compile(compile_kwargs={"literal_binds": True}))
            print(sql)
            return {
                'sql': sql,
                'interval_type': event['intervalType'],
                'analysis': analysis,
                'quota_interval_arr': quota_interval_arr,
                'groupby': [i.key for i in self.groupby],
                'time_particle': self.time_particle,
                'start_date': self.start_date[:10],
                'end_date': self.end_date[:10],
            }
        elif event.get('quota'):
            event_attr_col = getattr(self.event_tbl.c, event['quota'])
            if self.time_particle == 'total':
                qry = sa.select(e_account_id_col,
                                settings.CK_FUNC[analysis](event_attr_col).label('values')) \
                    .where(and_(*where)) \
                    .group_by(*self.groupby, e_account_id_col)
            else:
                # qry = sa.select(event_date_col, e_account_id_col,
                #                 settings.CK_FUNC[analysis](event_attr_col).label('values')) \
                #     .where(and_(*where)) \
                #     .group_by(event_date_col, *self.groupby, e_account_id_col)
                qry = sa.select(event_date_col, e_account_id_col,
                                settings.CK_FUNC[analysis](event_attr_col).label('values')) \
                    .where(and_(*where)) \
                    .group_by(event_date_col,e_account_id_col)
            sql = str(qry.compile(compile_kwargs={"literal_binds": True}))
            print(sql)
            return {
                'sql': sql,
                'interval_type': event['intervalType'],
                'analysis': analysis,
                'quota_interval_arr': quota_interval_arr,
                'groupby': [i.key for i in self.groupby],
                'time_particle': self.time_particle,
                'start_date': self.start_date[:10],
                'end_date': self.end_date[:10],
            }

    def trace_model_sql(self):
        """Path-trace model: split each account's event stream into sessions
        (gap > interval_ts) and count event chains forward from a source event
        (sql_a) or backward to a target event (sql_b)."""
        session_interval = self.event_view.get('session_interval')
        session_type = self.event_view.get('session_type')
        session_type_map = {
            'minute': 60,
            'second': 1,
            'hour': 3600

        }
        # session-split threshold in seconds
        interval_ts = session_interval * session_type_map.get(session_type, 60)
        event_names = self.events.get('event_names')
        source_event = self.events.get('source_event', {}).get('eventName')
        source_type = self.events.get('source_event', {}).get('source_type')

        sql_a = f"""with
    '{source_event}' as start_event,
    {tuple(event_names)} as evnet_all,
    '{self.start_date}' as start_data,
    '{self.end_date}' as end_data
select event_chain,
       count() as values
from (with
          toUInt32(minIf(`#event_time`, `#event_name` = start_event)) AS start_event_ts,
          arraySort(
                  x ->
                      x.1,
                  arrayFilter(
                          x -> x.1 >= start_event_ts,
                          groupArray((toUInt32(`#event_time`), `#event_name`))
                      )
              ) AS sorted_events,
          arrayEnumerate(sorted_events) AS event_idxs,
          arrayFilter(
                  (x, y, z) -> z.1 >= start_event_ts and ((z.2 = start_event and y > {interval_ts}) or y > {interval_ts}) ,
                  event_idxs,
                  arrayDifference(sorted_events.1),
                  sorted_events
              ) AS gap_idxs,
          arrayMap(x -> x, gap_idxs) AS gap_idxs_,
          arrayMap(x -> if(has(gap_idxs_, x), 1, 0), event_idxs) AS gap_masks,
          arraySplit((x, y) -> y, sorted_events, gap_masks) AS split_events
      select `#account_id`,
             arrayJoin(split_events) AS event_chain_,
             arrayMap(x ->
                          x.2, event_chain_) AS event_chain,
             has(event_chain, start_event) AS has_midway_hit

      from (select `#event_time`, `#event_name`, `#account_id`
            from {self.game}.event
            where addHours(`#event_time`, {self.zone_time}) >= start_data
              and addHours(`#event_time`, {self.zone_time}) <= end_data
              and `#event_name` in evnet_all)
      group by `#account_id`
      HAVING has_midway_hit = 1
         )
where arrayElement(event_chain, 1) = start_event
GROUP BY event_chain
ORDER BY values desc
"""
        sql_b = f"""with
    '{source_event}' as end_event,
    {tuple(event_names)} as evnet_all,
    '{self.start_date}' as start_data,
    '{self.end_date}' as end_data
select event_chain,
       count() as values
from (with
          toUInt32(maxIf(`#event_time`, `#event_name` = end_event)) AS end_event_ts,
          arraySort(
                  x ->
                      x.1,
                  arrayFilter(
                          x -> x.1 <= end_event_ts,
                          groupArray((toUInt32(`#event_time`), `#event_name`))
                      )
              ) AS sorted_events,
          arrayEnumerate(sorted_events) AS event_idxs,
          arrayFilter(
                  (x, y, z) -> z.1 <= end_event_ts and (z.2 = end_event and y>{interval_ts}) OR y > {interval_ts},
                  event_idxs,
                  arrayDifference(sorted_events.1),
                  sorted_events
              ) AS gap_idxs,
          arrayMap(x -> x+1, gap_idxs) AS gap_idxs_,
          arrayMap(x -> if(has(gap_idxs_, x), 1,0), event_idxs) AS gap_masks,
          arraySplit((x, y) -> y, sorted_events, gap_masks) AS split_events
      select `#account_id`,
             arrayJoin(split_events) AS event_chain_,
             arrayMap(x ->
                          x.2, event_chain_) AS event_chain,
             has(event_chain, end_event) AS has_midway_hit
      from (select `#event_time`, `#event_name`, `#account_id`
            from {self.game}.event
            where addHours(`#event_time`, {self.zone_time}) >= start_data
              and addHours(`#event_time`, {self.zone_time}) <= end_data
              and `#event_name` in evnet_all)
      group by `#account_id`
      HAVING has_midway_hit = 1
         )
where arrayElement(event_chain, -1) = end_event
GROUP BY event_chain
ORDER BY values desc"""

        sql = sql_a if source_type == 'initial_event' else sql_b
        print(sql)
        return {
            'sql': sql,
            'time_particle': self.time_particle,
            'start_date': self.start_date[:10],
            'end_date': self.end_date[:10],
        }
where: + qry = sa.select().where(*where) + sql = str(qry.compile(compile_kwargs={"literal_binds": True})) + where_b = sql.split('WHERE ')[1] + + # 任意事件 + event_name_b = 1 if event_name_b == '*' else f"`#event_name` = '{event_name_b}'" + + days = (arrow.get(self.end_date).date() - arrow.get(self.start_date).date()).days + keep = [] + cnt = [] + retention_n = [*[k for k in range(1, 60)], 70-1, 75-1, 80-1, 85-1, 90-1, 95-1, 100-1, 110-1, 120-1, 150-1, 180-1, 210-1, 240-1, 270-1, 300-1, + 360-1] + + """ + cnt0-cnt1 as on1, + round(on1 * 100 / cnt0, 2) as `0p1`, + """ + + for i in retention_n: + keep.append( + f"""cnt{i}, + round(cnt{i} * 100 / cnt0, 2) as `p{i}`, + cnt0-cnt{i} as on{i}, + round(on{i} * 100 / cnt0, 2) as `op{i}` + """) + cnt.append(f"""sum(if(dateDiff('day',a.reg_date,b.visit_date)={i},1,0)) as cnt{i}""") + keep_str = ','.join(keep) + cnt_str = ','.join(cnt) + + sql = f""" +with '{event_name_a}' as start_event, + {event_name_b} as retuen_visit, + `{visit_name}` as visit, + '{self.start_date}' as start_data, + '{self.end_date}' as end_data, + toDate(addHours(`#event_time`, {self.zone_time})) as date + +select reg_date, + cnt0 , + {keep_str} + + from(select date, uniqExact(visit) as cnt0 from {self.game}.event +where `#event_name` = start_event and addHours(`#event_time`, {self.zone_time}) >= start_data and addHours(`#event_time`, {self.zone_time}) <= end_data and {where_a} +group by date) reg left join +(select a.reg_date, + {cnt_str} +from (select date as reg_date, visit from {self.game}.event where `#event_name` = start_event and addHours(`#event_time`, {self.zone_time}) >= start_data and addHours(`#event_time`, {self.zone_time}) <= end_data and {where_a} group by reg_date, visit) a + left join (select date as visit_date, visit from {self.game}.event where retuen_visit and addHours(`#event_time`, {self.zone_time}) >= start_data group by visit_date, visit) b on +a.visit = b.visit +group by a.reg_date) log on reg.date=log.reg_date +""" + print(sql) + 
return { + 'sql': sql, + 'date_range': self.date_range, + 'unit_num': self.unit_num, + 'retention_n': retention_n, + 'filter_item_type': filter_item_type, + 'filter_item': filter_item, + 'time_particle': self.time_particle, + 'start_date': self.start_date[:10], + 'end_date': self.end_date[:10], + } diff --git a/models/user_analysis.py b/models/user_analysis.py new file mode 100644 index 0000000..ccd2532 --- /dev/null +++ b/models/user_analysis.py @@ -0,0 +1,223 @@ +from typing import Tuple + +import arrow +import sqlalchemy as sa +import json + +from fastapi import Depends + +import pandas as pd + +from sqlalchemy import func, or_, and_, not_ + +import crud +import schemas +from core.config import settings +from db import get_database +from db.redisdb import get_redis_pool, RedisDrive + + +class UserAnalysis: + def __init__(self, game: str, data_in: schemas.CkQuery, rdb: RedisDrive = Depends(get_redis_pool)): + self.game = game + self.rdb = rdb + self.user_tbl = None + self.event_view = data_in.eventView + self.events = data_in.events + + self.zone_time: int = 0 + self.data_in = data_in + + self.global_filters = [] + self.groupby = None + self.time_particle = None + self.date_range = None + self.unit_num = None + self.global_relation = 'and' + self.ext_filters = (self.data_in.ext_filter.get('filts', []), self.data_in.ext_filter.get('relation', 'and')) + + async def init(self, *args, **kwargs): + if self.data_in.report_id: + db = get_database() + report = await crud.report.get(db, id=self.data_in.report_id) + self.event_view = report['query']['eventView'] + self.events = report['query']['events'] + + else: + self.event_view = self.data_in.eventView + self.events = self.data_in.events + + await self._init_table() + self.zone_time = self._get_zone_time() + self.time_particle = self._get_time_particle_size() + self.groupby = self._get_group_by() + self.unit_num = self._get_unit_num() + self.global_relation = self.event_view.get('relation', 'and') + # 用户自带过滤 + if 
'data_where' in kwargs: + self.global_filters.extend(kwargs['data_where'].get(self.game, [])) + + async def _init_table(self): + """ + 从redis中取出表字段,构建表结构 + :return: + """ + res_json = await self.rdb.get(f'{self.game}_user') + columns = json.loads(res_json).keys() + metadata = sa.MetaData(schema=self.game) + self.user_tbl = sa.Table('user_view', metadata, *[sa.Column(column) for column in columns]) + + def _get_time_particle_size(self): + return self.event_view.get('timeParticleSize') or 'P1D' + + def _get_unit_num(self): + return self.event_view.get('unitNum') + + def _get_group_by(self): + return [getattr(self.user_tbl.c, item['columnName']) for item in self.event_view.get('groupBy', [])] + + def _get_zone_time(self): + return int(self.event_view.get('zone_time', 8)) + + # def _get_filters(self, filters): + # tbl = self.user_tbl + # where = [] + # for item in filters: + # col = getattr(tbl.c, item['columnName']) + # + # comparator = item['comparator'] + # ftv = item['ftv'] + # if comparator == '==': + # if len(ftv) > 1: + # where.append(or_(*[col == v for v in ftv])) + # else: + # where.append(col == ftv[0]) + # elif comparator == '>=': + # where.append(col >= ftv[0]) + # elif comparator == '<=': + # where.append(col <= ftv[0]) + # elif comparator == '>': + # where.append(col > ftv[0]) + # elif comparator == '<': + # where.append(col < ftv[0]) + # + # elif comparator == 'is not null': + # where.append(col.isnot(None)) + # elif comparator == 'is null': + # where.append(col.is_(None)) + # + # elif comparator == '!=': + # where.append(col != ftv[0]) + # + # elif comparator == 'like': + # where.append(col.like(f'%{ftv[0]}%')) + # + # elif comparator == 'not like': + # where.append(col.notlike(f'%{ftv[0]}%')) + # + # elif comparator == 'in': + # where.append(col.in_(ftv)) + # + # + # return where + + def handler_filts(self, *filters): + """ + :param filters: (filts:list,relation:str) + :param g_f: + :param relation: + :return: + """ + + user_filters = [] + for filter 
in filters: + filts = filter[0] + relation = filter[1] + user_filter = [] + for item in filts: + + where = user_filter + + col = sa.Column(item['columnName']) + if item.get('data_type') == 'datetime': + col = func.addHours(col, self.zone_time) + + comparator = item['comparator'] + ftv = item['ftv'] + if comparator == '==': + if len(ftv) > 1: + where.append(or_(*[col == v for v in ftv])) + else: + where.append(col == ftv[0]) + elif comparator == '>=': + where.append(col >= ftv[0]) + elif comparator == '<=': + where.append(col <= ftv[0]) + elif comparator == '>': + where.append(col > ftv[0]) + elif comparator == '<': + where.append(col < ftv[0]) + + elif comparator == 'is not null': + where.append(col.isnot(None)) + elif comparator == 'is null': + where.append(col.is_(None)) + + elif comparator == 'like': + where.append(col.like(f'%{ftv[0]}%')) + + elif comparator == 'not like': + where.append(col.notlike(f'%{ftv[0]}%')) + + elif comparator == 'in': + where.append(col.in_(ftv)) + + elif comparator == '!=': + where.append(col != ftv[0]) + if relation == 'and': + if user_filter: + user_filters.append(and_(*user_filter)) + else: + if user_filter: + user_filters.append(or_(*user_filter)) + + return user_filters + + def property_model(self): + event = self.events + selectd = getattr(self.user_tbl.c, event['quota']) + qry = sa.select(selectd) + + account_id_col = getattr(self.user_tbl.c, '#account_id') + binduid_col = getattr(self.user_tbl.c, '#account_id') + # 聚合方式 + analysis = event['analysis'] + + if analysis == 'trig_user_num': + selectd = [func.count().label('values')] + elif analysis == 'distinct_count': + selectd = [ + func.count(sa.distinct(getattr(self.user_tbl.c, event['quota']))).label('values')] + + else: + selectd = [ + func.round(getattr(func, analysis)(getattr(self.user_tbl.c, event['quota'])), 2).label( + 'values')] + + where = self.handler_filts((event['filts'], event.get('relation')), + (self.global_filters, self.global_relation), + self.ext_filters + ) + 
qry = sa.select((*self.groupby, *selectd)).where(*where) + + qry = qry.group_by(*self.groupby) + qry = qry.order_by(sa.Column('values').desc()) + qry = qry.limit(1000) + sql = str(qry.compile(compile_kwargs={"literal_binds": True})) + print(sql) + result = {'sql': sql, + 'groupby': [i.key for i in self.groupby], + 'quota': event['quota'] + } + + return result diff --git a/models/user_label.py b/models/user_label.py new file mode 100644 index 0000000..88b4075 --- /dev/null +++ b/models/user_label.py @@ -0,0 +1,234 @@ +""" +本质查出符合条件的用户id +得到sql 查uid +""" + +import re +from typing import Tuple + +import arrow +import sqlalchemy as sa +import json + +from fastapi import Depends + +import pandas as pd + +from sqlalchemy import func, or_, and_, not_ + +import crud +import schemas +from core.config import settings +from db import get_database + +from db.redisdb import get_redis_pool, RedisDrive + + +class UserClusterDef: + def __init__(self, game: str, cluster_name: str, data_where: list = None, rdb: RedisDrive = get_redis_pool(), + **kwargs): + self.game = game + self.rdb = rdb + self.cluster_name = cluster_name + self.event_tbl = None + self.data_where = data_where or [] + self.kwargs = kwargs + + async def _init_tal(self): + res_json = await self.rdb.get(f'{self.game}_event') + columns = json.loads(res_json).keys() + metadata = sa.MetaData(schema=self.game) + self.event_tbl = sa.Table('event', metadata, *[sa.Column(column) for column in columns]) + + res_json = await self.rdb.get(f'{self.game}_user') + columns = json.loads(res_json).keys() + metadata = sa.MetaData(schema=self.game) + self.user_tbl = sa.Table('user_view', metadata, *[sa.Column(column) for column in columns]) + + self.u_account_id_col = getattr(self.user_tbl.c, '#account_id') + self.e_account_id_col = getattr(self.event_tbl.c, '#account_id') + self.account_id_col = sa.Column('#account_id') + + async def init(self): + + self.data_in = ( + await crud.user_label.find_one(get_database(), {'cluster_name': 
self.cluster_name, 'game': self.game}, + {'qp': 1})).get('qp') + await self._init_tal() + self.events = self.data_in['user_cluster_def']['events'] + self.event_relation = self.data_in['user_cluster_def']['event_relation'] + + async def handler_filts(self, *filters): + """ + + :param filters: (filts:list,relation:str) + :param g_f: + :param relation: + :return: + """ + + user_filters = [] + event_filters = [] + for filter in filters: + filts = filter[0] + relation = filter[1] + user_filter = [] + event_filter = [] + for item in filts: + comparator = item['comparator'] + if item['tableType'] == 'user': + where = user_filter + elif item['tableType'] == 'event': + where = event_filter + else: + continue + + tbl = getattr(self, f'{item["tableType"]}_tbl') + col = getattr(tbl.c, item['columnName']) + + ftv = item['ftv'] + if comparator == '==': + if len(ftv) > 1: + where.append(or_(*[col == v for v in ftv])) + else: + where.append(col == ftv[0]) + elif comparator == '>=': + where.append(col >= ftv[0]) + elif comparator == '<=': + where.append(col <= ftv[0]) + elif comparator == '>': + where.append(col > ftv[0]) + elif comparator == '<': + where.append(col < ftv[0]) + + elif comparator == 'is not null': + where.append(col.isnot(None)) + elif comparator == 'is null': + where.append(col.is_(None)) + + elif comparator == 'like': + where.append(col.like(f'%{ftv[0]}%')) + + elif comparator == 'not like': + where.append(col.notlike(f'%{ftv[0]}%')) + + elif comparator == 'in': + where.append(col.in_(ftv)) + + elif comparator == '!=': + where.append(col != ftv[0]) + if relation == 'and': + if event_filter: + event_filters.append(and_(*event_filter)) + if user_filter: + user_filters.append(and_(*user_filter)), + else: + if event_filter: + event_filters.append(or_(*event_filter)) + if user_filter: + user_filters.append(or_(*user_filter)) + + return event_filters, user_filters + + def to_sql_qry(self): + qry = None + for event in self.events: + event_name = event['event_name'] + 
event_name_col = getattr(self.event_tbl.c, '#event_name') + analysis = event['prop_quota']['analysis'] + quota = event['prop_quota']['quota'] + num = event['num'].split(',') + date_type = event.get('date_type', 'dynamic') + e_days = event.get('e_days') + s_days = event.get('s_days') + is_touch = event.get('is_touch', True) + + filts = event['filts'] + zone = event.get('zone', 8) + + # 账号数据过滤 + data_where = [] + filters = [] + filters.extend(self.data_where) + for item in filters: + tmp = settings.CK_CALC_SYMBO[item['comparator']](sa.Column(item['columnName']), item['ftv']) + data_where.append(tmp) + + event_time_col = func.addHours(getattr(self.event_tbl.c, '#event_time'), zone) + date_where = [] + if date_type == 'static': + start_time = event['start_time'] + end_time = event['end_time'] + date_where.extend( + [settings.CK_CALC_SYMBO['>='](event_time_col, start_time), + settings.CK_CALC_SYMBO['<='](event_time_col, end_time)] + ) + elif date_type == 'dynamic': + start_time = arrow.get().shift(days=-int(s_days)).strftime('%Y-%m-%d 00:00:00') + end_time = arrow.get().shift(days=-int(e_days)).strftime('%Y-%m-%d 23:59:59') + date_where.extend( + [settings.CK_CALC_SYMBO['>='](event_time_col, start_time), + settings.CK_CALC_SYMBO['<='](event_time_col, end_time)] + ) + else: + # 所有时间 + pass + + uce_calcu_symbol = event['uce_calcu_symbol'] + + event_name_where = [] + if event_name != '*': + # 任意事件 + event_name_where.append(settings.CK_CALC_SYMBO['=='](event_name_col, event_name)) + if quota != '*': + selectd = [self.account_id_col, + func.round(getattr(func, analysis)(getattr(self.event_tbl.c, quota)), 2).label( + 'values') + ] + qry_tmp = sa.select(self.account_id_col).select_from( + sa.select(selectd).where(*date_where, *event_name_where, *data_where).group_by( + self.e_account_id_col).having( + settings.CK_CALC_SYMBO[uce_calcu_symbol](sa.Column('values'), *num))) + else: + selectd = [self.account_id_col] + qry_tmp = sa.select(self.account_id_col).select_from( + 
sa.select(selectd).where(*date_where, *event_name_where, *data_where)) + + if qry is None: + qry = qry_tmp + else: + if self.event_relation == 'and': + qry = sa.select(self.account_id_col).select_from( + sa.join(qry, qry_tmp, getattr(qry.c, '#account_id') == getattr(qry_tmp.c, '#account_id'))) + elif self.event_relation == 'or': + qry = sa.select(sa.distinct(self.account_id_col)).select_from(sa.union_all(qry, qry_tmp)) + # 处理没做过 + if not is_touch: + qry = sa.select(self.u_account_id_col).where(self.u_account_id_col.notin_(qry)) + + return qry + + def to_sql(self): + qry = self.to_sql_qry() + sql = str(qry.compile(compile_kwargs={"literal_binds": True})) + print(sql) + return sql + + def cluster_user_list(self): + sub_qry = self.to_sql_qry() + page = self.kwargs.get('page') or 1 + page -= 1 + limit = self.kwargs.get('limit', 50) + qry = sa.select('*').where(self.u_account_id_col.in_(sub_qry)).order_by(sa.Column('#reg_time')) \ + .offset(page * limit) \ + .limit(limit) + sql = str(qry.compile(compile_kwargs={"literal_binds": True})) + print(sql) + return sql + + def cluster_user_count(self): + sub_qry = self.to_sql_qry() + qry = sa.select(func.count(self.account_id_col).label('values')).select_from(sub_qry) + sql = str(qry.compile(compile_kwargs={"literal_binds": True})) + print(sql) + return sql diff --git a/models/x_analysis.py b/models/x_analysis.py new file mode 100644 index 0000000..e6a4654 --- /dev/null +++ b/models/x_analysis.py @@ -0,0 +1,203 @@ +from typing import Tuple + +import arrow +import sqlalchemy as sa +import json + +from fastapi import Depends + +import pandas as pd + +from sqlalchemy import func, or_, and_, not_, MetaData + +import crud +import schemas +from core.config import settings +from db import get_database +from db.redisdb import get_redis_pool, RedisDrive + + +class XAnalysis: + def __init__(self, data_in: schemas.CkQuery, game: str): + self.data_in = data_in + self.game = game + self.event_view = dict() + self.events = [] + + 
self.global_filters = [] + self.account_filters = [] + self.global_relation = 'and' + self.date_range = [] + + self.ext_filters = (self.data_in.ext_filter.get('filts', []), self.data_in.ext_filter.get('relation', 'and')) + + def _get_global_filters(self): + return self.event_view.get('filts') or [] #获取event_view字典里面filts的值,或返回空列表 + + async def init(self, *args, **kwargs): + if self.data_in.report_id: + db = get_database() + report = await crud.report.get(db, id=self.data_in.report_id) + self.event_view = report['query']['eventView'] + self.events = report['query']['events'] + try: + e_days = self.event_view['e_days'] + s_days = self.event_view['s_days'] + except: + # 兼容以前的 + e_days, s_days = self.event_view['recentDay'].split('-') + # self.event_view['endTime'] = arrow.get().shift(days=-int(e_days)+1).strftime('%Y-%m-%d 23:59:59') + # self.event_view['startTime'] = arrow.get().shift(days=-int(s_days)+1).strftime('%Y-%m-%d 00:00:00') + self.event_view['endTime'] = arrow.get().shift(days=-int(e_days)).strftime('%Y-%m-%d 23:59:59') + self.event_view['startTime'] = arrow.get().shift(days=-int(s_days)).strftime('%Y-%m-%d 00:00:00') + + else: + self.event_view = self.data_in.eventView + self.events = self.data_in.events + for d in pd.date_range(self.event_view['startTime'], self.event_view['endTime'], freq='D', tz='UTC'): + self.date_range.append(d.date()) + + self.global_filters = self._get_global_filters() + self.global_relation = self.event_view.get('relation', 'and') + + # 用户自带过滤 + if 'data_where' in kwargs: + self.account_filters = kwargs['data_where'].get(self.game, []) + + def handler_filts(self, *filters): + """ + :param filters: (filts:list,relation:str) + :param g_f: + :param relation: + :return: + """ + + event_filters = [] + for filter in filters: + filts = filter[0] + relation = filter[1] + event_filter = [] + for item in filts: + + where = event_filter + + col = sa.Column(item['columnName']) + + comparator = item['comparator'] + ftv = item['ftv'] + if 
comparator == '==': + if len(ftv) > 1: + where.append(or_(*[col == v for v in ftv])) + else: + where.append(col == ftv[0]) + elif comparator == '>=': + where.append(col >= ftv[0]) + elif comparator == '<=': + where.append(col <= ftv[0]) + elif comparator == '>': + where.append(col > ftv[0]) + elif comparator == '<': + where.append(col < ftv[0]) + + elif comparator == 'is not null': + where.append(col.isnot(None)) + elif comparator == 'is null': + where.append(col.is_(None)) + + elif comparator == 'like': + where.append(col.like(f'%{ftv[0]}%')) + + elif comparator == 'not like': + where.append(col.notlike(f'%{ftv[0]}%')) + + elif comparator == 'in': + where.append(col.in_(ftv)) + + elif comparator == '!=': + where.append(col != ftv[0]) + if relation == 'and': + if event_filter: + event_filters.append(and_(*event_filter)) + else: + if event_filter: + event_filters.append(or_(*event_filter)) + + return event_filters + + + def ltv_model_sql(self): + days = (arrow.get(self.event_view['endTime']).date() - arrow.get(self.event_view['startTime']).date()).days + quota = self.event_view['quota'] + select_ltv = [] + sumpay = [] + sum_money = [] + # for i in range(1, days + 2): + ltv_n = [*[k for k in range(1, 61)], 70, 75, 80, 85, 90, 95, 100, 110, 120, 150, 180, 210, 240, 270, 300, 360] + for i in ltv_n: + # select_ltv.append(func.round(sa.Column(f'sumpay_{i}') / sa.Column('cnt1'), 2).label(f'LTV{i}')) + select_ltv.append( + f"if(dateDiff('day', reg.date, now())<{i - 1}, '-',toString(round(sumpay_{i} / cnt1, 2))) AS LTV{i}") + sumpay.append(f"sum(if(dateDiff('day', a.date, b.date) < {i}, money, 0)) as sumpay_{i}") + sum_money.append(f"sumpay_{i}") + # qry = sa.select(*select_ltv) + # select_ltv_str = str(qry.compile(compile_kwargs={"literal_binds": True})) + # select_ltv_str = select_ltv_str.split('SELECT ')[1] + sumpay_str = ','.join(sumpay) + select_ltv_str = ','.join(select_ltv) + sum_money_str = ','.join(sum_money) + + where = [ + sa.Column('date') >= 
self.event_view['startTime'].split(' ')[0], + sa.Column('date') <= self.event_view['endTime'].split(' ')[0] + ] + if quota == '#distinct_id': + where.append(sa.Column('is_new_device') == 1) + + qry = sa.select().where(*where) + sql = str(qry.compile(compile_kwargs={"literal_binds": True})) + where_str = sql.split('WHERE ')[1] + + where_order = self.handler_filts((self.global_filters, self.global_relation)) #global_relation就是 and + where_order_str = 1 + if where_order: + qry = sa.select().where(*where_order) + sql = str(qry.compile(compile_kwargs={"literal_binds": True})) + where_order_str = sql.split('WHERE ')[1] + + where_account = self.handler_filts((self.account_filters, 'and'), self.ext_filters) + where_account_str = 1 + if where_account: + + qry = sa.select().where(*where_account) + sql = str(qry.compile(compile_kwargs={"literal_binds": True})) + where_account_str = sql.split('WHERE ')[1] + sql = f"""SELECT reg.date as date, + cnt1, + {select_ltv_str}, + {sum_money_str} + FROM (SELECT toDate(addHours(`#event_time`, `#zone_offset`)) as date, uniqExact(`{quota}`) cnt1 + FROM {self.game}.event + where `#event_name` = 'create_account' + AND {where_str} AND {where_account_str} + GROUP BY toDate(addHours(`#event_time`, `#zone_offset`))) as reg + left join + (select a.date, + {sumpay_str} + from (SELECT toDate(addHours(`#event_time`, `#zone_offset`)) as date, `{quota}` + FROM {self.game}.event + where `#event_name` = 'create_account' + AND {where_str} AND {where_account_str} ) as a + left join (select `{quota}`, unitPrice/100 as money, toDate(addHours(`#event_time`, `#zone_offset`)) as date + from {self.game}.event + where `#event_name` = 'pay' and {where_order_str} AND {where_account_str}) b + on a.`{quota}` = b.`{quota}` + group by a.date) log on reg.date = log.date + order by date + """ + print(sql) + return {'sql': sql, 'quota': quota, + 'start_date': self.event_view['startTime'][:10], + 'end_date': self.event_view['endTime'][:10], + 'date_range': 
self.date_range, + 'ltv_n': ltv_n + } \ No newline at end of file diff --git a/schemas/__init__.py b/schemas/__init__.py new file mode 100644 index 0000000..c297ede --- /dev/null +++ b/schemas/__init__.py @@ -0,0 +1,27 @@ +from .msg import Msg +from .user import * +from .project import * +from .folder import * +from .space import * +from .dashboard import * +from .report import * +from .authotity import * +from .table_struct import * +from .data_auth import * +from .data_attr import * +from .sql import * +from .api_log import * +from .event_mana import * +from .xquery import * +from .api_list import * +from .role import * +from .check_data import * +from .userlabel import * +from .select_map import * +from .project_number import * +from .proid_map import * +from .api_board import * +from .url_list import * +from .user_url import * +from .api_module import * +from .event_list import * \ No newline at end of file diff --git a/schemas/api_board.py b/schemas/api_board.py new file mode 100644 index 0000000..b689d0a --- /dev/null +++ b/schemas/api_board.py @@ -0,0 +1,12 @@ +from typing import Any, List, Union + +from pydantic import BaseModel, Field + +from schemas import DBBase +from typing import Optional + + +class Api_board(BaseModel): + api_path: str = None + api_name: str = None + name: str \ No newline at end of file diff --git a/schemas/api_list.py b/schemas/api_list.py new file mode 100644 index 0000000..014c893 --- /dev/null +++ b/schemas/api_list.py @@ -0,0 +1,37 @@ +from typing import Any, List, Union + +from pydantic import BaseModel, Field + +from schemas import DBBase +from typing import Optional + + +class ApiBase(BaseModel): + path: str = None + name: str = None + desc: str = None + + +class AddApi(ApiBase): + path: str + name: str + desc: str = None + + +class UpdateApi(BaseModel): + path: str + name: str + + +class AddApiDB(DBBase, AddApi): + pass + + +class DelApi(BaseModel): + ids: List[str] = Field(..., description='要删除的id') + + +class 
EditApi(BaseModel): + id: str = Field(..., description='要编辑的id') + name: str + desc: str diff --git a/schemas/api_log.py b/schemas/api_log.py new file mode 100644 index 0000000..ae08eb4 --- /dev/null +++ b/schemas/api_log.py @@ -0,0 +1,9 @@ +from typing import Any + +from pydantic import BaseModel + + +class ApiLogInsert(BaseModel): + api: str + ms: int + user_id: str diff --git a/schemas/api_module.py b/schemas/api_module.py new file mode 100644 index 0000000..2aee95e --- /dev/null +++ b/schemas/api_module.py @@ -0,0 +1,18 @@ +from typing import Any, List, Union + +from pydantic import BaseModel, Field + +from schemas import DBBase +from typing import Optional + + +class Url_module(BaseModel): + auth_id: str = None + path_name: str = None + api_list: List[str] = None + api_name: List[str] = None + state: List[bool] = None + +class Add_module(BaseModel): + auth_id: str + url:str \ No newline at end of file diff --git a/schemas/authotity.py b/schemas/authotity.py new file mode 100644 index 0000000..88aa35e --- /dev/null +++ b/schemas/authotity.py @@ -0,0 +1,82 @@ +from enum import Enum +from typing import List + +from pydantic import BaseModel + + +class AddRoleForUserInDomain(BaseModel): + username: str + role_id: str + game: str + auth_id: str + + +class AddRoleForUsersInDomain(BaseModel): + data: List[AddRoleForUserInDomain] + + +class GetPermissionsForUserInDomain(BaseModel): + role_id: str + game: str + + +class DeleteRolesForUserInDomain(BaseModel): + username: str + role_id: str + game: str + + +class Policy(BaseModel): + role_id: str + game: str + path: str + act: str = '*' + + +class AddPolicy(BaseModel): + path_list: List[str] + role_id: str + game: str + act: str = '*' + + +class DelPolicy(Policy): + pass + + +class Ptype(str, Enum): + p = 'p' + g = 'g' + + +class CasbinRoleCreate(BaseModel): + role_name: str + role_api: List[str] + + +class CasbinDB(BaseModel): + ptype: Ptype + v0: str + v1: str + v2: str + + +class AccountCreate(BaseModel): + username: 
str + role_name: str + # nickname: str + data_auth_id: str + + +class AccountsCreate(BaseModel): + accounts: List[AccountCreate] + project_id: str + + +class AccountDeleteUser(BaseModel): + name: str + + +class AccountSetRole(BaseModel): + name: str + role_name: str diff --git a/schemas/base.py b/schemas/base.py new file mode 100644 index 0000000..40c5718 --- /dev/null +++ b/schemas/base.py @@ -0,0 +1,28 @@ +import uuid +from typing import Optional, Union + +from bson import ObjectId +from pydantic import BaseModel, Field, validator +from utils import * + + +# # mongodb _id 类型 +# class OId(ObjectId): +# @classmethod +# def __get_validators__(cls): +# yield cls.validate +# +# @classmethod +# def validate(cls, v): +# try: +# return ObjectId(v) +# except: +# raise ValueError('无效的格式') + + +class DBBase(BaseModel): + id: str = Field(None, alias='_id') + + @validator('id', pre=True, always=True) + def default_id(cls, v): + return v or get_uid() diff --git a/schemas/check_data.py b/schemas/check_data.py new file mode 100644 index 0000000..fdb303e --- /dev/null +++ b/schemas/check_data.py @@ -0,0 +1,20 @@ +from pydantic import BaseModel + + +class CheckData(BaseModel): + db_name: str + event_name: str + is_unique: bool + props: dict + default_field: dict = dict() + where: dict = dict() + game:str + + +class AddTemplate(BaseModel): + check: CheckData + title: str + + +class DelTemplate(BaseModel): + title: str diff --git a/schemas/dashboard.py b/schemas/dashboard.py new file mode 100644 index 0000000..2130e9c --- /dev/null +++ b/schemas/dashboard.py @@ -0,0 +1,105 @@ +import uuid +from datetime import datetime +from enum import Enum +from typing import List, Dict + +from pydantic import BaseModel + +from schemas import DBBase + + +class DashboardBase(BaseModel): + name: str = None + + +# 解析请求json 创建项目 +class DashboardCreate(DashboardBase): + name: str + project_id: str + # cat: str + pid: str + + +class ReadDashboard(BaseModel): + id: str + + +class 
DashboardDelete(BaseModel): + ids: List[str] + + +class Report(BaseModel): + name: str = None + report_id: str = None + graph_type: str = None + ascending: bool = None + model: str = None + graph_size: str = None + sort: int = None + modelswitch: bool = None + avesumdata: bool = True + daydata: bool = True + reverseorder: bool = True + + +class EditShowReport(BaseModel): + dashboard_id: str + config: Report + + +class Category(str, Enum): + project = 'kanban' + space = 'space' + + +class EditDashboard(BaseModel): + dashboard_id: str + new_name: str + + +class DashboardMove(BaseModel): + source_ids: List[str] + dest_pid: str + cat: Category + + +class Sort(BaseModel): + dashboard_id: str + sort: int + + +class DashboardSort(BaseModel): + sort: List[Sort] + + +class DashboardCopy(BaseModel): + source_ids: List[str] + dest_project_id: str + +class DashboardCopyToSpace(BaseModel): + source_ids: List[str] + project_id: str + dest_space_id: str + +class AddReport(DBBase): + report_ids: List[Report] + + +class DelReport(DBBase): + report_id: str + + +class EditReport(DBBase): + report: Report + + +# -------------------------------------------------------------- +# 数据库模型 +class DashboardDB(DBBase): + name: str + user_id: str + project_id: str + # cat: Category + reports: List[str] = [] + pid: str + create_date: datetime = datetime.now() diff --git a/schemas/data_attr.py b/schemas/data_attr.py new file mode 100644 index 0000000..d85d382 --- /dev/null +++ b/schemas/data_attr.py @@ -0,0 +1,14 @@ +from pydantic import BaseModel + + +class DataAttrEdit(BaseModel): + name: str + show_name: str + is_show: bool + cat: str + +class Add_attr(BaseModel): + cat: str + new_attribute: str + state: str + data_type: str \ No newline at end of file diff --git a/schemas/data_auth.py b/schemas/data_auth.py new file mode 100644 index 0000000..4662b37 --- /dev/null +++ b/schemas/data_auth.py @@ -0,0 +1,24 @@ +from typing import List + +from pydantic import BaseModel + + +class 
DataAuthCreate(BaseModel): + title: str + data: List[str] = [] + + +class DataAuthEdit(BaseModel): + data_auth_id: str + title: str + data: List[str] = [] + + +class DataAuthSet(BaseModel): + username: str + data_auth_id: str + + +class LoadProQuotas(BaseModel): + event_name: str + model: str = None diff --git a/schemas/event_list.py b/schemas/event_list.py new file mode 100644 index 0000000..5a69502 --- /dev/null +++ b/schemas/event_list.py @@ -0,0 +1,10 @@ +from typing import List, Dict +from pydantic import BaseModel + +class Event_list(BaseModel): + game:str + details:List[Dict] + +class Details(BaseModel): + event:str + event_name:str \ No newline at end of file diff --git a/schemas/event_mana.py b/schemas/event_mana.py new file mode 100644 index 0000000..806c2d8 --- /dev/null +++ b/schemas/event_mana.py @@ -0,0 +1,8 @@ +from pydantic import BaseModel + + +class EventMateEdit(BaseModel): + event_name: str + show_name: str + is_show: bool + desc: str diff --git a/schemas/folder.py b/schemas/folder.py new file mode 100644 index 0000000..4868af6 --- /dev/null +++ b/schemas/folder.py @@ -0,0 +1,41 @@ +import uuid +from datetime import datetime +from enum import Enum +from typing import List + +from pydantic import BaseModel + +from schemas import DBBase + + +class FolderBase(BaseModel): + name: str = None + + +# 解析请求json 创建项目 +class FolderCreate(FolderBase): + name: str + project_id: str + cat: str + pid: str + + +class FolderDelete(DBBase): + pass + + +class Category(str, Enum): + project = 'kanban' + space = 'space' + + +# -------------------------------------------------------------- +# 数据库模型 +class FolderDB(DBBase): + name: str + user_id: str + project_id: str + cat: Category + pid: str + members: List[str] = [] + create_date: datetime = datetime.now() diff --git a/schemas/msg.py b/schemas/msg.py new file mode 100644 index 0000000..7721feb --- /dev/null +++ b/schemas/msg.py @@ -0,0 +1,9 @@ +from typing import Any + +from pydantic import BaseModel + + +class 
Msg(BaseModel): + code: int + msg: str + data: Any diff --git a/schemas/proid_map.py b/schemas/proid_map.py new file mode 100644 index 0000000..e69de29 diff --git a/schemas/project.py b/schemas/project.py new file mode 100644 index 0000000..08f7206 --- /dev/null +++ b/schemas/project.py @@ -0,0 +1,71 @@ +import uuid +from datetime import datetime +from typing import List, Optional + +from pydantic import BaseModel, Field + +from schemas import DBBase + + +class ProjectBase(BaseModel): + name: str = None + + +class MemberRole(BaseModel): + username: str + user_id: str + role_name: str + data_auth_id: str + + +class ProjectAddMember(BaseModel): + members: List[MemberRole] + project_id: str + + +class ProjectMember(BaseModel): + members: List[str] + project_id: str + + +class ProjectDetail(BaseModel): + project_id: str + + +class ProjectClean(BaseModel): + project_id: str + +class Import_project(BaseModel): + game: str + games: str + +class ProjectRename(BaseModel): + project_id: str + rename: str + + +class ProjectDelMember(BaseModel): + project_id: str + role: str + username: str + + +# 解析请求json 创建项目 +class ProjectCreate(ProjectBase): + name: str = Field(..., title='项目名') + game: str = Field(..., title='游戏代号') + #qudao:str = Field(...,title='渠道') + +# 查询某个项目看板 +class ProjectKanban(DBBase): + pass + + +# -------------------------------------------------------------- +# 数据库模型 +class ProjectDB(DBBase): + name: str + game: str + user_id: str + members: List[str] = [] + create_date: datetime = datetime.now() diff --git a/schemas/project_number.py b/schemas/project_number.py new file mode 100644 index 0000000..6fda869 --- /dev/null +++ b/schemas/project_number.py @@ -0,0 +1,17 @@ +from pydantic import BaseModel +from typing import List + + +class ProjectnumberList(BaseModel): + main_channel: str + ditch: str + + +class ProjectnumberInsert(BaseModel): + game: str + ditch: List[ProjectnumberList] + name: str + +class AddProjectnumber(BaseModel): + game: str + ditch: 
import json
import uuid
from datetime import datetime
from enum import Enum
from typing import List

from pydantic import BaseModel, Field, validator, Json

from schemas import DBBase


class ReportBase(BaseModel):
    """Shared base for report payloads."""
    name: str = None
    query: str = None
    project_id: str = None


class ReportCreate(ReportBase):
    """Create a report: name, description, owning project, query and category."""
    name: str
    desc: str
    project_id: str
    query: dict
    cat: str


class ReportEdit(BaseModel):
    """Edit an existing report's query, name and description."""
    report_id: str
    query: dict
    name: str
    desc: str


class ReportCopy(BaseModel):
    """Copy a batch of reports into another project."""
    report_ids: List[str]
    dest_project_id: str


class ReportDelete(DBBase):
    """Delete a report; only the id inherited from DBBase is needed."""
    pass


class ReportRead(BaseModel):
    """Read reports of a project, optionally filtered by id/dashboard."""
    project_id: str
    report_id: List = []
    dashboard_id: str = None


# --------------------------------------------------------------
# Database model
class ReportDB(DBBase):
    name: str
    user_id: str
    project_id: str
    desc: str
    query: dict
    cat: str
    # Fix: `= datetime.now()` is evaluated once at import time; use
    # default_factory so each report records its own creation time.
    create_date: datetime = Field(default_factory=datetime.now)
import uuid
from datetime import datetime
from enum import Enum
from typing import List

from pydantic import BaseModel, Field

from schemas import DBBase


class SpaceBase(BaseModel):
    """Shared base for space payloads."""
    name: str = None


class Authority(str, Enum):
    """Access level inside a space: read-write or read-only."""
    rw = 'rw'
    r = 'r'


class Member(BaseModel):
    """One space member and their access level."""
    user_id: str
    authority: Authority


# Request body for creating a space
class SpaceCreate(SpaceBase):
    name: str
    project_id: str
    members: List[Member] = []
    is_all_member: bool = False       # whether every project member is included
    authority: Authority = 'r'        # default access for is_all_member


class SpaceDelete(DBBase):
    """Delete a space; only the id inherited from DBBase is needed."""
    pass


class SpaceDetail(BaseModel):
    space_id: str


class SpaceRename(BaseModel):
    space_id: str
    new_name: str


class AddSpaceMembers(BaseModel):
    space_id: str
    members: List[Member]


# --------------------------------------------------------------
# Database model
class SpaceDB(DBBase):
    name: str
    user_id: str
    project_id: str
    members: List[Member] = []
    # Fix: `= datetime.now()` freezes the timestamp at import time;
    # default_factory evaluates per document.
    create_date: datetime = Field(default_factory=datetime.now)
str # 开始时间 + end_time: str # 结束时间 + pages: int = 1 # 分页的当前页 + + +class Ck_solo_user(BaseModel): + account_id : str # #account_id + start_time: str # 开始时间 例:2022-04-02 + end_time: str # 结束时间 + event_list: List[Dict] =None#事件名 + +class Times(BaseModel): + start_time: str # 开始时间 例:2022-04-02 00:00:00 + end_time: str # 结束时间 \ No newline at end of file diff --git a/schemas/table_struct.py b/schemas/table_struct.py new file mode 100644 index 0000000..b9f76e9 --- /dev/null +++ b/schemas/table_struct.py @@ -0,0 +1,13 @@ +from enum import Enum + +from pydantic import BaseModel + + +class TableEnum(str, Enum): + event = 'event' + user = 'user' + + +class GetTable(BaseModel): + + name: TableEnum diff --git a/schemas/token.py b/schemas/token.py new file mode 100644 index 0000000..49c3e66 --- /dev/null +++ b/schemas/token.py @@ -0,0 +1,14 @@ +from typing import Optional + +from pydantic import BaseModel + + +class Token(BaseModel): + token: str + code: int + name: str + email: str + msg: str + + + diff --git a/schemas/url_list.py b/schemas/url_list.py new file mode 100644 index 0000000..fc12a1c --- /dev/null +++ b/schemas/url_list.py @@ -0,0 +1,61 @@ +from typing import Any, List, Union + +from pydantic import BaseModel, Field + +from schemas import DBBase +from typing import Optional + + +class Url_list(BaseModel): + name: str = None + auth_id: str = None + path_name: str = None + api_list: List[str] = None + api_name: List[str] = None + state: List[bool] = None + system: int = None + + +class Url_lists(BaseModel): + name: str = None + auth_id: str = None + path_name: str = None + api_list: List[str] = None + api_name: List[str] = None + state: List[bool] = None + system: int = None + game: str = None + + +class Url_data(BaseModel): + api_list: List[str] = None + api_name: List[str] = None + path_name: str = None + stath_name: List[bool] = None + + +class Datalist(BaseModel): + path: str + path_name: str + role_id: str + system: int + + +class Add_role(BaseModel): + path_name: 
class Editname(BaseModel):
    """Edit a role's name/description; only role_id is required."""
    role_id: str  # id of the role to edit
    name: str = None
    desc: str = None
class ReadClusterUser(BaseModel):
    """Paged request for the members of a user-label cluster."""
    cluster_name: str
    page: int = 1    # 1-based page index
    limit: int = 50  # page size
from typing import Any, List, Optional
from datetime import date
from pydantic import BaseModel


class Overview(BaseModel):
    """Query filters for the x-analysis overview endpoint.

    All list fields are optional filters; ``None`` means "do not filter
    on this field".  Fix: the original annotated them as plain
    ``List[str]`` while defaulting to ``None`` — pydantic v1 silently
    treats that as Optional, but the annotation was misleading, so it is
    now spelled out.
    """
    cat: str = 'account'                    # aggregation category; defaults to per-account
    bundle_id: Optional[List[str]] = None
    os: Optional[List[str]] = None
    sdate: date                             # start date
    edate: date                             # end date
    owner_name: Optional[List[str]] = None  # presumably channel owners — confirm with caller
    channel: Optional[List[str]] = None
-- Returning-player gap distribution.
-- For every account with more than one login day inside the window, each
-- pair of consecutive login dates yields days = gap - 1 (full days with no
-- login in between).  Output: per (login_date, days), how many such gaps.
-- NOTE(review): event times are shifted +8h before taking the date —
-- presumably stored in UTC and reported in UTC+8; confirm.
with Date(now()) as end_date,
     '2021-08-20' as start_date
select
    login_date, days, count() as num
from (
    -- explode each (login_date, gap) pair to one row
    select `#account_id`, arrayJoin(log2) log3, log3.1 as login_date, log3.2 as days
    from (select `#account_id`,
                 login_date,
                 -- log1: day gaps between consecutive login dates, minus 1
                 arrayMap((x, y)->dateDiff('day', x, y) - 1, arraySlice(login_date, 1, length(login_date) - 1),
                          arraySlice(login_date, 2, length(login_date) - 1)) log1,
                 -- log2: pair each login date with the gap that follows it
                 arrayMap((x, y)-> (x, y), arraySlice(login_date, 1, length(login_date) - 1), log1) log2

          from (select `#account_id`, arraySort(groupArray(date)) login_date
                from (
                    with toDate(addHours(`#event_time`, 8)) as date
                    select date, `#account_id`
                    from zhengba.event
                    where `#event_name` = 'login'
                      and date >= start_date
                      and date <= end_date
                    group by date, `#account_id`
                )
                group by `#account_id`
                -- only accounts that logged in on at least two distinct days
                having length(login_date) > 1)))
group by login_date,days
order by login_date,days
-- New accounts per channel: distinct accounts that fired `create_account`
-- on the given date, grouped by channel (owner_name).
-- NOTE(review): addHours(+8) suggests event times are UTC displayed as
-- UTC+8; confirm against ingestion.
with '2021-09-15' as date
select owner_name, uniqExact(`#account_id`) as n
from xiangsu.event
where `#event_name` = 'create_account'
  and toDate(addHours(`#event_time`, 8)) = date
group by owner_name
new_account_num, new_account_array + from (select date, `#bundle_id`, sum(num) as active_num + from zhengba.active_account + where date >= start_date + and date <= end_date + and `#os` = os + and owner_name = owner + group by date, `#bundle_id`) as active_tbl + left join (select date, + `#bundle_id`, + sum(num) as new_account_num, + flatten(groupArray(account)) as new_account_array + from zhengba.new_account + where date >= start_date + and date <= end_date + and `#os` = os + and owner_name = owner + group by date, `#bundle_id`) as new_account_tbl + on active_tbl.date = new_account_tbl.date and + active_tbl.`#bundle_id` = new_account_tbl.`#bundle_id`) as tb1 + left join (select date, `#bundle_id`, sum(money) as money, sum(account_num) as recharge_account_num + from zhengba.recharge_game + where date >= start_date + and date <= end_date + and `#os` = os + and owner_name = owner + group by date, `#bundle_id`) as recharge_tbl + on recharge_tbl.date = tb1.date and tb1.`#bundle_id` = recharge_tbl.`#bundle_id`) as tb2 + left join zhengba.new_account_recharge as tb3 on tb2.date = tb3.date +order by date desc diff --git a/sql/新增付费.sql b/sql/新增付费.sql new file mode 100644 index 0000000..c7f7351 --- /dev/null +++ b/sql/新增付费.sql @@ -0,0 +1,17 @@ +create view new_account_recharge as (select date, + arrayMap((x, y) -> (x, y),groupArray(binduid), groupArray(money)) as bid_money +from (select date, binduid, money + from (select date, account, binduid, money + from (SELECT toDate(addHours(`#event_time`, `#zone_offset`)) AS date, + arrayDistinct(groupArray(binduid)) AS account + FROM zhengba.event + WHERE role_idx = 1 + GROUP BY toDate(addHours(`#event_time`, `#zone_offset`))) as tb1 + left join (select toDate(addHours(`#event_time`, `#zone_offset`)) as date, + binduid, + sum(money) as money + from zhengba.event + where `#event_name` = 'rechargeGame' + group by toDate(addHours(`#event_time`, `#zone_offset`)), binduid ) as tb2 + on tb1.date = tb2.date) + where has(account, binduid)) 
-- Day-N retention, no grouping.
-- reg: accounts firing `create_account` per registration date (cnt1).
-- log: of those accounts, how many fired the return-visit event (`login`)
--      exactly 1..6 days after registering (cnt2..cnt7).
-- Output columns `2留`..`7留` are retention percentages; '-' is emitted
-- while the corresponding day window has not yet elapsed.
-- NOTE(review): `retuen_visit` is a typo for return_visit (alias only,
-- harmless at runtime).
with 'create_account' as start_event,
     'login' as retuen_visit,
     `#account_id` as visit,
     toDate(addHours(`#event_time`, 8)) as date

select reg_date,
       cnt1,
       if(dateDiff('day', reg_date, toDate(now())) >= 2, toString(round(cnt2 * 100 / cnt1, 2)), '-') as `2留`,
       if(dateDiff('day', reg_date, toDate(now())) >= 3, toString(round(cnt3 * 100 / cnt1, 2)), '-') as `3留`,
       if(dateDiff('day', reg_date, toDate(now())) >= 4, toString(round(cnt4 * 100 / cnt1, 2)), '-') as `4留`,
       if(dateDiff('day', reg_date, toDate(now())) >= 5, toString(round(cnt5 * 100 / cnt1, 2)), '-') as `5留`,
       if(dateDiff('day', reg_date, toDate(now())) >= 6, toString(round(cnt6 * 100 / cnt1, 2)), '-') as `6留`,
       if(dateDiff('day', reg_date, toDate(now())) >= 7, toString(round(cnt7 * 100 / cnt1, 2)), '-') as `7留`

  from(select date, uniqExact(visit) as cnt1 from zhengba.event
where `#event_name` = start_event
group by date) reg left join
(select a.reg_date,
        sum(if(dateDiff('day',a.reg_date,b.visit_date)=1,1,0)) as cnt2,
        sum(if(dateDiff('day',a.reg_date,b.visit_date)=2,1,0)) as cnt3,
        sum(if(dateDiff('day',a.reg_date,b.visit_date)=3,1,0)) as cnt4,
        sum(if(dateDiff('day',a.reg_date,b.visit_date)=4,1,0)) as cnt5,
        sum(if(dateDiff('day',a.reg_date,b.visit_date)=5,1,0)) as cnt6,
        sum(if(dateDiff('day',a.reg_date,b.visit_date)=6,1,0)) as cnt7
from (select date as reg_date, visit from zhengba.event where `#event_name` = start_event group by reg_date, visit) a
         left join (select date as visit_date, visit from zhengba.event where `#event_name` = retuen_visit group by visit_date, visit) b on
a.visit = b.visit
group by a.reg_date) log on reg.date=log.reg_date
'create_account' as start_event, + 'login' as retuen_visit, + `#account_id` as visit, + toDate(addHours(`#event_time`, 8)) as date + +select reg_date, + owner_name, + cnt1, + if(dateDiff('day', reg_date, toDate(now())) >= 2, toString(round(cnt2 * 100 / cnt1, 2)), '-') as `2留`, + if(dateDiff('day', reg_date, toDate(now())) >= 3, toString(round(cnt3 * 100 / cnt1, 2)), '-') as `3留`, + if(dateDiff('day', reg_date, toDate(now())) >= 4, toString(round(cnt4 * 100 / cnt1, 2)), '-') as `4留`, + if(dateDiff('day', reg_date, toDate(now())) >= 5, toString(round(cnt5 * 100 / cnt1, 2)), '-') as `5留`, + if(dateDiff('day', reg_date, toDate(now())) >= 6, toString(round(cnt6 * 100 / cnt1, 2)), '-') as `6留`, + if(dateDiff('day', reg_date, toDate(now())) >= 7, toString(round(cnt7 * 100 / cnt1, 2)), '-') as `7留` + + from(select date,owner_name, uniqExact(visit) as cnt1 from zhengba.event +where `#event_name` = start_event +group by date,owner_name) reg left join +(select a.reg_date,owner_name, + sum(if(dateDiff('day',a.reg_date,b.visit_date)=1,1,0)) as cnt2, + sum(if(dateDiff('day',a.reg_date,b.visit_date)=2,1,0)) as cnt3, + sum(if(dateDiff('day',a.reg_date,b.visit_date)=3,1,0)) as cnt4, + sum(if(dateDiff('day',a.reg_date,b.visit_date)=4,1,0)) as cnt5, + sum(if(dateDiff('day',a.reg_date,b.visit_date)=5,1,0)) as cnt6, + sum(if(dateDiff('day',a.reg_date,b.visit_date)=6,1,0)) as cnt7 +from (select date as reg_date,owner_name, visit from zhengba.event where `#event_name` = start_event group by reg_date, visit,owner_name) a + left join (select date as visit_date,owner_name, visit from zhengba.event where `#event_name` = retuen_visit group by visit_date, visit,owner_name) b on +a.visit = b.visit and a.owner_name=b.owner_name +group by a.reg_date,a.owner_name) log on reg.date=log.reg_date and reg.owner_name=log.owner_name +order by reg_date \ No newline at end of file diff --git a/update_api_list.py b/update_api_list.py new file mode 100644 index 0000000..40832b5 --- /dev/null +++ 
b/update_api_list.py @@ -0,0 +1,408 @@ +data = { + "code": 0, + "msg": "ok", + "data": [ + { + "title": "test", + "list": [ + { + "api": "/api/v1/test/test", + "title": "api 列表" + } + ] + }, + { + "title": "用户接口", + "list": [ + { + "api": "/api/v1/user/login", + "title": "OAuth2兼容令牌登录,获取将来令牌的访问令牌" + }, + { + "api": "/api/v1/user/me", + "title": "Test access token" + }, + { + "api": "/api/v1/user/reset_password", + "title": "修改其他人密码" + }, + { + "api": "/api/v1/user/reset_my_password", + "title": "修改自己的密码" + }, + { + "api": "/api/v1/user/edit_profile", + "title": "编辑用户资料" + }, + { + "api": "/api/v1/user/all_account", + "title": "获取所有用户" + }, + { + "api": "/api/v1/user/add_account", + "title": "创建新账号" + } + ] + }, + { + "title": "项目接口", + "list": [ + { + "api": "/api/v1/project/create", + "title": "创建项目" + }, + { + "api": "/api/v1/project/", + "title": "查看自己拥有的项目" + }, + { + "api": "/api/v1/project/detail", + "title": "查看项目信息" + }, + { + "api": "/api/v1/project/rename", + "title": "修改项目名" + }, + { + "api": "/api/v1/project/add_members", + "title": "项目添加成员" + }, + { + "api": "/api/v1/project/edit_member", + "title": "编辑成员权限 角色和数据" + }, + { + "api": "/api/v1/project/members", + "title": "查看项目成员" + }, + { + "api": "/api/v1/project/kanban", + "title": "获取自己的看板" + } + ] + }, + { + "title": "文件夹接口", + "list": [ + { + "api": "/api/v1/folder/create", + "title": "创建文件夹" + }, + { + "api": "/api/v1/folder/delete", + "title": "删除文件夹" + } + ] + }, + { + "title": "空间接口", + "list": [ + { + "api": "/api/v1/space/create", + "title": "创建空间" + }, + { + "api": "/api/v1/space/delete", + "title": "删除空间" + }, + { + "api": "/api/v1/space/rename", + "title": "重命名空间" + }, + { + "api": "/api/v1/space/set_members", + "title": "设置空间成员" + }, + { + "api": "/api/v1/space/detail", + "title": "空间详细" + } + ] + }, + { + "title": "看板接口", + "list": [ + { + "api": "/api/v1/dashboard/create", + "title": "创建看板" + }, + { + "api": "/api/v1/dashboard/edit_show_report", + "title": "" + }, + { + "api": 
"/api/v1/dashboard/delete", + "title": "删除看板" + }, + { + "api": "/api/v1/dashboard/move", + "title": "移动看板" + }, + { + "api": "/api/v1/dashboard/copy", + "title": "复制到其他项目" + }, + { + "api": "/api/v1/dashboard/add_report", + "title": "添加报表" + }, + { + "api": "/api/v1/dashboard/del_report", + "title": "删除报表" + }, + { + "api": "/api/v1/dashboard/edit", + "title": "编辑看板名" + }, + { + "api": "/api/v1/dashboard/", + "title": "获取一个看板" + } + ] + }, + { + "title": "报表接口", + "list": [ + { + "api": "/api/v1/report/create", + "title": "新建报表" + }, + { + "api": "/api/v1/report/edit", + "title": "编辑报表" + }, + { + "api": "/api/v1/report/copy", + "title": "复制报表到其他项目" + }, + { + "api": "/api/v1/report/read_report", + "title": "获取已建报表" + }, + { + "api": "/api/v1/report/delete", + "title": "删除报表" + } + ] + }, + { + "title": "数据权限", + "list": [ + { + "api": "/api/v1/data_auth/add_data_auth", + "title": "创建数据权限" + }, + { + "api": "/api/v1/data_auth/edit_data_auth", + "title": "修改数据权限" + }, + { + "api": "/api/v1/data_auth/quotas_map", + "title": "" + }, + { + "api": "/api/v1/data_auth/filter_map", + "title": "" + }, + { + "api": "/api/v1/data_auth/all_event", + "title": "获取所有事件" + }, + { + "api": "/api/v1/data_auth/list", + "title": "获取前项目数据权限" + }, + { + "api": "/api/v1/data_auth/my_event", + "title": "获取自己的事件权限" + }, + { + "api": "/api/v1/data_auth/user_property", + "title": "获取用户属性" + }, + { + "api": "/api/v1/data_auth/load_prop_quotas", + "title": "事件属性 聚合条件" + }, + { + "api": "/api/v1/data_auth/load_filter_props", + "title": "事件属性 过滤条件" + } + ] + }, + { + "title": "数据管理", + "list": [ + { + "api": "/api/v1/data_mana/attr_list", + "title": "事件属性列表" + }, + { + "api": "/api/v1/data_mana/attr_edit", + "title": "编辑事件属性" + }, + { + "api": "/api/v1/data_mana/event_list", + "title": "事件列表" + }, + { + "api": "/api/v1/data_mana/event_edit", + "title": "编辑事件" + } + ] + }, + { + "title": "ck", + "list": [ + { + "api": "/api/v1/ck/sql", + "title": "原 sql 查询 " + }, + { + "api": 
"/api/v1/ck/event_model_sql", + "title": "事件分析模型 sql" + }, + { + "api": "/api/v1/ck/event_model", + "title": "事件分析" + }, + { + "api": "/api/v1/ck/retention_model_sql", + "title": "留存查询 sql" + }, + { + "api": "/api/v1/ck/retention_model", + "title": "留存数据模型" + }, + { + "api": "/api/v1/ck/funnel_model_sql", + "title": "漏斗数据模型 sql" + }, + { + "api": "/api/v1/ck/funnel_model", + "title": "漏斗数据模型" + }, + { + "api": "/api/v1/ck/scatter_model_sql", + "title": "分布分析 sql" + }, + { + "api": "/api/v1/ck/scatter_model", + "title": "分布分析 模型" + }, + { + "api": "/api/v1/ck/trace_model_sql", + "title": "路径分析 sql" + }, + { + "api": "/api/v1/ck/trace_model", + "title": "路径分析" + }, + { + "api": "/api/v1/ck/user_property_model_sql", + "title": "用户属性sql" + }, + { + "api": "/api/v1/ck/user_property_model", + "title": "用户属性分析" + } + ] + }, + { + "title": "xck", + "list": [ + { + "api": "/api/v1/ck/ltv_model_sql", + "title": "ltv模型sql " + }, + { + "api": "/api/v1/ck/ltv_model", + "title": "ltv模型sql " + } + ] + }, + { + "title": "api接口管理", + "list": [ + { + "api": "/api/v1/authz/add_role_domain", + "title": "在域内为用户添加角色" + }, + { + "api": "/api/v1/authz/get_permissions_for_user_in_domain", + "title": "获取域内用户或角色的权限" + }, + { + "api": "/api/v1/authz/del_role_user_domain", + "title": "删除用户角色域" + }, + { + "api": "/api/v1/authz/add_policy", + "title": "向当前策略添加授权规则" + }, + { + "api": "/api/v1/authz/del_policy", + "title": "删除角色api权限" + }, + { + "api": "/api/v1/authz/api_list", + "title": "GetPermissionsForUserInDomain\n所有的api" + }, + { + "api": "/api/v1/authz/add_api", + "title": "添加api" + }, + { + "api": "/api/v1/authz/del_api", + "title": "删除api" + }, + { + "api": "/api/v1/authz/edit_api", + "title": "编辑api" + }, + { + "api": "/api/v1/authz/domain", + "title": "可选择域 游戏代号" + }, + { + "api": "/api/v1/authz/add_roles", + "title": "新建角色" + }, + { + "api": "/api/v1/authz/roles", + "title": "域内所有角色" + }, + { + "api": "/api/v1/authz/edit_role", + "title": "修改角色名" + } + ] + } + ] +} + +import crud 
import crud
import schemas
from core.config import settings

# Rebuild the stored api_list collection from the static `data` catalogue above.
from db import connect_to_mongo, get_database
import asyncio

connect_to_mongo()
db = get_database()


async def main():
    """Wipe the api_list collection and re-insert every api from `data`.

    Clearing first guarantees that endpoints removed from the catalogue
    do not linger in the database.
    """
    await crud.api_list.delete(db, {})
    for group in data['data']:
        # `group['title']` is the section heading; it is not persisted here.
        for entry in group['list']:
            add_data = schemas.AddApi(path=entry['api'], name=entry['title'], desc='')
            await crud.api_list.add_api(db, add_data)


# asyncio.run replaces the deprecated get_event_loop()/run_until_complete
# pattern and also closes the loop cleanly on exit.
asyncio.run(main())
def generate_password_reset_token(email: str) -> str:
    """Create a signed JWT for password reset.

    The email is stored in the standard ``sub`` claim; the token becomes
    valid immediately (``nbf``) and expires after
    ``EMAIL_RESET_TOKEN_EXPIRE_HOURS``.
    """
    delta = timedelta(hours=settings.EMAIL_RESET_TOKEN_EXPIRE_HOURS)
    now = datetime.utcnow()
    expires = now + delta
    exp = expires.timestamp()
    encoded_jwt = jwt.encode(
        {"exp": exp, "nbf": now, "sub": email}, settings.SECRET_KEY, algorithm="HS256",
    )
    return encoded_jwt


def verify_password_reset_token(token: str) -> Optional[str]:
    """Return the subject of a valid reset token, or None if invalid/expired.

    Bug fix: generate_password_reset_token stores the email under ``sub``,
    but this function previously read only ``user_id`` and therefore always
    returned None for tokens it had itself issued.  Read ``sub`` first and
    fall back to ``user_id`` for tokens minted elsewhere.
    """
    try:
        decoded_token = jwt.decode(token, settings.SECRET_KEY, algorithms=["HS256"])
        return decoded_token.get('sub') or decoded_token.get('user_id')
    except jwt.JWTError:
        return None
from io import StringIO


class Config:
    """represents an implementation of the ConfigInterface (an INI-style parser).

    Supports ``[section]`` headers, ``#`` and ``;`` comments, and a trailing
    ``\\`` as a line-continuation marker.  Values are addressed with keys of
    the form ``"section::option"`` (or just ``"option"`` for the default
    section).  Lookup keys are lower-cased by get()/set(); option names read
    from a file are stored verbatim.
    """

    # DEFAULT_SECTION specifies the name of a section if no name provided
    DEFAULT_SECTION = 'default'
    # DEFAULT_COMMENT defines what character(s) indicate a comment `#`
    DEFAULT_COMMENT = '#'
    # DEFAULT_COMMENT_SEM defines what alternate character(s) indicate a comment `;`
    DEFAULT_COMMENT_SEM = ';'
    # DEFAULT_MULTI_LINE_SEPARATOR defines what character indicates a multi-line content
    DEFAULT_MULTI_LINE_SEPARATOR = '\\'

    def __init__(self):
        # Per-instance store {section: {option: value}}.  The original code
        # additionally kept a shared mutable ``_data = dict()`` on the class
        # itself — a classic shared-state hazard — which is removed here.
        self._data = dict()

    @staticmethod
    def new_config(conf_name):
        """Create a Config by parsing the file at ``conf_name``."""
        c = Config()
        c._parse(conf_name)
        return c

    @staticmethod
    def new_config_from_text(text):
        """Create a Config by parsing configuration text held in memory."""
        c = Config()
        f = StringIO(text)
        c._parse_buffer(f)
        return c

    def add_config(self, section, option, value):
        """Store ``value`` under ``section``/``option`` (empty section maps to the default one)."""
        if section == '':
            section = self.DEFAULT_SECTION

        if section not in self._data.keys():
            self._data[section] = {}

        self._data[section][option] = value

    def _parse(self, fname):
        """Parse the configuration file at ``fname``."""
        with open(fname, 'r', encoding='utf-8') as f:
            self._parse_buffer(f)

    def _parse_buffer(self, f):
        """Parse configuration lines from the file-like object ``f``.

        Lines ending in the continuation marker are buffered and flushed as a
        single logical ``option = value`` entry.
        """
        section = ''
        line_num = 0
        buf = []
        can_write = False
        while True:
            # Flush the previous logical line once we know it is complete.
            if can_write:
                self._write(section, line_num, buf)
                can_write = False
            line_num = line_num + 1

            line = f.readline()

            if not line:
                # EOF: flush whatever is still buffered.
                if len(buf) > 0:
                    self._write(section, line_num, buf)
                break
            line = line.strip()

            if '' == line or self.DEFAULT_COMMENT == line[0:1] or self.DEFAULT_COMMENT_SEM == line[0:1]:
                can_write = True
                continue
            elif '[' == line[0:1] and ']' == line[-1]:
                # Section header: flush pending content, then switch section.
                if len(buf) > 0:
                    self._write(section, line_num, buf)
                    can_write = False
                section = line[1:-1]
            else:
                p = ''
                if self.DEFAULT_MULTI_LINE_SEPARATOR == line[-1]:
                    # Continuation: strip the marker and keep buffering.
                    p = line[0:-1].strip()
                    p = p + ' '
                else:
                    p = line
                    can_write = True
                buf.append(p)

    def _write(self, section, line_num, b):
        """Flush the buffered logical line ``b`` as one option assignment."""
        buf = "".join(b)
        if len(buf) <= 0:
            return
        option_val = buf.split('=', 1)

        if len(option_val) != 2:
            raise RuntimeError('parse the content error : line {} , {} = ?'.format(line_num, option_val[0]))

        option = option_val[0].strip()
        value = option_val[1].strip()

        self.add_config(section, option, value)

        # Reset the caller's buffer in place.
        del b[:]

    def get_bool(self, key):
        """lookups up the value using the provided key and converts the value to a bool."""
        return self.get(key).capitalize() == "True"

    def get_int(self, key):
        """lookups up the value using the provided key and converts the value to a int"""
        return int(self.get(key))

    def get_float(self, key):
        """lookups up the value using the provided key and converts the value to a float"""
        return float(self.get(key))

    def get_string(self, key):
        """lookups up the value using the provided key and converts the value to a string"""
        return self.get(key)

    def get_strings(self, key):
        """lookups up the value using the provided key and converts the value to an array of string"""
        value = self.get(key)
        if value == "":
            return None
        return value.split(",")

    def set(self, key, value):
        """Store ``value`` under ``key`` (``section::option`` or bare option; lower-cased)."""
        if len(key) == 0:
            raise RuntimeError("key is empty")

        keys = key.lower().split('::')
        if len(keys) >= 2:
            section = keys[0]
            option = keys[1]
        else:
            section = ""
            option = keys[0]
        self.add_config(section, option, value)

    def get(self, key):
        """section.key or key — returns '' when the key is absent."""
        keys = key.lower().split('::')
        if len(keys) >= 2:
            section = keys[0]
            option = keys[1]
        else:
            section = self.DEFAULT_SECTION
            option = keys[0]

        if section in self._data.keys():
            if option in self._data[section].keys():
                return self._data[section][option]
        return ''
import logging

from utils.casbin.effect import Effector, get_effector, effect_to_bool
from utils.casbin.model import Model, FunctionMap
from utils.casbin.persist import Adapter
from utils.casbin.persist.adapters import FileAdapter
from utils.casbin.rbac import default_role_manager
from utils.casbin.util import generate_g_function, SimpleEval, util


class CoreEnforcer:
    """CoreEnforcer defines the core functionality of an enforcer."""

    # Class-level defaults; all are re-assigned per instance during init.
    model_path = ""
    model = None
    fm = None
    eft = None

    adapter = None
    watcher = None
    rm_map = None

    enabled = False
    auto_save = False
    auto_build_role_links = False

    def __init__(self, model=None, adapter=None):
        """Dispatch to the right init path based on the argument kinds:
        (model_path, policy_path), (model_path, adapter) or (Model, adapter).
        """
        self.logger = logging.getLogger()
        if isinstance(model, str):
            if isinstance(adapter, str):
                self.init_with_file(model, adapter)
            else:
                self.init_with_adapter(model, adapter)
        else:
            if isinstance(adapter, str):
                raise RuntimeError("Invalid parameters for enforcer.")
            else:
                self.init_with_model_and_adapter(model, adapter)

    def init_with_file(self, model_path, policy_path):
        """initializes an enforcer with a model file and a policy file."""
        a = FileAdapter(policy_path)
        self.init_with_adapter(model_path, a)

    def init_with_adapter(self, model_path, adapter=None):
        """initializes an enforcer with a database adapter."""
        m = self.new_model(model_path)
        self.init_with_model_and_adapter(m, adapter)

        self.model_path = model_path

    def init_with_model_and_adapter(self, m, adapter=None):
        """initializes an enforcer with a model and a database adapter."""
        if not isinstance(m, Model) or adapter is not None and not isinstance(adapter, Adapter):
            raise RuntimeError("Invalid parameters for enforcer.")

        self.adapter = adapter

        self.model = m
        self.model.print_model()
        self.fm = FunctionMap.load_function_map()

        self._initialize()

        # Do not initialize the full policy when using a filtered adapter
        if self.adapter and not self.is_filtered():
            self.load_policy()

    def _initialize(self):
        """Reset role managers, effector, watcher and the toggle flags."""
        self.rm_map = dict()
        self.eft = get_effector(self.model.model["e"]["e"].value)
        self.watcher = None

        self.enabled = True
        self.auto_save = True
        self.auto_build_role_links = True

        self.init_rm_map()

    @staticmethod
    def new_model(path="", text=""):
        """creates a model, either from a CONF file path or from raw text."""
        m = Model()
        if len(path) > 0:
            m.load_model(path)
        else:
            m.load_model_from_text(text)

        return m

    def load_model(self):
        """reloads the model from the model CONF file.
        Because the policy is attached to a model, so the policy is invalidated
        and needs to be reloaded by calling LoadPolicy().
        """
        self.model = self.new_model()
        self.model.load_model(self.model_path)
        self.model.print_model()
        self.fm = FunctionMap.load_function_map()

    def get_model(self):
        """gets the current model."""
        return self.model

    def set_model(self, m):
        """sets the current model."""
        self.model = m
        self.fm = FunctionMap.load_function_map()

    def get_adapter(self):
        """gets the current adapter."""
        return self.adapter

    def set_adapter(self, adapter):
        """sets the current adapter."""
        self.adapter = adapter

    def set_watcher(self, watcher):
        """sets the current watcher."""
        self.watcher = watcher

    def get_role_manager(self):
        """gets the current role manager."""
        return self.rm_map['g']

    def set_role_manager(self, rm):
        """sets the current role manager."""
        self.rm_map['g'] = rm

    def set_effector(self, eft):
        """sets the current effector."""
        self.eft = eft

    def clear_policy(self):
        """clears all policy."""
        self.model.clear_policy()

    def init_rm_map(self):
        """Create one role manager per "g" grouping type declared in the model."""
        if 'g' in self.model.model.keys():
            for ptype in self.model.model['g']:
                self.rm_map[ptype] = default_role_manager.RoleManager(10)

    def load_policy(self):
        """reloads the policy from file/database."""
        self.model.clear_policy()
        self.adapter.load_policy(self.model)

        self.init_rm_map()
        self.model.print_policy()
        if self.auto_build_role_links:
            self.build_role_links()

    def load_filtered_policy(self, filter):
        """reloads a filtered policy from file/database."""
        self.model.clear_policy()

        if not hasattr(self.adapter, "is_filtered"):
            raise ValueError("filtered policies are not supported by this adapter")

        self.adapter.load_filtered_policy(self.model, filter)
        self.init_rm_map()
        self.model.print_policy()
        if self.auto_build_role_links:
            self.build_role_links()

    def load_increment_filtered_policy(self, filter):
        """LoadIncrementalFilteredPolicy append a filtered policy from file/database."""
        if not hasattr(self.adapter, "is_filtered"):
            raise ValueError("filtered policies are not supported by this adapter")

        self.adapter.load_filtered_policy(self.model, filter)
        self.model.print_policy()
        if self.auto_build_role_links:
            self.build_role_links()

    def is_filtered(self):
        """returns true if the loaded policy has been filtered."""
        return hasattr(self.adapter, "is_filtered") and self.adapter.is_filtered()

    def save_policy(self):
        """Persist the current policy through the adapter (not allowed when filtered)."""
        if self.is_filtered():
            raise RuntimeError("cannot save a filtered policy")

        self.adapter.save_policy(self.model)

        if self.watcher:
            self.watcher.update()

    def enable_enforce(self, enabled=True):
        """changes the enforcing state of Casbin,
        when Casbin is disabled, all access will be allowed by the Enforce() function.
        """
        self.enabled = enabled

    def enable_auto_save(self, auto_save):
        """controls whether to save a policy rule automatically to the adapter when it is added or removed."""
        self.auto_save = auto_save

    def enable_auto_build_role_links(self, auto_build_role_links):
        """controls whether to rebuild the role inheritance relations when a role is added or deleted."""
        self.auto_build_role_links = auto_build_role_links

    def build_role_links(self):
        """manually rebuild the role inheritance relations."""
        for rm in self.rm_map.values():
            rm.clear()

        self.model.build_role_links(self.rm_map)

    def add_named_matching_func(self, ptype, fn):
        """add_named_matching_func add MatchingFunc by ptype RoleManager"""
        try:
            self.rm_map[ptype].add_matching_func(fn)
            return True
        except Exception:
            # Narrowed from a bare except: never swallow KeyboardInterrupt/SystemExit.
            return False

    def add_named_domain_matching_func(self, ptype, fn):
        """add_named_domain_matching_func add MatchingFunc by ptype to RoleManager"""
        try:
            self.rm_map[ptype].add_domain_matching_func(fn)
            return True
        except Exception:
            return False

    def enforce(self, *rvals):
        """decides whether a "subject" can access a "object" with the operation "action",
        input parameters are usually: (sub, obj, act).
        """
        result, _ = self.enforce_ex(*rvals)
        return result

    def enforce_ex(self, *rvals):
        """decides whether a "subject" can access a "object" with the operation "action",
        input parameters are usually: (sub, obj, act).
        return judge result with reason
        """
        # BUG FIX: when enforcement is disabled every request must be allowed
        # (see enable_enforce), and a (result, explain) pair must be returned
        # so enforce() can unpack it; the original `return False` did neither.
        if not self.enabled:
            return True, []

        functions = self.fm.get_functions()

        # Expose each "g" grouping as a callable the matcher can invoke.
        if "g" in self.model.model.keys():
            for key, ast in self.model.model["g"].items():
                rm = ast.rm
                functions[key] = generate_g_function(rm)

        if "m" not in self.model.model.keys():
            raise RuntimeError("model is undefined")

        if "m" not in self.model.model["m"].keys():
            raise RuntimeError("model is undefined")

        r_tokens = self.model.model["r"]["r"].tokens
        p_tokens = self.model.model["p"]["p"].tokens

        if len(r_tokens) != len(rvals):
            raise RuntimeError("invalid request size")

        exp_string = self.model.model["m"]["m"].value
        has_eval = util.has_eval(exp_string)
        if not has_eval:
            # Matcher is static: compile it once outside the policy loop.
            expression = self._get_expression(exp_string, functions)

        policy_effects = set()

        r_parameters = dict(zip(r_tokens, rvals))

        policy_len = len(self.model.model["p"]["p"].policy)

        explain_index = -1
        if not 0 == policy_len:
            for i, pvals in enumerate(self.model.model["p"]["p"].policy):
                if len(p_tokens) != len(pvals):
                    raise RuntimeError("invalid policy size")

                p_parameters = dict(zip(p_tokens, pvals))
                parameters = dict(r_parameters, **p_parameters)

                if util.has_eval(exp_string):
                    # eval() in the matcher: the expression depends on the rule,
                    # so it must be re-compiled per policy line.
                    rule_names = util.get_eval_value(exp_string)
                    rules = [util.escape_assertion(p_parameters[rule_name]) for rule_name in rule_names]
                    exp_with_rule = util.replace_eval(exp_string, rules)
                    expression = self._get_expression(exp_with_rule, functions)

                result = expression.eval(parameters)

                if isinstance(result, bool):
                    if not result:
                        policy_effects.add(Effector.INDETERMINATE)
                        continue
                elif isinstance(result, (int, float)):
                    # BUG FIX: the error message below always promised int
                    # support, but the original check accepted only float,
                    # so an int matcher result raised incorrectly.
                    if 0 == result:
                        policy_effects.add(Effector.INDETERMINATE)
                        continue
                else:
                    raise RuntimeError("matcher result should be bool, int or float")

                if "p_eft" in parameters.keys():
                    eft = parameters["p_eft"]
                    if "allow" == eft:
                        policy_effects.add(Effector.ALLOW)
                    elif "deny" == eft:
                        policy_effects.add(Effector.DENY)
                    else:
                        policy_effects.add(Effector.INDETERMINATE)
                else:
                    policy_effects.add(Effector.ALLOW)

                # Short-circuit as soon as the effector can decide.
                if self.eft.intermediate_effect(policy_effects) != Effector.INDETERMINATE:
                    explain_index = i
                    break

        else:
            if has_eval:
                raise RuntimeError("please make sure rule exists in policy when using eval() in matcher")

            # No policy at all: evaluate the matcher once with empty p_* tokens.
            parameters = r_parameters.copy()

            for token in self.model.model["p"]["p"].tokens:
                parameters[token] = ""

            result = expression.eval(parameters)

            if result:
                policy_effects.add(Effector.ALLOW)
            else:
                policy_effects.add(Effector.INDETERMINATE)

        final_effect = self.eft.final_effect(policy_effects)
        result = effect_to_bool(final_effect)

        # Log request.
        req_str = "Request: "
        req_str = req_str + ", ".join([str(v) for v in rvals])

        req_str = req_str + " ---> %s" % result
        if result:
            self.logger.info(req_str)
        else:
            # leaving this in error for now, if it's very noise this can be changed to info or debug
            self.logger.error(req_str)

        explain_rule = []
        if explain_index != -1 and explain_index < policy_len:
            explain_rule = self.model.model["p"]["p"].policy[explain_index]

        return result, explain_rule

    @staticmethod
    def _get_expression(expr, functions=None):
        """Translate the CONF boolean operators into Python and compile."""
        expr = expr.replace("&&", "and")
        expr = expr.replace("||", "or")
        expr = expr.replace("!", "not")

        return SimpleEval(expr, functions)
class DistributedEnforcer(SyncedEnforcer):
    """DistributedEnforcer wraps SyncedEnforcer for dispatcher.

    Each *_self method is the dispatcher-facing variant of a policy mutation:
    it optionally persists through the adapter, then mutates the in-memory
    model and rebuilds role links for "g" rules.
    """

    def __init__(self, model=None, adapter=None):
        self.logger = logging.getLogger()
        SyncedEnforcer.__init__(self, model, adapter)

    def add_policy_self(self, should_persist, sec, ptype, rules):
        """
        AddPolicySelf provides a method for dispatcher to add authorization rules to the current policy.
        The function returns the rules affected and error.
        """
        no_exists_policy = []
        for rule in rules:
            if not self.get_model().has_policy(sec, ptype, rule):
                no_exists_policy.append(rule)

        if should_persist:
            try:
                # BUG FIX: duck-type the batch capability the same way
                # InternalEnforcer does; the original isinstance() check was
                # against the imported *module* `batch_adapter`, which raises
                # TypeError on every call.
                if hasattr(self.adapter, "add_policies"):
                    self.adapter.add_policies(sec, ptype, rules)
            except Exception as e:
                # BUG FIX: Logger.log() requires a level argument and
                # str + Exception raises TypeError; use error() with lazy
                # %s formatting instead.
                self.logger.error("An error occurred: %s", e)

        self.get_model().add_policies(sec, ptype, no_exists_policy)

        if sec == "g":
            try:
                self.build_incremental_role_links(PolicyOp.Policy_add, ptype, no_exists_policy)
            except Exception as e:
                self.logger.error("An exception occurred: %s", e)

        return no_exists_policy

    def remove_policy_self(self, should_persist, sec, ptype, rules):
        """
        remove_policy_self provides a method for dispatcher to remove policies from current policy.
        The function returns the rules affected and error.
        """
        if should_persist:
            try:
                # BUG FIX: `rules` is a batch, so persist through
                # remove_policies() (the original called the singular
                # remove_policy() with a list of rules).
                if hasattr(self.adapter, "remove_policies"):
                    self.adapter.remove_policies(sec, ptype, rules)
            except Exception as e:
                self.logger.error("An exception occurred: %s", e)

        effected = self.get_model().remove_policies_with_effected(sec, ptype, rules)

        if sec == "g":
            try:
                self.build_incremental_role_links(PolicyOp.Policy_remove, ptype, rules)
            except Exception as e:
                self.logger.error("An exception occurred: %s", e)

        return effected

    def remove_filtered_policy_self(self, should_persist, sec, ptype, field_index, *field_values):
        """
        remove_filtered_policy_self provides a method for dispatcher to remove an authorization
        rule from the current policy, field filters can be specified.
        The function returns the rules affected and error.
        """
        if should_persist:
            try:
                # BUG FIX: unpack field_values as InternalEnforcer does;
                # passing the tuple as one positional argument shifted every
                # filter by one field.
                self.adapter.remove_filtered_policy(sec, ptype, field_index, *field_values)
            except Exception as e:
                self.logger.error("An exception occurred: %s", e)

        effects = self.get_model().remove_filtered_policy_returns_effects(sec, ptype, field_index, *field_values)

        if sec == "g":
            try:
                self.build_incremental_role_links(PolicyOp.Policy_remove, ptype, effects)
            except Exception as e:
                self.logger.error("An exception occurred: %s", e)

        return effects

    def clear_policy_self(self, should_persist):
        """
        clear_policy_self provides a method for dispatcher to clear all rules from the current policy.
        """
        if should_persist:
            try:
                self.adapter.save_policy(None)
            except Exception as e:
                self.logger.error("An exception occurred: %s", e)

        self.get_model().clear_policy()

    def update_policy_self(self, should_persist, sec, ptype, old_rule, new_rule):
        """
        update_policy_self provides a method for dispatcher to update an authorization rule from the current policy.
        """
        if should_persist:
            try:
                # BUG FIX: duck-type instead of isinstance() against the
                # imported *module* `update_adapter`.
                if hasattr(self.adapter, "update_policy"):
                    self.adapter.update_policy(sec, ptype, old_rule, new_rule)
            except Exception as e:
                self.logger.error("An exception occurred: %s", e)
                return False

        rule_updated = self.get_model().update_policy(sec, ptype, old_rule, new_rule)

        if not rule_updated:
            return False

        if sec == "g":
            # Replace the old grouping link, then add the new one.
            try:
                self.build_incremental_role_links(PolicyOp.Policy_remove, ptype, [old_rule])
            except Exception:
                return False

            try:
                self.build_incremental_role_links(PolicyOp.Policy_add, ptype, [new_rule])
            except Exception:
                return False

        return True
from .default_effectors import AllowOverrideEffector, DenyOverrideEffector, AllowAndDenyEffector, PriorityEffector
from .effector import Effector


def get_effector(expr):
    """Instantiate the effector matching a policy-effect expression.

    Raises RuntimeError for any expression outside the four supported forms.
    """
    dispatch = {
        "some(where (p_eft == allow))": AllowOverrideEffector,
        "!some(where (p_eft == deny))": DenyOverrideEffector,
        "some(where (p_eft == allow)) && !some(where (p_eft == deny))": AllowAndDenyEffector,
        "priority(p_eft) || deny": PriorityEffector,
    }
    effector_cls = dispatch.get(expr)
    if effector_cls is None:
        raise RuntimeError("unsupported effect")
    return effector_cls()


def effect_to_bool(effect):
    """Map a final Effector decision to True/False; other values are an error."""
    if effect == Effector.ALLOW:
        return True
    if effect == Effector.DENY:
        return False
    raise RuntimeError("effect can't be converted to boolean")
from .effector import Effector


class AllowOverrideEffector(Effector):
    """Effect logic for ``some(where (p_eft == allow))``: one allow wins."""

    def intermediate_effect(self, effects):
        """Decide early: an allow is conclusive, otherwise keep scanning."""
        return Effector.ALLOW if Effector.ALLOW in effects else Effector.INDETERMINATE

    def final_effect(self, effects):
        """Default to deny unless at least one allow was matched."""
        return Effector.ALLOW if Effector.ALLOW in effects else Effector.DENY


class DenyOverrideEffector(Effector):
    """Effect logic for ``!some(where (p_eft == deny))``: one deny wins."""

    def intermediate_effect(self, effects):
        """Decide early: a deny is conclusive, otherwise keep scanning."""
        return Effector.DENY if Effector.DENY in effects else Effector.INDETERMINATE

    def final_effect(self, effects):
        """Default to allow unless at least one deny was matched."""
        return Effector.DENY if Effector.DENY in effects else Effector.ALLOW


class AllowAndDenyEffector(Effector):
    """Effect logic for ``some(allow) && !some(deny)``: need an allow and no deny."""

    def intermediate_effect(self, effects):
        """Only a deny decides early; an allow alone is not yet conclusive."""
        return Effector.DENY if Effector.DENY in effects else Effector.INDETERMINATE

    def final_effect(self, effects):
        """Allow only when some allow matched and no deny did."""
        if Effector.DENY in effects or Effector.ALLOW not in effects:
            return Effector.DENY
        return Effector.ALLOW


class PriorityEffector(Effector):
    """Effect logic for ``priority(p_eft) || deny``: first explicit effect wins."""

    def intermediate_effect(self, effects):
        """An explicit allow or deny decides immediately (allow checked first)."""
        if Effector.ALLOW in effects:
            return Effector.ALLOW
        if Effector.DENY in effects:
            return Effector.DENY
        return Effector.INDETERMINATE

    def final_effect(self, effects):
        """Same precedence as above; no explicit effect falls back to deny."""
        if Effector.ALLOW in effects:
            return Effector.ALLOW
        if Effector.DENY in effects:
            return Effector.DENY
        return Effector.DENY
class Effector:
    """Effector is the interface for Casbin effectors.

    Concrete subclasses combine the per-rule effects collected during
    enforcement into a single decision, expressed as one of the three
    integer constants below.
    """

    # Decision constants shared by all effector implementations.
    ALLOW = 0
    INDETERMINATE = 1
    DENY = 2

    def intermediate_effect(self, effects):
        """Return an early decision for the effects seen so far (interface stub)."""
        return None

    def final_effect(self, effects):
        """Return the final decision once all effects are collected (interface stub)."""
        return None
        Returns false if the user does not have the role (aka not affected).
        """
        return self.remove_grouping_policy(user, role)

    def delete_roles_for_user(self, user):
        """
        deletes all roles for a user.
        Returns false if the user does not have any roles (aka not affected).
        """
        # Field 0 of a "g" rule is the user.
        return self.remove_filtered_grouping_policy(0, user)

    def delete_user(self, user):
        """
        deletes a user.
        Returns false if the user does not exist (aka not affected).
        """
        # Remove the user's role links ("g" rules) ...
        res1 = self.remove_filtered_grouping_policy(0, user)

        # ... and any permissions granted directly to the user ("p" rules).
        res2 = self.remove_filtered_policy(0, user)
        return res1 or res2

    def delete_role(self, role):
        """
        deletes a role.
        Returns false if the role does not exist (aka not affected).
        """
        # Remove memberships where the role appears as the target (field 1) ...
        res1 = self.remove_filtered_grouping_policy(1, role)

        # ... and permissions granted to the role itself (subject field 0).
        res2 = self.remove_filtered_policy(0, role)
        return res1 or res2

    def delete_permission(self, *permission):
        """
        deletes a permission.
        Returns false if the permission does not exist (aka not affected).
        """
        # Match the permission fields starting after the subject (field 1).
        return self.remove_filtered_policy(1, *permission)

    def add_permission_for_user(self, user, *permission):
        """
        adds a permission for a user or role.
        Returns false if the user or role already has the permission (aka not affected).
        """
        return self.add_policy(join_slice(user, *permission))

    def delete_permission_for_user(self, user, *permission):
        """
        deletes a permission for a user or role.
        Returns false if the user or role does not have the permission (aka not affected).
        """
        return self.remove_policy(join_slice(user, *permission))

    def delete_permissions_for_user(self, user):
        """
        deletes permissions for a user or role.
        Returns false if the user or role does not have any permissions (aka not affected).
        """
        return self.remove_filtered_policy(0, user)

    def get_permissions_for_user(self, user):
        """
        gets permissions for a user or role.
        """
        return self.get_filtered_policy(0, user)

    def has_permission_for_user(self, user, *permission):
        """
        determines whether a user has a permission.
        """
        return self.has_policy(join_slice(user, *permission))

    def get_implicit_roles_for_user(self, name, domain=None):
        """
        gets implicit roles that a user has.
        Compared to get_roles_for_user(), this function retrieves indirect roles besides direct roles.
        For example:
        g, alice, role:admin
        g, role:admin, role:user

        get_roles_for_user("alice") can only get: ["role:admin"].
        But get_implicit_roles_for_user("alice") will get: ["role:admin", "role:user"].
        """
        # Breadth-first traversal over every role manager; `res` doubles as
        # the visited set so inheritance cycles cannot loop forever.
        res = []
        queue = [name]

        while queue:
            name = queue.pop(0)

            for rm in self.rm_map.values():
                roles = rm.get_roles(name, domain)
                for r in roles:
                    if r not in res:
                        res.append(r)
                        queue.append(r)

        return res

    def get_implicit_permissions_for_user(self, user, domain=None):
        """
        gets implicit permissions for a user or role.
        Compared to get_permissions_for_user(), this function retrieves permissions for inherited roles.
        For example:
        p, admin, data1, read
        p, alice, data2, read
        g, alice, admin

        get_permissions_for_user("alice") can only get: [["alice", "data2", "read"]].
        But get_implicit_permissions_for_user("alice") will get: [["admin", "data1", "read"], ["alice", "data2", "read"]].
        """
        roles = self.get_implicit_roles_for_user(user, domain)

        # The user's own direct permissions come first.
        roles.insert(0, user)

        res = []
        for role in roles:
            if domain:
                permissions = self.get_permissions_for_user_in_domain(role, domain)
            else:
                permissions = self.get_permissions_for_user(role)

            res.extend(permissions)

        return res

    def get_implicit_users_for_permission(self, *permission):
        """
        gets implicit users for a permission.
        For example:
        p, admin, data1, read
        p, bob, data1, read
        g, alice, admin

        get_implicit_users_for_permission("data1", "read") will get: ["alice", "bob"].
        Note: only users will be returned, roles (2nd arg in "g") will be excluded.
        """
        # Candidate users = every policy subject minus anything that is a role.
        subjects = self.get_all_subjects()
        roles = self.get_all_roles()

        users = set_subtract(subjects, roles)

        res = list()
        for user in users:
            req = join_slice(user, *permission)
            allowed = self.enforce(*req)

            if allowed:
                res.append(user)

        return res

    def get_roles_for_user_in_domain(self, name, domain):
        """gets the roles that a user has inside a domain."""
        return self.model.model['g']['g'].rm.get_roles(name, domain)

    def get_users_for_role_in_domain(self, name, domain):
        """gets the users that has a role inside a domain."""
        return self.model.model['g']['g'].rm.get_users(name, domain)

    def add_role_for_user_in_domain(self, user, role, domain):
        """adds a role for a user inside a domain."""
        """Returns false if the user already has the role (aka not affected)."""
        return self.add_grouping_policy(user, role, domain)

    def delete_roles_for_user_in_domain(self, user, role, domain):
        """deletes a role for a user inside a domain."""
        """Returns false if the user does not have any roles (aka not affected)."""
        return self.remove_filtered_grouping_policy(0, user, role, domain)

    def get_permissions_for_user_in_domain(self, user, domain):
        """gets permissions for a user or role inside domain."""
        return self.get_filtered_policy(0, user, domain)

    def get_all_users_by_domain(self, domain):
        """Return all users (with their role ids) associated with the given domain."""
        # "g" rules are (user, role, domain); field index 2 filters by domain.
        data = self.get_filtered_grouping_policy(2, domain)
        res = []
        for item in data:
            res.append({
                'username': item[0],
                'role_id': item[1]
            })
        return res

    def get_domains_for_user(self, user):
        """Return all domains the given user belongs to."""
        # Field 2 of each matching (user, role, domain) rule is the domain.
        data = self.get_filtered_grouping_policy(0, user)
        res = []
        for item in data:
            res.append(item[2])
        return res
from utils.casbin.core_enforcer import CoreEnforcer
from utils.casbin.model.policy_op import PolicyOp

class InternalEnforcer(CoreEnforcer):
    """
    InternalEnforcer = CoreEnforcer + Internal API.

    Every _* method follows the same protocol: mutate the in-memory model
    first; if that changed anything and auto_save is enabled, persist through
    the adapter (an explicit False from the adapter aborts with False), then
    notify the watcher so other enforcer instances can reload.
    """

    def _add_policy(self, sec, ptype, rule):
        """adds a rule to the current policy."""
        rule_added = self.model.add_policy(sec, ptype, rule)
        if not rule_added:
            # Rule already present: nothing to persist.
            return rule_added

        if self.adapter and self.auto_save:
            if self.adapter.add_policy(sec, ptype, rule) is False:
                return False

            if self.watcher:
                self.watcher.update()

        return rule_added

    def _add_policies(self,sec,ptype,rules):
        """adds rules to the current policy."""
        rules_added = self.model.add_policies(sec, ptype, rules)
        if not rules_added:
            return rules_added

        if self.adapter and self.auto_save:
            # Batch persistence is optional adapter capability; bail out
            # when the adapter does not implement add_policies.
            if hasattr(self.adapter,'add_policies') is False:
                return False

            if self.adapter.add_policies(sec, ptype, rules) is False:
                return False

            if self.watcher:
                self.watcher.update()

        return rules_added

    def _update_policy(self, sec, ptype, old_rule, new_rule):
        """updates a rule from the current policy."""
        rule_updated = self.model.update_policy(sec, ptype, old_rule, new_rule)

        if not rule_updated:
            return rule_updated

        if self.adapter and self.auto_save:

            if self.adapter.update_policy(sec, ptype, old_rule, new_rule) is False:
                return False

            if self.watcher:
                self.watcher.update()

        return rule_updated

    def _update_policies(self, sec, ptype, old_rules, new_rules):
        """updates rules from the current policy."""
        rules_updated = self.model.update_policies(sec, ptype, old_rules, new_rules)

        if not rules_updated:
            return rules_updated

        if self.adapter and self.auto_save:

            if self.adapter.update_policies(sec, ptype, old_rules, new_rules) is False:
                return False

            if self.watcher:
                self.watcher.update()

        return rules_updated

    def _remove_policy(self, sec, ptype, rule):
        """removes a rule from the current policy."""
        rule_removed = self.model.remove_policy(sec, ptype, rule)
        if not rule_removed:
            return rule_removed

        if self.adapter and self.auto_save:
            if self.adapter.remove_policy(sec, ptype, rule) is False:
                return False

            if self.watcher:
                self.watcher.update()

        return rule_removed

    def _remove_policies(self, sec, ptype, rules):
        """RemovePolicies removes policy rules from the model."""
        rules_removed = self.model.remove_policies(sec, ptype, rules)
        if not rules_removed:
            return rules_removed

        if self.adapter and self.auto_save:
            # Batch removal is an optional adapter capability.
            if hasattr(self.adapter,'remove_policies') is False:
                return False

            if self.adapter.remove_policies(sec, ptype, rules) is False:
                return False

            if self.watcher:
                self.watcher.update()

        return rules_removed

    def _remove_filtered_policy(self, sec, ptype, field_index, *field_values):
        """removes rules based on field filters from the current policy."""
        rule_removed = self.model.remove_filtered_policy(sec, ptype, field_index, *field_values)
        if not rule_removed:
            return rule_removed

        if self.adapter and self.auto_save:
            if self.adapter.remove_filtered_policy(sec, ptype, field_index, *field_values) is False:
                return False

            if self.watcher:
                self.watcher.update()

        return rule_removed
+ """ + + def get_all_subjects(self): + """gets the list of subjects that show up in the current policy.""" + return self.get_all_named_subjects('p') + + def get_all_named_subjects(self, ptype): + """gets the list of subjects that show up in the current named policy.""" + return self.model.get_values_for_field_in_policy('p', ptype, 0) + + def get_all_objects(self): + """gets the list of objects that show up in the current policy.""" + return self.get_all_named_objects('p') + + def get_all_named_objects(self, ptype): + """gets the list of objects that show up in the current named policy.""" + return self.model.get_values_for_field_in_policy('p', ptype, 1) + + def get_all_actions(self): + """gets the list of actions that show up in the current policy.""" + return self.get_all_named_actions('p') + + def get_all_named_actions(self, ptype): + """gets the list of actions that show up in the current named policy.""" + return self.model.get_values_for_field_in_policy('p', ptype, 2) + + def get_all_roles(self): + """gets the list of roles that show up in the current named policy.""" + return self.get_all_named_roles('g') + + def get_all_named_roles(self, ptype): + """gets all the authorization rules in the policy.""" + return self.model.get_values_for_field_in_policy('g', ptype, 1) + + def get_policy(self): + """gets all the authorization rules in the policy.""" + return self.get_named_policy('p') + + def get_filtered_policy(self, field_index, *field_values): + """gets all the authorization rules in the policy, field filters can be specified.""" + return self.get_filtered_named_policy('p', field_index, *field_values) + + def get_named_policy(self, ptype): + """gets all the authorization rules in the named policy.""" + return self.model.get_policy('p', ptype) + + def get_filtered_named_policy(self, ptype, field_index, *field_values): + """gets all the authorization rules in the named policy, field filters can be specified.""" + return self.model.get_filtered_policy('p', 
ptype, field_index, *field_values) + + def get_grouping_policy(self): + """gets all the role inheritance rules in the policy.""" + return self.get_named_grouping_policy('g') + + def get_filtered_grouping_policy(self, field_index, *field_values): + """gets all the role inheritance rules in the policy, field filters can be specified.""" + return self.get_filtered_named_grouping_policy("g", field_index, *field_values) + + def get_named_grouping_policy(self, ptype): + """gets all the role inheritance rules in the policy.""" + return self.model.get_policy('g', ptype) + + def get_filtered_named_grouping_policy(self, ptype, field_index, *field_values): + """gets all the role inheritance rules in the policy, field filters can be specified.""" + return self.model.get_filtered_policy('g', ptype, field_index, *field_values) + + def has_policy(self, *params): + """determines whether an authorization rule exists.""" + return self.has_named_policy('p', *params) + + def has_named_policy(self, ptype, *params): + """determines whether a named authorization rule exists.""" + if len(params) == 1 and isinstance(params[0], list): + str_slice = params[0] + return self.model.has_policy('p', ptype, str_slice) + + return self.model.has_policy('p', ptype, list(params)) + + def add_policy(self, *params): + """adds an authorization rule to the current policy. + + If the rule already exists, the function returns false and the rule will not be added. + Otherwise the function returns true by adding the new rule. + """ + return self.add_named_policy('p', *params) + + def add_policies(self,rules): + """adds authorization rules to the current policy. + + If the rule already exists, the function returns false for the corresponding rule and the rule will not be added. + Otherwise the function returns true for the corresponding rule by adding the new rule. 
+ """ + return self.add_named_policies('p',rules) + + def add_named_policy(self, ptype, *params): + """adds an authorization rule to the current named policy. + + If the rule already exists, the function returns false and the rule will not be added. + Otherwise the function returns true by adding the new rule. + """ + + if len(params) == 1 and isinstance(params[0], list): + str_slice = params[0] + rule_added = self._add_policy('p', ptype, str_slice) + else: + rule_added = self._add_policy('p', ptype, list(params)) + + return rule_added + + def add_named_policies(self,ptype,rules): + """adds authorization rules to the current named policy. + + If the rule already exists, the function returns false for the corresponding rule and the rule will not be added. + Otherwise the function returns true for the corresponding by adding the new rule.""" + return self._add_policies('p',ptype,rules) + + def update_policy(self, old_rule, new_rule): + """updates an authorization rule from the current policy.""" + return self.update_named_policy('p', old_rule, new_rule) + + def update_policies(self, old_rules, new_rules): + """updates authorization rules from the current policy.""" + return self.update_named_policies('p', old_rules, new_rules) + + def update_named_policy(self, ptype, old_rule, new_rule): + """updates an authorization rule from the current named policy.""" + return self._update_policy('p', ptype, old_rule, new_rule) + + def update_named_policies(self, ptype, old_rules, new_rules): + """updates authorization rules from the current named policy.""" + return self._update_policies('p', ptype, old_rules, new_rules) + + def remove_policy(self, *params): + """removes an authorization rule from the current policy.""" + return self.remove_named_policy('p', *params) + + def remove_policies(self,rules): + """removes authorization rules from the current policy.""" + return self.remove_named_policies('p',rules) + + def remove_filtered_policy(self, field_index, *field_values): + 
"""removes an authorization rule from the current policy, field filters can be specified.""" + return self.remove_filtered_named_policy('p', field_index, *field_values) + + def remove_named_policy(self, ptype, *params): + """removes an authorization rule from the current named policy.""" + + if len(params) == 1 and isinstance(params[0], list): + str_slice = params[0] + rule_removed = self._remove_policy('p', ptype, str_slice) + else: + rule_removed = self._remove_policy('p', ptype, list(params)) + + return rule_removed + + def remove_named_policies(self,ptype,rules): + """removes authorization rules from the current named policy.""" + return self._remove_policies('p',ptype,rules) + + def remove_filtered_named_policy(self, ptype, field_index, *field_values): + """removes an authorization rule from the current named policy, field filters can be specified.""" + return self._remove_filtered_policy('p', ptype, field_index, *field_values) + + def has_grouping_policy(self, *params): + """determines whether a role inheritance rule exists.""" + + return self.has_named_grouping_policy('g', *params) + + def has_named_grouping_policy(self, ptype, *params): + """determines whether a named role inheritance rule exists.""" + + if len(params) == 1 and isinstance(params[0], list): + str_slice = params[0] + return self.model.has_policy('g', ptype, str_slice) + + return self.model.has_policy('g', ptype, list(params)) + + def add_grouping_policy(self, *params): + """adds a role inheritance rule to the current policy. + + If the rule already exists, the function returns false and the rule will not be added. + Otherwise the function returns true by adding the new rule. + """ + return self.add_named_grouping_policy('g', *params) + + def add_grouping_policies(self,rules): + """adds role inheritance rulea to the current policy. + + If the rule already exists, the function returns false for the corresponding policy rule and the rule will not be added. 
+ Otherwise the function returns true for the corresponding policy rule by adding the new rule. + """ + return self.add_named_grouping_policies('g',rules) + + def add_named_grouping_policy(self, ptype, *params): + """adds a named role inheritance rule to the current policy. + + If the rule already exists, the function returns false and the rule will not be added. + Otherwise the function returns true by adding the new rule. + """ + + if len(params) == 1 and isinstance(params[0], list): + str_slice = params[0] + rule_added = self._add_policy('g', ptype, str_slice) + else: + rule_added = self._add_policy('g', ptype, list(params)) + + if self.auto_build_role_links: + self.build_role_links() + return rule_added + + def add_named_grouping_policies(self,ptype,rules): + """"adds named role inheritance rules to the current policy. + + If the rule already exists, the function returns false for the corresponding policy rule and the rule will not be added. + Otherwise the function returns true for the corresponding policy rule by adding the new rule.""" + rules_added = self._add_policies('g',ptype,rules) + if self.auto_build_role_links: + self.build_role_links() + + return rules_added + + def remove_grouping_policy(self, *params): + """removes a role inheritance rule from the current policy.""" + return self.remove_named_grouping_policy('g', *params) + + def remove_grouping_policies(self,rules): + """removes role inheritance rulea from the current policy.""" + return self.remove_named_grouping_policies('g',rules) + + def remove_filtered_grouping_policy(self, field_index, *field_values): + """removes a role inheritance rule from the current policy, field filters can be specified.""" + return self.remove_filtered_named_grouping_policy('g', field_index, *field_values) + + def remove_named_grouping_policy(self, ptype, *params): + """removes a role inheritance rule from the current named policy.""" + + if len(params) == 1 and isinstance(params[0], list): + str_slice = params[0] + 
rule_removed = self._remove_policy('g', ptype, str_slice) + else: + rule_removed = self._remove_policy('g', ptype, list(params)) + + if self.auto_build_role_links: + self.build_role_links() + return rule_removed + + def remove_named_grouping_policies(self,ptype,rules): + """ removes role inheritance rules from the current named policy.""" + rules_removed = self._remove_policies('g',ptype,rules) + + if self.auto_build_role_links: + self.build_role_links() + + return rules_removed + + def remove_filtered_named_grouping_policy(self, ptype, field_index, *field_values): + """removes a role inheritance rule from the current named policy, field filters can be specified.""" + rule_removed = self._remove_filtered_policy('g', ptype, field_index, *field_values) + + if self.auto_build_role_links: + self.build_role_links() + return rule_removed + + def add_function(self, name, func): + """adds a customized function.""" + self.fm.add_function(name, func) \ No newline at end of file diff --git a/utils/casbin/model/__init__.py b/utils/casbin/model/__init__.py new file mode 100644 index 0000000..2bdda40 --- /dev/null +++ b/utils/casbin/model/__init__.py @@ -0,0 +1,4 @@ +from .assertion import Assertion +from .model import Model +from .policy import Policy +from .function import FunctionMap diff --git a/utils/casbin/model/assertion.py b/utils/casbin/model/assertion.py new file mode 100644 index 0000000..3585c65 --- /dev/null +++ b/utils/casbin/model/assertion.py @@ -0,0 +1,47 @@ +import logging +from utils.casbin.model.policy_op import PolicyOp + + +class Assertion: + def __init__(self): + self.logger = logging.getLogger() + self.key = "" + self.value = "" + self.tokens = [] + self.policy = [] + self.rm = None + + def build_role_links(self, rm): + self.rm = rm + count = self.value.count("_") + if count < 2: + raise RuntimeError('the number of "_" in role definition should be at least 2') + + for rule in self.policy: + if len(rule) < count: + pass + # raise RuntimeError("grouping 
policy elements do not meet role definition") + if len(rule) > count: + rule = rule[:count] + + self.rm.add_link(*rule[:count]) + + self.logger.info("Role links for: {}".format(self.key)) + self.rm.print_roles() + + def build_incremental_role_links(self, rm, op, rules): + self.rm = rm + count = self.value.count("_") + if count < 2: + raise RuntimeError('the number of "_" in role definition should be at least 2') + for rule in rules: + if len(rule) < count: + raise TypeError("grouping policy elements do not meet role definition") + if len(rule) > count: + rule = rule[:count] + if op == PolicyOp.Policy_add: + rm.add_link(rule[0], rule[1], *rule[2:]) + elif op == PolicyOp.Policy_remove: + rm.delete_link(rule[0], rule[1], *rule[2:]) + else: + raise TypeError("Invalid operation: " + str(op)) diff --git a/utils/casbin/model/function.py b/utils/casbin/model/function.py new file mode 100644 index 0000000..731a588 --- /dev/null +++ b/utils/casbin/model/function.py @@ -0,0 +1,22 @@ +from utils.casbin import util + + +class FunctionMap: + fm = dict() + + def add_function(self, name, func): + self.fm[name] = func + + @staticmethod + def load_function_map(): + fm = FunctionMap() + fm.add_function("keyMatch", util.key_match_func) + fm.add_function("keyMatch2", util.key_match2_func) + fm.add_function("regexMatch", util.regex_match_func) + fm.add_function("ipMatch", util.ip_match_func) + fm.add_function("globMatch", util.glob_match_func) + + return fm + + def get_functions(self): + return self.fm diff --git a/utils/casbin/model/model.py b/utils/casbin/model/model.py new file mode 100644 index 0000000..483e8d6 --- /dev/null +++ b/utils/casbin/model/model.py @@ -0,0 +1,80 @@ +from . 
import Assertion +from utils.casbin import util, config +from .policy import Policy + +class Model(Policy): + + section_name_map = { + 'r': 'request_definition', + 'p': 'policy_definition', + 'g': 'role_definition', + 'e': 'policy_effect', + 'm': 'matchers', + } + + def _load_assertion(self, cfg, sec, key): + value = cfg.get(self.section_name_map[sec] + "::" + key) + + return self.add_def(sec, key, value) + + def add_def(self, sec, key, value): + if value == "": + return + + ast = Assertion() + ast.key = key + ast.value = value + + if "r" == sec or "p" == sec: + ast.tokens = ast.value.split(",") + for i,token in enumerate(ast.tokens): + ast.tokens[i] = key + "_" + token.strip() + else: + ast.value = util.remove_comments(util.escape_assertion(ast.value)) + + if sec not in self.model.keys(): + self.model[sec] = {} + + self.model[sec][key] = ast + + return True + + def _get_key_suffix(self, i): + if i == 1: + return "" + + return str(i) + + def _load_section(self, cfg, sec): + i = 1 + while True: + if not self._load_assertion(cfg, sec, sec + self._get_key_suffix(i)): + break + else: + i = i + 1 + + def load_model(self, path): + cfg = config.Config.new_config(path) + + self._load_section(cfg, "r") + self._load_section(cfg, "p") + self._load_section(cfg, "e") + self._load_section(cfg, "m") + + self._load_section(cfg, "g") + + def load_model_from_text(self, text): + cfg = config.Config.new_config_from_text(text) + + self._load_section(cfg, "r") + self._load_section(cfg, "p") + self._load_section(cfg, "e") + self._load_section(cfg, "m") + + self._load_section(cfg, "g") + + def print_model(self): + self.logger.info("Model:") + for k, v in self.model.items(): + for i, j in v.items(): + self.logger.info("%s.%s: %s", k, i, j.value) diff --git a/utils/casbin/model/policy.py b/utils/casbin/model/policy.py new file mode 100644 index 0000000..d56301f --- /dev/null +++ b/utils/casbin/model/policy.py @@ -0,0 +1,190 @@ +import logging + +class Policy: + def __init__(self): + 
import logging
import enum


class Policy:
    """Holds the raw policy rules for every section/ptype of a model."""

    def __init__(self):
        self.logger = logging.getLogger()
        # model[sec][ptype] -> assertion object exposing a `.policy` list of rules.
        self.model = {}

    def build_role_links(self, rm_map):
        """Initialize RBAC role inheritance from every 'g' assertion."""
        for ptype, ast in self.model.get("g", {}).items():
            ast.build_role_links(rm_map[ptype])

    def build_incremental_role_links(self, rm, op, sec, ptype, rules):
        """Incrementally update role links for a 'g' assertion."""
        if sec == "g":
            self.model.get(sec).get(ptype).build_incremental_role_links(rm, op, rules)

    def print_policy(self):
        """Log every rule currently loaded."""
        self.logger.info("Policy:")
        for sec in ("p", "g"):
            for key, ast in self.model.get(sec, {}).items():
                self.logger.info("{} : {} : {}".format(key, ast.value, ast.policy))

    def clear_policy(self):
        """Drop every rule while keeping the assertions themselves."""
        for sec in ("p", "g"):
            for ast in self.model.get(sec, {}).values():
                ast.policy = []

    def get_policy(self, sec, ptype):
        """Return all rules stored under sec/ptype."""
        return self.model[sec][ptype].policy

    def get_filtered_policy(self, sec, ptype, field_index, *field_values):
        """Return the rules whose fields starting at *field_index* match *field_values* ('' matches anything)."""
        matches = []
        for rule in self.model[sec][ptype].policy:
            if all(value == "" or rule[field_index + i] == value
                   for i, value in enumerate(field_values)):
                matches.append(rule)
        return matches

    def has_policy(self, sec, ptype, rule):
        """True if *rule* exists under sec/ptype."""
        try:
            return rule in self.model[sec][ptype].policy
        except KeyError:
            # Unknown section or ptype.
            return False

    def add_policy(self, sec, ptype, rule):
        """Add *rule*; returns False when it already exists."""
        if self.has_policy(sec, ptype, rule):
            return False
        self.model[sec][ptype].policy.append(rule)
        return True

    def add_policies(self, sec, ptype, rules):
        """Add many rules; refuses the whole batch when any rule already exists."""
        if any(self.has_policy(sec, ptype, rule) for rule in rules):
            return False
        self.model[sec][ptype].policy.extend(rules)
        return True

    def update_policy(self, sec, ptype, old_rule, new_rule):
        """Replace old_rule with new_rule; False when old_rule is absent."""
        if not self.has_policy(sec, ptype, old_rule):
            return False
        return self.remove_policy(sec, ptype, old_rule) and self.add_policy(sec, ptype, new_rule)

    def update_policies(self, sec, ptype, old_rules, new_rules):
        """Replace many rules; False when any old rule is absent."""
        for old_rule in old_rules:
            if not self.has_policy(sec, ptype, old_rule):
                return False
        return self.remove_policies(sec, ptype, old_rules) and self.add_policies(sec, ptype, new_rules)

    def remove_policy(self, sec, ptype, rule):
        """Remove one occurrence of *rule*; True when no copy remains."""
        if not self.has_policy(sec, ptype, rule):
            return False
        self.model[sec][ptype].policy.remove(rule)
        return rule not in self.model[sec][ptype].policy

    def remove_policies(self, sec, ptype, rules):
        """Remove many rules; False when any is absent or a duplicate survives."""
        for rule in rules:
            if not self.has_policy(sec, ptype, rule):
                return False
            policy = self.model[sec][ptype].policy
            policy.remove(rule)
            if rule in policy:
                return False
        return True

    def remove_policies_with_effected(self, sec, ptype, rules):
        """Remove the rules that exist and return the ones actually removed."""
        removed_rules = []
        for rule in rules:
            if self.has_policy(sec, ptype, rule):
                removed_rules.append(rule)
                self.remove_policy(sec, ptype, rule)
        return removed_rules

    def remove_filtered_policy_returns_effects(self, sec, ptype, field_index, *field_values):
        """Remove matching rules and return them.

        NOTE(review): the filter is read from field_values[0] (the first
        vararg is itself expected to be a sequence of values); this quirk
        is preserved from the original implementation.
        """
        if len(field_values) == 0:
            return []
        if sec not in self.model.keys():
            return []
        if ptype not in self.model[sec]:
            return []

        kept, removed = [], []
        for rule in self.model[sec][ptype].policy:
            if all(value == "" or rule[field_index + i] == value
                   for i, value in enumerate(field_values[0])):
                removed.append(rule)
            else:
                kept.append(rule)

        self.model[sec][ptype].policy = kept
        return removed

    def remove_filtered_policy(self, sec, ptype, field_index, *field_values):
        """Remove matching rules; True when anything was removed."""
        if sec not in self.model.keys():
            return False
        if ptype not in self.model[sec]:
            return False

        kept = []
        any_removed = False
        for rule in self.model[sec][ptype].policy:
            if all(value == "" or rule[field_index + i] == value
                   for i, value in enumerate(field_values)):
                any_removed = True
            else:
                kept.append(rule)

        self.model[sec][ptype].policy = kept
        return any_removed

    def get_values_for_field_in_policy(self, sec, ptype, field_index):
        """Return the distinct values of one field across all rules, first-seen order."""
        values = []
        if sec not in self.model.keys():
            return values
        if ptype not in self.model[sec]:
            return values
        for rule in self.model[sec][ptype].policy:
            if rule[field_index] not in values:
                values.append(rule[field_index])
        return values


class PolicyOp(enum.Enum):
    """Kinds of incremental policy operation."""
    Policy_add = 1
    Policy_remove = 2
--git a/utils/casbin/persist/adapter.py b/utils/casbin/persist/adapter.py new file mode 100644 index 0000000..b290be2 --- /dev/null +++ b/utils/casbin/persist/adapter.py @@ -0,0 +1,46 @@ +def load_policy_line(line, model): + """loads a text line as a policy rule to model.""" + + if line == "": + return + + if line[:1] == "#": + return + + tokens = line.split(", ") + key = tokens[0] + sec = key[0] + + if sec not in model.model.keys(): + return + + if key not in model.model[sec].keys(): + return + + model.model[sec][key].policy.append(tokens[1:]) + + +class Adapter: + """the interface for Casbin adapters.""" + + def load_policy(self, model): + """loads all policy rules from the storage.""" + pass + + def save_policy(self, model): + """saves all policy rules to the storage.""" + pass + + def add_policy(self, sec, ptype, rule): + """adds a policy rule to the storage.""" + pass + + def remove_policy(self, sec, ptype, rule): + """removes a policy rule from the storage.""" + pass + + def remove_filtered_policy(self, sec, ptype, field_index, *field_values): + """removes policy rules that match the filter from the storage. + This is part of the Auto-Save feature. 
+ """ + pass diff --git a/utils/casbin/persist/adapter_filtered.py b/utils/casbin/persist/adapter_filtered.py new file mode 100644 index 0000000..fbb4fef --- /dev/null +++ b/utils/casbin/persist/adapter_filtered.py @@ -0,0 +1,13 @@ +from .adapter import Adapter + +""" FilteredAdapter is the interface for Casbin adapters supporting filtered policies.""" +class FilteredAdapter(Adapter): + def is_filtered(self): + """IsFiltered returns true if the loaded policy has been filtered + Marks if the loaded policy is filtered or not + """ + pass + + def load_filtered_policy(self, model, filter): + """Loads policy rules that match the filter from the storage.""" + pass \ No newline at end of file diff --git a/utils/casbin/persist/adapters/__init__.py b/utils/casbin/persist/adapters/__init__.py new file mode 100644 index 0000000..f1b9da1 --- /dev/null +++ b/utils/casbin/persist/adapters/__init__.py @@ -0,0 +1,2 @@ +from .file_adapter import FileAdapter +from .adapter_filtered import FilteredAdapter \ No newline at end of file diff --git a/utils/casbin/persist/adapters/adapter_filtered.py b/utils/casbin/persist/adapters/adapter_filtered.py new file mode 100644 index 0000000..928ec1e --- /dev/null +++ b/utils/casbin/persist/adapters/adapter_filtered.py @@ -0,0 +1,89 @@ +from utils.casbin import persist +from utils.casbin import model +from .file_adapter import FileAdapter +import os + +class Filter: + #P,G are string [] + P = [] + G = [] + +class FilteredAdapter (FileAdapter,persist.FilteredAdapter): + filtered = False + _file_path = "" + filter = Filter() + #new_filtered_adapte is the constructor for FilteredAdapter. + def __init__(self,file_path): + self.filtered = True + self._file_path = file_path + + def load_policy(self,model): + if not os.path.isfile(self._file_path): + raise RuntimeError("invalid file path, file path cannot be empty") + self.filtered=False + self._load_policy_file(model) + + #load_filtered_policy loads only policy rules that match the filter. 
+ def load_filtered_policy(self,model,filter): + if filter == None: + return self.load_policy(model) + + if not os.path.isfile(self._file_path): + raise RuntimeError("invalid file path, file path cannot be empty") + + try: + filter_value = [filter.__dict__['P']]+[filter.__dict__['G']] + except: + raise RuntimeError("invalid filter type") + + self.load_filtered_policy_file(model,filter_value,persist.load_policy_line) + self.filtered = True + + def load_filtered_policy_file(self,model,filter,hanlder): + with open(self._file_path, "rb") as file: + while True: + line = file.readline() + line = line.decode().strip() + if line == '\n': + continue + if not line : + break + if filter_line(line,filter): + continue + + hanlder(line,model) + + #is_filtered returns true if the loaded policy has been filtered. + def is_filtered(self): + return self.filtered + def save_policy(self,model): + if self.filtered: + raise RuntimeError("cannot save a filtered policy") + + self._save_policy_file(model) + +def filter_line(line,filter): + if filter == None: + return False + + p = line.split(',') + if len(p) == 0: + return True + filter_slice = [] + + if p[0].strip()== 'p': + filter_slice = filter[0] + elif p[0].strip() == 'g': + filter_slice = filter[1] + return filter_words(p,filter_slice) + +def filter_words(line,filter): + if len(line) < len(filter)+1: + return True + skip_line=False + for i,v in enumerate(filter): + if(len(v) >0 and ( v.strip() != line[i+1].strip() ) ): + skip_line = True + break + + return skip_line \ No newline at end of file diff --git a/utils/casbin/persist/adapters/file_adapter.py b/utils/casbin/persist/adapters/file_adapter.py new file mode 100644 index 0000000..f6a81ce --- /dev/null +++ b/utils/casbin/persist/adapters/file_adapter.py @@ -0,0 +1,62 @@ +from utils.casbin import persist +import os + +class FileAdapter(persist.Adapter): + """the file adapter for Casbin. + It can load policy from file or save policy to file. 
+ """ + _file_path = "" + + def __init__(self, file_path): + self._file_path = file_path + + def load_policy(self, model): + if not os.path.isfile(self._file_path): + raise RuntimeError("invalid file path, file path cannot be empty") + + self._load_policy_file(model) + + def save_policy(self, model): + if not os.path.isfile(self._file_path): + raise RuntimeError("invalid file path, file path cannot be empty") + + self._save_policy_file(model) + + def _load_policy_file(self, model): + with open(self._file_path, "rb") as file: + line = file.readline() + while line: + persist.load_policy_line(line.decode().strip(), model) + line = file.readline() + + def _save_policy_file(self, model): + with open(self._file_path, "w") as file: + lines = [] + + if "p" in model.model.keys(): + for key, ast in model.model["p"].items(): + for pvals in ast.policy: + lines.append(key + ", " + ", ".join(pvals)) + + if "g" in model.model.keys(): + for key, ast in model.model["g"].items(): + for pvals in ast.policy: + lines.append(key + ", " + ", ".join(pvals)) + + for i, line in enumerate(lines): + if i != len(lines) - 1: + lines[i] += "\n" + + file.writelines(lines) + + def add_policy(self, sec, ptype, rule): + pass + + def add_policies(self,sec,ptype,rules): + pass + + def remove_policy(self, sec, ptype, rule): + pass + + def remove_policies(self,sec,ptype,rules): + pass \ No newline at end of file diff --git a/utils/casbin/persist/adapters/update_adapter.py b/utils/casbin/persist/adapters/update_adapter.py new file mode 100644 index 0000000..11d4f3f --- /dev/null +++ b/utils/casbin/persist/adapters/update_adapter.py @@ -0,0 +1,9 @@ +class UpdateAdapter: + """ UpdateAdapter is the interface for Casbin adapters with add update policy function. """ + + def update_policy(self, sec, ptype, old_rule, new_policy): + """ + update_policy updates a policy rule from storage. + This is part of the Auto-Save feature. 
+ """ + pass \ No newline at end of file diff --git a/utils/casbin/persist/batch_adapter.py b/utils/casbin/persist/batch_adapter.py new file mode 100644 index 0000000..6dbf88d --- /dev/null +++ b/utils/casbin/persist/batch_adapter.py @@ -0,0 +1,11 @@ +from .adapter import Adapter + +"""BatchAdapter is the interface for Casbin adapters with multiple add and remove policy functions.""" +class BatchAdapter(Adapter): + def add_policies(self,sec,ptype,rules): + """AddPolicies adds policy rules to the storage.""" + pass + + def remove_policies(self,sec,ptype,rules): + """RemovePolicies removes policy rules from the storage.""" + pass \ No newline at end of file diff --git a/utils/casbin/persist/dispatcher.py b/utils/casbin/persist/dispatcher.py new file mode 100644 index 0000000..b499cde --- /dev/null +++ b/utils/casbin/persist/dispatcher.py @@ -0,0 +1,21 @@ +class Dispatcher: + """Dispatcher is the interface for pycasbin dispatcher""" + def add_policies(self, sec, ptype, rules): + """add_policies adds policies rule to all instance.""" + pass + + def remove_policies(self, sec, ptype, rules): + """remove_policies removes policies rule from all instance.""" + pass + + def remove_filtered_policy(self, sec, ptype, field_index, field_values): + """remove_filtered_policy removes policy rules that match the filter from all instance.""" + pass + + def clear_policy(self): + """clear_policy clears all current policy in all instances.""" + pass + + def update_policy(self, sec, ptype, old_rule, new_rule): + """update_policy updates policy rule from all instance.""" + pass diff --git a/utils/casbin/rbac/__init__.py b/utils/casbin/rbac/__init__.py new file mode 100644 index 0000000..cd3c152 --- /dev/null +++ b/utils/casbin/rbac/__init__.py @@ -0,0 +1 @@ +from .role_manager import RoleManager diff --git a/utils/casbin/rbac/default_role_manager/__init__.py b/utils/casbin/rbac/default_role_manager/__init__.py new file mode 100644 index 0000000..4ff0284 --- /dev/null +++ 
class RoleManager(RoleManager):
    """Default in-memory implementation of the RoleManager interface.

    Roles are kept in a flat ``all_roles`` dict keyed by role name; a domain
    is encoded by prefixing the name with ``"<domain>::"``.  An optional
    matching function enables pattern-style role/domain names.
    """

    def __init__(self, max_hierarchy_level):
        """Create the role manager.

        :param max_hierarchy_level: maximum depth followed when resolving
            transitive role links (guards against cycles).
        """
        self.logger = logging.getLogger()
        self.all_roles = dict()
        self.max_hierarchy_level = max_hierarchy_level
        self.matching_func = None
        self.domain_matching_func = None
        self.has_pattern = None
        self.has_domain_pattern = None

    def add_matching_func(self, fn=None):
        """Install a pattern-matching function used to compare role names."""
        self.has_pattern = True
        self.matching_func = fn

    def add_domain_matching_func(self, fn=None):
        """Install a pattern-matching function used to compare domain names."""
        self.has_domain_pattern = True
        self.domain_matching_func = fn

    def has_role(self, name):
        """Return True if *name* (or a pattern match of it) is a known role."""
        if self.matching_func is None:
            return name in self.all_roles.keys()
        for key in self.all_roles.keys():
            if self.matching_func(name, key):
                return True
        return False

    def create_role(self, name):
        """Return the Role object for *name*, creating it on first use."""
        if name not in self.all_roles.keys():
            self.all_roles[name] = Role(name)
        return self.all_roles[name]

    def clear(self):
        """Forget every role and link."""
        self.all_roles.clear()

    def add_link(self, name1, name2, *domain):
        """Record that role *name1* inherits role *name2* (optionally in a domain).

        :raises RuntimeError: if more than one domain is given.
        """
        if len(domain) == 1:
            name1 = domain[0] + "::" + name1
            name2 = domain[0] + "::" + name2
        elif len(domain) > 1:
            raise RuntimeError("error: domain should be 1 parameter")

        role1 = self.create_role(name1)
        role2 = self.create_role(name2)
        role1.add_role(role2)

        if self.matching_func is not None:
            # Propagate the new link to every stored name that pattern-matches
            # either endpoint (checked in both match directions, as upstream).
            for key, role in self.all_roles.items():
                if self.matching_func(key, name1) and name1 != key:
                    self.all_roles[key].add_role(role1)
                if self.matching_func(key, name2) and name2 != key:
                    self.all_roles[name2].add_role(role)
                if self.matching_func(name1, key) and name1 != key:
                    self.all_roles[key].add_role(role1)
                if self.matching_func(name2, key) and name2 != key:
                    self.all_roles[name2].add_role(role)

    def delete_link(self, name1, name2, *domain):
        """Remove the inheritance link between *name1* and *name2*.

        :raises RuntimeError: if either role does not exist, or more than one
            domain is given.
        """
        if len(domain) == 1:
            name1 = domain[0] + "::" + name1
            name2 = domain[0] + "::" + name2
        elif len(domain) > 1:
            raise RuntimeError("error: domain should be 1 parameter")

        if not self.has_role(name1) or not self.has_role(name2):
            raise RuntimeError("error: name1 or name2 does not exist")

        role1 = self.create_role(name1)
        role2 = self.create_role(name2)
        role1.delete_role(role2)

    def has_link(self, name1, name2, *domain):
        """Return True if *name1* inherits *name2* within max_hierarchy_level hops."""
        if len(domain) == 1:
            name1 = domain[0] + "::" + name1
            name2 = domain[0] + "::" + name2
        elif len(domain) > 1:
            raise RuntimeError("error: domain should be 1 parameter")

        if name1 == name2:
            return True

        if not self.has_role(name1) or not self.has_role(name2):
            return False

        if self.matching_func is None:
            role1 = self.create_role(name1)
            return role1.has_role(name2, self.max_hierarchy_level)

        for key, role in self.all_roles.items():
            if self.matching_func(name1, key) and role.has_role(
                    name2, self.max_hierarchy_level, self.matching_func):
                return True
        return False

    def get_roles(self, name, domain=None):
        """Return the roles that *name* directly inherits.

        *domain*, when given, is used as a name prefix and is stripped from
        the returned role names.
        """
        if domain:
            name = domain + "::" + name

        if not self.has_role(name):
            return []

        roles = self.create_role(name).get_roles()
        if domain:
            # Strip the "<domain>::" prefix (len(domain) + 2 chars) again.
            roles = [role[len(domain) + 2:] for role in roles]

        return roles

    def get_users(self, name, *domain):
        """Return the users that directly inherit *name*.

        :raises RuntimeError: if more than one domain is given.
        """
        if len(domain) == 1:
            name = domain[0] + "::" + name
        elif len(domain) > 1:
            # FIX: this error was previously *returned* instead of raised,
            # silently handing callers an exception object as the result.
            raise RuntimeError("error: domain should be 1 parameter")

        if not self.has_role(name):
            return []

        names = []
        for role in self.all_roles.values():
            if role.has_direct_role(name):
                if len(domain) == 1:
                    names.append(role.name[len(domain[0]) + 2:])
                else:
                    names.append(role.name)

        return names

    def print_roles(self):
        """Log every role's inheritance line as a single INFO record."""
        line = []
        for role in self.all_roles.values():
            text = role.to_string()
            if text:
                line.append(text)
        self.logger.info(", ".join(line))


class Role:
    """A single role and the roles it directly inherits."""

    def __init__(self, name):
        self.name = name
        self.roles = []

    def add_role(self, role):
        """Add *role* as a direct parent, ignoring duplicates by name."""
        for rr in self.roles:
            if rr.name == role.name:
                return
        self.roles.append(role)

    def delete_role(self, role):
        """Remove the direct parent whose name matches *role*, if present."""
        for rr in self.roles:
            if rr.name == role.name:
                self.roles.remove(rr)
                return

    def has_role(self, name, hierarchy_level, matching_func=None):
        """Return True if *name* is reachable within *hierarchy_level* hops."""
        if self.has_direct_role(name, matching_func):
            return True
        if hierarchy_level <= 0:
            return False

        for role in self.roles:
            if role.has_role(name, hierarchy_level - 1, matching_func):
                return True
        return False

    def has_direct_role(self, name, matching_func=None):
        """Return True if *name* is a direct parent (pattern-aware if a matcher is given)."""
        if matching_func is None:
            for role in self.roles:
                if role.name == name:
                    return True
        else:
            for role in self.roles:
                if matching_func(name, role.name):
                    return True
        return False

    def to_string(self):
        """Render ``name < parents`` for debugging; empty string if no parents."""
        if len(self.roles) == 0:
            return ""

        names = ", ".join(self.get_roles())

        if len(self.roles) == 1:
            return self.name + " < " + names
        return self.name + " < (" + names + ")"

    def get_roles(self):
        """Return the names of the directly inherited roles."""
        return [role.name for role in self.roles]
class AtomicBool:
    """A value guarded by a lock so reads and writes are atomic across threads."""

    def __init__(self, value):
        self._lock = threading.Lock()
        self._value = value

    @property
    def value(self):
        with self._lock:
            return self._value

    @value.setter
    def value(self, value):
        with self._lock:
            self._value = value


class SyncedEnforcer:
    """SyncedEnforcer wraps Enforcer and provides synchronized access.

    It is a drop-in replacement for Enforcer: read operations take a shared
    (read) lock, mutating operations take an exclusive (write) lock.
    """

    def __init__(self, model=None, adapter=None):
        self._e = Enforcer(model, adapter)
        self._rwlock = RWLockWrite()
        self._rl = self._rwlock.gen_rlock()
        self._wl = self._rwlock.gen_wlock()
        self._auto_loading = AtomicBool(False)
        self._auto_loading_thread = None

    def is_auto_loading_running(self):
        """Check if SyncedEnforcer is auto loading policies."""
        return self._auto_loading.value

    def _auto_load_policy(self, interval):
        # Background loop: reload the policy every `interval` seconds until stopped.
        while self.is_auto_loading_running():
            time.sleep(interval)
            self.load_policy()

    def start_auto_load_policy(self, interval):
        """Start a daemon thread that calls load_policy every *interval* seconds."""
        if self.is_auto_loading_running():
            return
        self._auto_loading.value = True
        self._auto_loading_thread = threading.Thread(
            target=self._auto_load_policy, args=[interval], daemon=True)
        self._auto_loading_thread.start()

    def stop_auto_load_policy(self):
        """Stop the thread started by start_auto_load_policy."""
        if self.is_auto_loading_running():
            self._auto_loading.value = False

    def get_model(self):
        """Get the current model."""
        with self._rl:
            return self._e.get_model()

    def set_model(self, m):
        """Set the current model."""
        with self._wl:
            return self._e.set_model(m)

    def load_model(self):
        """Reload the model from the model CONF file.

        Because the policy is attached to a model, the policy is invalidated
        and needs to be reloaded by calling load_policy().
        """
        with self._wl:
            return self._e.load_model()

    def get_role_manager(self):
        """Get the current role manager."""
        with self._rl:
            return self._e.get_role_manager()

    def set_role_manager(self, rm):
        """Set the current role manager."""
        with self._wl:
            self._e.set_role_manager(rm)

    def get_adapter(self):
        """Get the current adapter."""
        with self._rl:
            # FIX: the result was previously dropped (missing `return`),
            # so this method always answered None.
            return self._e.get_adapter()

    def set_adapter(self, adapter):
        """Set the current adapter."""
        with self._wl:
            self._e.set_adapter(adapter)

    def set_watcher(self, watcher):
        """Set the current watcher."""
        with self._wl:
            self._e.set_watcher(watcher)

    def set_effector(self, eft):
        """Set the current effector."""
        with self._wl:
            self._e.set_effector(eft)

    def clear_policy(self):
        """Clear all policy."""
        with self._wl:
            return self._e.clear_policy()

    def load_policy(self):
        """Reload the policy from file/database."""
        with self._wl:
            return self._e.load_policy()

    def load_filtered_policy(self, filter):
        """Reload a filtered policy from file/database."""
        with self._wl:
            return self._e.load_filtered_policy(filter)

    def save_policy(self):
        """Save the current policy back to file/database."""
        with self._rl:
            return self._e.save_policy()

    def build_role_links(self):
        """Manually rebuild the role inheritance relations."""
        with self._rl:
            return self._e.build_role_links()

    def enforce(self, *rvals):
        """Decide whether a "subject" can access an "object" with the operation
        "action"; input parameters are usually: (sub, obj, act).
        """
        with self._rl:
            return self._e.enforce(*rvals)

    def enforce_ex(self, *rvals):
        """Like enforce(), but also returns the reason for the judgement."""
        with self._rl:
            return self._e.enforce_ex(*rvals)

    def get_all_subjects(self):
        """Get the list of subjects that show up in the current policy."""
        with self._rl:
            return self._e.get_all_subjects()

    def get_all_named_subjects(self, ptype):
        """Get the list of subjects that show up in the current named policy."""
        with self._rl:
            return self._e.get_all_named_subjects(ptype)

    def get_all_objects(self):
        """Get the list of objects that show up in the current policy."""
        with self._rl:
            return self._e.get_all_objects()

    def get_all_named_objects(self, ptype):
        """Get the list of objects that show up in the current named policy."""
        with self._rl:
            return self._e.get_all_named_objects(ptype)

    def get_all_actions(self):
        """Get the list of actions that show up in the current policy."""
        with self._rl:
            return self._e.get_all_actions()

    def get_all_named_actions(self, ptype):
        """Get the list of actions that show up in the current named policy."""
        with self._rl:
            return self._e.get_all_named_actions(ptype)

    def get_all_roles(self):
        """Get the list of roles that show up in the current named policy."""
        with self._rl:
            return self._e.get_all_roles()

    def get_all_named_roles(self, ptype):
        """Get the list of roles that show up in the current named policy."""
        with self._rl:
            return self._e.get_all_named_roles(ptype)

    def get_policy(self):
        """Get all the authorization rules in the policy."""
        with self._rl:
            return self._e.get_policy()

    def get_filtered_policy(self, field_index, *field_values):
        """Get all the authorization rules in the policy, with field filters."""
        with self._rl:
            return self._e.get_filtered_policy(field_index, *field_values)

    def get_named_policy(self, ptype):
        """Get all the authorization rules in the named policy."""
        with self._rl:
            return self._e.get_named_policy(ptype)

    def get_filtered_named_policy(self, ptype, field_index, *field_values):
        """Get all the authorization rules in the named policy, with field filters."""
        with self._rl:
            return self._e.get_filtered_named_policy(ptype, field_index, *field_values)

    def get_grouping_policy(self):
        """Get all the role inheritance rules in the policy."""
        with self._rl:
            return self._e.get_grouping_policy()

    def get_filtered_grouping_policy(self, field_index, *field_values):
        """Get all the role inheritance rules in the policy, with field filters."""
        with self._rl:
            return self._e.get_filtered_grouping_policy(field_index, *field_values)

    def get_named_grouping_policy(self, ptype):
        """Get all the role inheritance rules in the named policy."""
        with self._rl:
            return self._e.get_named_grouping_policy(ptype)

    def get_filtered_named_grouping_policy(self, ptype, field_index, *field_values):
        """Get all the role inheritance rules in the named policy, with field filters."""
        with self._rl:
            return self._e.get_filtered_named_grouping_policy(ptype, field_index, *field_values)

    def has_policy(self, *params):
        """Determine whether an authorization rule exists."""
        with self._rl:
            return self._e.has_policy(*params)

    def has_named_policy(self, ptype, *params):
        """Determine whether a named authorization rule exists."""
        with self._rl:
            return self._e.has_named_policy(ptype, *params)

    def add_policy(self, *params):
        """Add an authorization rule to the current policy.

        Returns False if the rule already exists (and does not add it),
        True otherwise.
        """
        with self._wl:
            return self._e.add_policy(*params)

    def add_named_policy(self, ptype, *params):
        """Add an authorization rule to the current named policy.

        Returns False if the rule already exists (and does not add it),
        True otherwise.
        """
        with self._wl:
            return self._e.add_named_policy(ptype, *params)

    def remove_policy(self, *params):
        """Remove an authorization rule from the current policy."""
        with self._wl:
            return self._e.remove_policy(*params)

    def remove_filtered_policy(self, field_index, *field_values):
        """Remove authorization rules matching the field filters from the current policy."""
        with self._wl:
            return self._e.remove_filtered_policy(field_index, *field_values)

    def remove_named_policy(self, ptype, *params):
        """Remove an authorization rule from the current named policy."""
        with self._wl:
            return self._e.remove_named_policy(ptype, *params)

    def remove_filtered_named_policy(self, ptype, field_index, *field_values):
        """Remove authorization rules matching the field filters from the current named policy."""
        with self._wl:
            return self._e.remove_filtered_named_policy(ptype, field_index, *field_values)

    def has_grouping_policy(self, *params):
        """Determine whether a role inheritance rule exists."""
        with self._rl:
            return self._e.has_grouping_policy(*params)

    def has_named_grouping_policy(self, ptype, *params):
        """Determine whether a named role inheritance rule exists."""
        with self._rl:
            return self._e.has_named_grouping_policy(ptype, *params)

    def add_grouping_policy(self, *params):
        """Add a role inheritance rule to the current policy.

        Returns False if the rule already exists (and does not add it),
        True otherwise.
        """
        with self._wl:
            return self._e.add_grouping_policy(*params)

    def add_named_grouping_policy(self, ptype, *params):
        """Add a named role inheritance rule to the current policy.

        Returns False if the rule already exists (and does not add it),
        True otherwise.
        """
        with self._wl:
            return self._e.add_named_grouping_policy(ptype, *params)

    def remove_grouping_policy(self, *params):
        """Remove a role inheritance rule from the current policy."""
        with self._wl:
            return self._e.remove_grouping_policy(*params)

    def remove_filtered_grouping_policy(self, field_index, *field_values):
        """Remove role inheritance rules matching the field filters from the current policy."""
        with self._wl:
            return self._e.remove_filtered_grouping_policy(field_index, *field_values)

    def remove_named_grouping_policy(self, ptype, *params):
        """Remove a role inheritance rule from the current named policy."""
        with self._wl:
            return self._e.remove_named_grouping_policy(ptype, *params)

    def remove_filtered_named_grouping_policy(self, ptype, field_index, *field_values):
        """Remove role inheritance rules matching the field filters from the current named policy."""
        with self._wl:
            return self._e.remove_filtered_named_grouping_policy(ptype, field_index, *field_values)

    def add_function(self, name, func):
        """Add a customized function."""
        with self._wl:
            return self._e.add_function(name, func)

    # --- RBAC convenience API (delegates to enforcer.py) ---

    def get_roles_for_user(self, name):
        """Get the roles that a user has."""
        with self._rl:
            return self._e.get_roles_for_user(name)

    def get_users_for_role(self, name):
        """Get the users that have a role."""
        with self._rl:
            return self._e.get_users_for_role(name)

    def has_role_for_user(self, name, role):
        """Determine whether a user has a role."""
        with self._rl:
            return self._e.has_role_for_user(name, role)

    def add_role_for_user(self, user, role):
        """Add a role for a user.

        Returns False if the user already has the role (not affected).
        """
        with self._wl:
            return self._e.add_role_for_user(user, role)

    def delete_role_for_user(self, user, role):
        """Delete a role for a user.

        Returns False if the user does not have the role (not affected).
        """
        with self._wl:
            return self._e.delete_role_for_user(user, role)

    def delete_roles_for_user(self, user):
        """Delete all roles for a user.

        Returns False if the user does not have any roles (not affected).
        """
        with self._wl:
            return self._e.delete_roles_for_user(user)

    def delete_user(self, user):
        """Delete a user.

        Returns False if the user does not exist (not affected).
        """
        with self._wl:
            return self._e.delete_user(user)

    def delete_role(self, role):
        """Delete a role.

        Returns False if the role does not exist (not affected).
        """
        with self._wl:
            return self._e.delete_role(role)

    def delete_permission(self, *permission):
        """Delete a permission.

        Returns False if the permission does not exist (not affected).
        """
        with self._wl:
            return self._e.delete_permission(*permission)

    def add_permission_for_user(self, user, *permission):
        """Add a permission for a user or role.

        Returns False if the user or role already has the permission (not affected).
        """
        with self._wl:
            return self._e.add_permission_for_user(user, *permission)

    def delete_permission_for_user(self, user, *permission):
        """Delete a permission for a user or role.

        Returns False if the user or role does not have the permission (not affected).
        """
        with self._wl:
            return self._e.delete_permission_for_user(user, *permission)

    def delete_permissions_for_user(self, user):
        """Delete all permissions for a user or role.

        Returns False if the user or role has no permissions (not affected).
        """
        with self._wl:
            return self._e.delete_permissions_for_user(user)

    def get_permissions_for_user(self, user):
        """Get permissions for a user or role."""
        with self._rl:
            return self._e.get_permissions_for_user(user)

    def has_permission_for_user(self, user, *permission):
        """Determine whether a user has a permission."""
        with self._rl:
            return self._e.has_permission_for_user(user, *permission)

    def get_implicit_roles_for_user(self, name, *domain):
        """Get implicit roles that a user has (direct plus inherited).

        For example, with g, alice, role:admin and g, role:admin, role:user:
        get_roles_for_user("alice") gets ["role:admin"], while
        get_implicit_roles_for_user("alice") gets ["role:admin", "role:user"].
        """
        with self._rl:
            return self._e.get_implicit_roles_for_user(name, *domain)

    def get_implicit_permissions_for_user(self, user, *domain):
        """Get implicit permissions for a user or role (including inherited roles').

        Compared to get_permissions_for_user(), this also retrieves the
        permissions of the roles the user inherits.
        """
        with self._rl:
            return self._e.get_implicit_permissions_for_user(user, *domain)

    def get_implicit_users_for_permission(self, *permission):
        """Get implicit users for a permission.

        Only users are returned; roles (2nd arg in "g") are excluded.
        """
        with self._rl:
            return self._e.get_implicit_users_for_permission(*permission)

    def get_roles_for_user_in_domain(self, name, domain):
        """Get the roles that a user has inside a domain."""
        with self._rl:
            return self._e.get_roles_for_user_in_domain(name, domain)

    def get_users_for_role_in_domain(self, name, domain):
        """Get the users that have a role inside a domain."""
        with self._rl:
            return self._e.get_users_for_role_in_domain(name, domain)

    def add_role_for_user_in_domain(self, user, role, domain):
        """Add a role for a user inside a domain.

        Returns False if the user already has the role (not affected).
        """
        with self._wl:
            return self._e.add_role_for_user_in_domain(user, role, domain)

    def delete_roles_for_user_in_domain(self, user, role, domain):
        """Delete a role for a user inside a domain.

        Returns False if the user does not have any roles (not affected).
        """
        with self._wl:
            return self._e.delete_roles_for_user_in_domain(user, role, domain)

    def get_permissions_for_user_in_domain(self, user, domain):
        """Get permissions for a user or role inside a domain."""
        with self._rl:
            return self._e.get_permissions_for_user_in_domain(user, domain)

    def enable_auto_build_role_links(self, auto_build_role_links):
        """Control whether to rebuild role inheritance relations on role add/delete."""
        with self._wl:
            return self._e.enable_auto_build_role_links(auto_build_role_links)

    def enable_auto_save(self, auto_save):
        """Control whether to automatically persist policy rule changes to the adapter."""
        with self._wl:
            return self._e.enable_auto_save(auto_save)

    def enable_enforce(self, enabled=True):
        """Change the enforcing state of Casbin.

        When Casbin is disabled, all access is allowed by enforce().
        """
        with self._wl:
            return self._e.enable_enforce(enabled)

    def add_named_matching_func(self, ptype, fn):
        """Add a MatchingFunc by ptype to the RoleManager."""
        with self._wl:
            self._e.add_named_matching_func(ptype, fn)

    def add_named_domain_matching_func(self, ptype, fn):
        """Add a domain MatchingFunc by ptype to the RoleManager."""
        with self._wl:
            self._e.add_named_domain_matching_func(ptype, fn)

    def is_filtered(self):
        """Return True if the loaded policy has been filtered."""
        with self._rl:
            # FIX: the result was previously dropped (missing `return`),
            # so this method always answered None (falsy).
            return self._e.is_filtered()

    def add_policies(self, rules):
        """Add authorization rules to the current policy.

        For each rule that already exists, False is returned and the rule is
        not added; otherwise True.
        """
        with self._wl:
            return self._e.add_policies(rules)

    def add_named_policies(self, ptype, rules):
        """Add authorization rules to the current named policy.

        For each rule that already exists, False is returned and the rule is
        not added; otherwise True.
        """
        with self._wl:
            return self._e.add_named_policies(ptype, rules)

    def remove_policies(self, rules):
        """Remove authorization rules from the current policy."""
        with self._wl:
            return self._e.remove_policies(rules)

    def remove_named_policies(self, ptype, rules):
        """Remove authorization rules from the current named policy."""
        with self._wl:
            return self._e.remove_named_policies(ptype, rules)

    def add_grouping_policies(self, rules):
        """Add role inheritance rules to the current policy.

        For each rule that already exists, False is returned and the rule is
        not added; otherwise True.
        """
        with self._wl:
            return self._e.add_grouping_policies(rules)

    def add_named_grouping_policies(self, ptype, rules):
        """Add named role inheritance rules to the current policy.

        For each rule that already exists, False is returned and the rule is
        not added; otherwise True.
        """
        with self._wl:
            return self._e.add_named_grouping_policies(ptype, rules)

    def remove_grouping_policies(self, rules):
        """Remove role inheritance rules from the current policy."""
        with self._wl:
            # FIX: previously called the nonexistent
            # `addremove_grouping_policies_policies`, raising AttributeError.
            return self._e.remove_grouping_policies(rules)

    def remove_named_grouping_policies(self, ptype, rules):
        """Remove role inheritance rules from the current named policy."""
        with self._wl:
            return self._e.remove_named_grouping_policies(ptype, rules)

    def build_incremental_role_links(self, op, ptype, rules):
        """Incrementally rebuild role links for the given operation and rules."""
        self.get_model().build_incremental_role_links(self.get_role_manager(), op, "g", ptype, rules)

    def get_all_users_by_domain(self, domain):
        """Return all users associated with *domain* as {'username', 'role_id'} dicts."""
        data = self.get_filtered_grouping_policy(2, domain)
        res = []
        for item in data:
            res.append({
                'username': item[0],
                'role_id': item[1]
            })
        return res

    def get_domains_for_user(self, user):
        """Return all domain names that *user* belongs to."""
        data = self.get_filtered_grouping_policy(0, user)
        res = []
        for item in data:
            res.append(item[2])
        return res
fnmatch +import re +import ipaddress + +KEY_MATCH2_PATTERN = re.compile(r'(.*?):[^\/]+(.*?)') +KEY_MATCH3_PATTERN = re.compile(r'(.*?){[^\/]+}(.*?)') + + +def key_match(key1, key2): + """determines whether key1 matches the pattern of key2 (similar to RESTful path), key2 can contain a *. + For example, "/foo/bar" matches "/foo/*" + """ + + i = key2.find("*") + if i == -1: + return key1 == key2 + + if len(key1) > i: + return key1[:i] == key2[:i] + return key1 == key2[:i] + + +def key_match_func(*args): + """The wrapper for key_match. + """ + name1 = args[0] + name2 = args[1] + + return key_match(name1, name2) + + +def key_match2(key1, key2): + """determines whether key1 matches the pattern of key2 (similar to RESTful path), key2 can contain a *. + For example, "/foo/bar" matches "/foo/*", "/resource1" matches "/:resource" + """ + + key2 = key2.replace("/*", "/.*") + key2 = KEY_MATCH2_PATTERN.sub(r'\g<1>[^\/]+\g<2>', key2, 0) + + return regex_match(key1, "^" + key2 + "$") + + +def key_match2_func(*args): + name1 = args[0] + name2 = args[1] + + return key_match2(name1, name2) + + +def key_match3(key1, key2): + """determines determines whether key1 matches the pattern of key2 (similar to RESTful path), key2 can contain a *. 
+ For example, "/foo/bar" matches "/foo/*", "/resource1" matches "/{resource}" + """ + + key2 = key2.replace("/*", "/.*") + key2 = KEY_MATCH3_PATTERN.sub(r'\g<1>[^\/]+\g<2>', key2, 0) + + return regex_match(key1, "^" + key2 + "$") + + +def key_match3_func(*args): + name1 = args[0] + name2 = args[1] + + return key_match3(name1, name2) + + +def regex_match(key1, key2): + """determines whether key1 matches the pattern of key2 in regular expression.""" + + res = re.match(key2, key1) + if res: + return True + else: + return False + + +def regex_match_func(*args): + """the wrapper for RegexMatch.""" + + name1 = args[0] + name2 = args[1] + + return regex_match(name1, name2) + + +def glob_match(string, pattern): + """determines whether string matches the pattern in glob expression.""" + return fnmatch.fnmatch(string, pattern) + + +def glob_match_func(*args): + """the wrapper for globMatch.""" + + string = args[0] + pattern = args[1] + + return glob_match(string, pattern) + + +def ip_match(ip1, ip2): + """IPMatch determines whether IP address ip1 matches the pattern of IP address ip2, ip2 can be an IP address or a CIDR pattern. 
+ For example, "192.168.2.123" matches "192.168.2.0/24" + """ + ip1 = ipaddress.ip_address(ip1) + try: + network = ipaddress.ip_network(ip2, strict=False) + return ip1 in network + except ValueError: + return ip1 == ip2 + + +def ip_match_func(*args): + """the wrapper for IPMatch.""" + + ip1 = args[0] + ip2 = args[1] + + return ip_match(ip1, ip2) + + +def generate_g_function(rm): + """the factory method of the g(_, _) function.""" + + def f(*args): + name1 = args[0] + name2 = args[1] + + if not rm: + return name1 == name2 + elif 2 == len(args): + return rm.has_link(name1, name2) + else: + domain = str(args[2]) + return rm.has_link(name1, name2, domain) + + return f diff --git a/utils/casbin/util/expression.py b/utils/casbin/util/expression.py new file mode 100644 index 0000000..cd9a20d --- /dev/null +++ b/utils/casbin/util/expression.py @@ -0,0 +1,29 @@ +from simpleeval import SimpleEval +import ast + + +class SimpleEval(SimpleEval): + """ Rewrite SimpleEval. + >>> s = SimpleEval("20 + 30 - ( 10 * 5)") + >>> s.eval() + 0 + """ + + ast_parsed_value = None + + def __init__(self, expr, functions=None): + """Create the evaluator instance. Set up valid operators (+,-, etc) + functions (add, random, get_val, whatever) and names. """ + super(SimpleEval, self).__init__(functions=functions) + if expr != "": + self.expr = expr + self.expr_parsed_value = ast.parse(expr.strip()).body[0].value + + def eval(self, names=None): + """ evaluate an expresssion, using the operators, functions and + names previously set up. 
""" + + if names: + self.names = names + + return self._eval(self.expr_parsed_value) diff --git a/utils/casbin/util/rwlock.py b/utils/casbin/util/rwlock.py new file mode 100644 index 0000000..2f54b90 --- /dev/null +++ b/utils/casbin/util/rwlock.py @@ -0,0 +1,68 @@ +from threading import RLock, Condition + +# This implementation was adapted from https://en.wikipedia.org/wiki/Readers%E2%80%93writer_lock + +class RWLockWrite(): + ''' write preferring readers-wirter lock ''' + + def __init__(self): + self._lock = RLock() + self._cond = Condition(self._lock) + self._active_readers = 0 + self._waiting_writers = 0 + self._writer_active = False + + def aquire_read(self): + with self._lock: + while self._waiting_writers > 0 or self._writer_active: + self._cond.wait() + self._active_readers += 1 + + def release_read(self): + with self._lock: + self._active_readers -= 1 + if self._active_readers == 0: + self._cond.notify_all() + + def aquire_write(self): + with self._lock: + self._waiting_writers += 1 + while self._active_readers > 0 or self._writer_active: + self._cond.wait() + self._waiting_writers -= 1 + self._writer_active = True + + def release_write(self): + with self._lock: + self._writer_active = False + self._cond.notify_all() + + def gen_rlock(self): + return ReadRWLock(self) + + def gen_wlock(self): + return WriteRWLock(self) + +class ReadRWLock(): + + def __init__(self, rwlock): + self.rwlock = rwlock + + def __enter__(self): + self.rwlock.aquire_read() + + def __exit__(self, exc_type, exc_value, traceback): + self.rwlock.release_read() + return False + +class WriteRWLock(): + + def __init__(self, rwlock): + self.rwlock = rwlock + + def __enter__(self): + self.rwlock.aquire_write() + + def __exit__(self, exc_type, exc_value, traceback): + self.rwlock.release_write() + return False diff --git a/utils/casbin/util/util.py b/utils/casbin/util/util.py new file mode 100644 index 0000000..baa0d56 --- /dev/null +++ b/utils/casbin/util/util.py @@ -0,0 +1,72 @@ +from 
collections import OrderedDict +import re + +eval_reg = re.compile(r'\beval\((?P