From 1b1daa581e2ccc61e9283bd1b4c7ac14f97d10be Mon Sep 17 00:00:00 2001 From: houhuan Date: Tue, 9 Dec 2025 10:19:02 +0800 Subject: [PATCH] =?UTF-8?q?=E4=BF=AE=E5=A4=8D=E6=95=B4=E7=82=B9=E4=B8=8D?= =?UTF-8?q?=E7=94=9F=E6=88=90=E6=95=B0=E6=8D=AEbug=EF=BC=8C=E6=96=B0?= =?UTF-8?q?=E5=A2=9E=E6=9C=BA=E5=99=A8=E4=BA=BA=E6=8E=A8=E9=80=81?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app.log | 3 + backend/app.py | 225 ++++++++++++++++++++++++++++++++------- backend/requirements.txt | 1 + config.json | 7 +- data.db | Bin 0 -> 20480 bytes data/data.db | Bin 0 -> 16384 bytes instance/data.db | Bin 65536 -> 65536 bytes scripts/set_yesterday.py | 62 +++++++++++ 8 files changed, 255 insertions(+), 43 deletions(-) create mode 100644 data.db create mode 100644 scripts/set_yesterday.py diff --git a/app.log b/app.log index 9f3c858..15cd563 100644 --- a/app.log +++ b/app.log @@ -1 +1,4 @@ 准备发送消息: 【益选便利店】2025-12-07的营业额:2408.7 +准备发送消息: 【益选便利店】2025-12-08的营业额:3402.6 +准备发送消息: 【益选便利店】2025-12-08的营业额:3629.76 +准备发送消息: 【益选便利店】2025-12-06的营业额:1803.09 diff --git a/backend/app.py b/backend/app.py index 6a4d52f..1ef44f7 100644 --- a/backend/app.py +++ b/backend/app.py @@ -14,32 +14,31 @@ import json import time import csv import io +import requests +import hmac +import hashlib +import base64 load_dotenv() app = Flask(__name__, static_folder="../frontend", static_url_path="/static") CORS(app) -app.config['SQLALCHEMY_DATABASE_URI'] = os.getenv("DATABASE_URL", "sqlite:///data/data.db") + +# 强制使用绝对路径解决 Windows 路径问题 +base_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) +db_path = os.path.join(base_dir, "data", "data.db") +if os.name == 'nt': + # Windows 需要转义反斜杠,或者使用 4 个斜杠 + 驱动器号 + # SQLAlchemy 在 Windows 上通常接受 sqlite:///C:\path\to\file + app.config['SQLALCHEMY_DATABASE_URI'] = f"sqlite:///{db_path}" +else: + app.config['SQLALCHEMY_DATABASE_URI'] = f"sqlite:////{db_path}" + def _ensure_sqlite_dir(url): - if not url.startswith('sqlite:'): - return - p = url - if p.startswith('sqlite:////'): - db_path = p.replace('sqlite:////', '') - if db_path[1:3] == ':/' or db_path[1:3] == ':\\': - # path like 'E:/...' 
with a leading slash - pass - elif db_path[0:2] == ':/': - # unlikely - db_path = db_path[1:] - # Normalize Windows backslashes - db_path = db_path.replace('/', os.sep) - elif p.startswith('sqlite:///'): - db_path = os.path.join(os.getcwd(), p.replace('sqlite:///', '')) - else: - return + # 已通过绝对路径计算,直接检查 data 目录 d = os.path.dirname(db_path) - if d and not os.path.exists(d): + if not os.path.exists(d): os.makedirs(d, exist_ok=True) + _ensure_sqlite_dir(app.config['SQLALCHEMY_DATABASE_URI']) db = SQLAlchemy(app) @@ -79,6 +78,9 @@ with app.app_context(): db.session.execute(text('ALTER TABLE daily_revenue ADD COLUMN note TEXT')) db.session.commit() +def push_feishu(date_str: str, amount: float, reason: str): + return + def generate_mock_revenue(): """保持原有逻辑:生成当日模拟营业额""" base = random.uniform(8000, 15000) @@ -90,25 +92,36 @@ def _append_log_line(date_str: str, amount: float, shop_name: str): line = f"准备发送消息: 【{shop_name}】{date_str}的营业额:{amount}" with open(log_path, 'a', encoding='utf-8') as f: f.write(line + "\n") + try: + print(line, flush=True) + except Exception: + pass -def daily_job(): - """定时任务:在本地 23:00 生成并定版当日数据(不重复生成)""" - cfg = load_config() - shop_name = cfg.get("shop_name", "益选便利店") - today = datetime.now().date() - existing = DailyRevenue.query.filter_by(date=today).first() - if existing: - if not existing.is_final: - existing.is_final = True - existing.source = existing.source or 'generator' - db.session.commit() - return - amount = gen_amount_for_date(today, cfg) - rev = DailyRevenue(date=today, amount=amount, is_final=True, source='generator') - db.session.add(rev) - db.session.add(AuditLog(date=today, old_amount=None, new_amount=amount, reason='daily_generate', actor='system', type='generate')) - db.session.commit() - _append_log_line(today.isoformat(), amount, shop_name) +def daily_job(target_date=None): + """定时任务:生成并定版指定日期的数据(默认当日)""" + with app.app_context(): + cfg = load_config() + shop_name = cfg.get("shop_name", "益选便利店") + if target_date is None: + target_date = datetime.now().date() + + existing = DailyRevenue.query.filter_by(date=target_date).first() + if existing: + if not existing.is_final: + existing.is_final = True + existing.source = existing.source or 'generator' + db.session.commit() + # 补推消息 + push_feishu(target_date.isoformat(), existing.amount, "daily_finalize") + return + + amount = gen_amount_for_date(target_date, cfg) + rev = DailyRevenue(date=target_date, amount=amount, is_final=True, source='generator') + db.session.add(rev) + db.session.add(AuditLog(date=target_date, old_amount=None, new_amount=amount, reason='daily_generate', actor='system', type='generate')) + db.session.commit() + _append_log_line(target_date.isoformat(), amount, shop_name) + push_feishu(target_date.isoformat(), amount, "daily_generate") def settle_today_if_due(): cfg = load_config() @@ -117,9 +130,20 @@ def settle_today_if_due(): cutoff = int(cutoff) except Exception: cutoff = 23 - if datetime.now().hour < cutoff: - return - daily_job() + # 只有当前时间 >= cutoff 才尝试结算今天 + if datetime.now().hour >= cutoff: + daily_job() + +def settle_past_days(): + """启动检查:补录过去3天未定版的数据(防止服务器宕机漏单)""" + with app.app_context(): + today = datetime.now().date() + for i in range(1, 4): + d = today - timedelta(days=i) + existing = DailyRevenue.query.filter_by(date=d).first() + if not existing or not existing.is_final: + print(f"补录数据: {d}") + daily_job(d) # ---- 日志解析与聚合 ---- def load_config(): @@ -253,6 +277,35 @@ def api_revenue(): rows = DailyRevenue.query.filter(DailyRevenue.date >= 
start).order_by(DailyRevenue.date.desc()).all() return jsonify([{"date": r.date.isoformat(), "amount": r.amount} for r in rows]) +@app.route("/api/audit") +def api_audit(): + limit = int(request.args.get("limit", 20)) + rows = AuditLog.query.order_by(AuditLog.created_at.desc()).limit(limit).all() + return jsonify([ + { + "date": r.date.isoformat(), + "old_amount": r.old_amount, + "new_amount": r.new_amount, + "reason": r.reason, + "actor": r.actor, + "type": r.type, + "created_at": r.created_at.isoformat(timespec='seconds') + } for r in rows + ]) + +@app.route("/api/health") +def api_health(): + cfg = load_config() + cutoff = int(cfg.get("cutoff_hour", 23)) + today_local = datetime.now().date() + r_today = DailyRevenue.query.filter_by(date=today_local).first() + return jsonify({ + "server_now": datetime.now().isoformat(timespec='seconds'), + "cutoff_hour": cutoff, + "today_finalized": bool(r_today and r_today.is_final), + "today_amount": (r_today.amount if r_today else None) + }) + @app.route("/api/export") def api_export(): """CSV导出""" @@ -291,8 +344,21 @@ def admin_correct(): db.session.add(AuditLog(date=d, old_amount=old, new_amount=new_amt, reason=reason, actor=actor, type='correct')) db.session.commit() _append_log_line(d.isoformat(), new_amt, load_config().get('shop_name', '益选便利店')) + push_feishu(d.isoformat(), new_amt, reason) return jsonify({"ok": True}) +@app.route('/api/admin/test_push', methods=['POST']) +def admin_test_push(): + token = os.getenv('ADMIN_TOKEN') + if token and request.headers.get('X-Admin-Token') != token: + return jsonify({"error": "unauthorized"}), 401 + payload = request.get_json(silent=True) or {} + ds = payload.get('date') or datetime.now().date().isoformat() + amt = float(payload.get('amount') or 1234.56) + reason = payload.get('reason') or 'manual_test' + push_feishu(ds, amt, reason) + return jsonify({"ok": True, "pushed": {"date": ds, "amount": amt, "reason": reason}}) + def import_csv_text(text: str, actor: str = 'admin'): buf = io.StringIO(text) reader = csv.reader(buf) @@ -320,6 +386,7 @@ def import_csv_text(text: str, actor: str = 'admin'): r = DailyRevenue(date=d, amount=amt, is_final=True, source='import_csv') db.session.add(r) db.session.add(AuditLog(date=d, old_amount=old, new_amount=amt, reason='import_csv', actor=actor, type='import_log')) + push_feishu(d.isoformat(), amt, 'import_csv') imported += 1 db.session.commit() return imported @@ -363,11 +430,16 @@ def sync_log_to_db(): if __name__ == "__main__": local_tz = get_localzone() scheduler = BackgroundScheduler(timezone=local_tz) - scheduler.add_job(daily_job, "cron", hour=23, minute=0) + try: + cutoff = int(load_config().get("cutoff_hour", 23)) + except Exception: + cutoff = 23 + scheduler.add_job(daily_job, "cron", hour=cutoff, minute=0) scheduler.start() with app.app_context(): sync_log_to_db() auto_import_csv_on_start() + settle_past_days() settle_today_if_due() app.run(host="0.0.0.0", port=int(os.getenv("PORT", "5000"))) @@ -383,3 +455,74 @@ def sse_events(): yield "data: {\"type\": \"force_refresh\"}\n\n" time.sleep(30) return Response(stream_with_context(event_stream()), mimetype='text/event-stream') +def push_feishu(date_str: str, amount: float, reason: str): + cfg = load_config() + url = cfg.get("feishu_webhook_url") or cfg.get("webhook_url") + if not url: + return + shop = cfg.get("shop_name", "益选便利店") + try: + d = datetime.strptime(date_str, '%Y-%m-%d').date() + except Exception: + d = datetime.now().date() + y = d - timedelta(days=1) + r = 
DailyRevenue.query.filter_by(date=y).first() + y_amt = (r.amount if (r and r.is_final) else None) + arrow = '' + diff_str = '' + pct_str = '' + if isinstance(y_amt, (int, float)): + diff = amount - y_amt + arrow = '🔺' if diff >= 0 else '🔻' + diff_str = f"{'+' if diff >= 0 else '-'}{abs(diff):.2f}" + if y_amt != 0: + pct = abs(diff / y_amt) * 100.0 + pct_str = f"({'+' if diff >= 0 else '-'}{pct:.2f}%)" + today_line = f"**今日**:¥{amount:.2f}" + if isinstance(y_amt, (int, float)): + today_line += f" {arrow} {diff_str} {pct_str}".strip() + y_line = f"**昨日**:{('暂无数据' if y_amt is None else '¥' + format(y_amt, '.2f'))}" + card = { + "config": {"wide_screen_mode": True}, + "elements": [ + {"tag": "div", "text": {"tag": "lark_md", "content": f"📊 **{shop}** 营业额通知"}}, + {"tag": "hr"}, + {"tag": "div", "text": {"tag": "lark_md", "content": f"**日期**:{date_str}"}}, + {"tag": "div", "text": {"tag": "lark_md", "content": today_line}}, + {"tag": "div", "text": {"tag": "lark_md", "content": y_line}}, + {"tag": "note", "elements": [ + {"tag": "plain_text", "content": f"原因:{reason} | 时间:{datetime.now().isoformat(timespec='seconds')}"} + ]} + ] + } + payload = {"msg_type": "interactive", "card": card} + secret = cfg.get("feishu_secret") + if secret: + ts = str(int(time.time())) + sign_src = ts + "\n" + secret + sign = base64.b64encode(hmac.new(secret.encode(), sign_src.encode(), digestmod=hashlib.sha256).digest()).decode() + payload.update({"timestamp": ts, "sign": sign}) + def _log(s: str): + p = os.path.join(os.path.dirname(__file__), "..", "app.log") + with open(p, 'a', encoding='utf-8') as f: + f.write(s + "\n") + try: + print(s, flush=True) + except Exception: + pass + def _post_json(u: str, payload_obj: dict): + body = json.dumps(payload_obj, ensure_ascii=False).encode('utf-8') + return requests.post(u, data=body, headers={'Content-Type': 'application/json; charset=utf-8'}, timeout=5) + try: + resp = _post_json(url, payload) + ok = (200 <= resp.status_code < 300) + _log(f"飞书推送卡片{'成功' if ok else '失败'}: status={resp.status_code} {resp.text[:200]}") + if not ok: + text = f"📊 {shop}\n日期:{date_str}\n今日:¥{amount:.2f}" + if isinstance(y_amt, (int, float)): + text += f" {arrow} {diff_str} {pct_str}".strip() + text += f"\n原因:{reason}" + resp2 = _post_json(url, {"msg_type":"text","content":{"text": text}}) + _log(f"飞书推送文本{'成功' if (200 <= resp2.status_code < 300) else '失败'}: status={resp2.status_code} {resp2.text[:200]}") + except Exception as e: + _log(f"飞书推送异常: {str(e)[:200]}") diff --git a/backend/requirements.txt b/backend/requirements.txt index d8b6b98..91ee7e4 100644 --- a/backend/requirements.txt +++ b/backend/requirements.txt @@ -5,3 +5,4 @@ APScheduler==3.10.4 python-dotenv==1.0.0 tzlocal==5.2 tzdata==2025.1 +requests==2.32.3 diff --git a/config.json b/config.json index fedf371..eb6f2cc 100644 --- a/config.json +++ b/config.json @@ -1,5 +1,7 @@ { "webhook_url": "https://api.hiflow.tencent.com/engine/webhook/31/1869391857524076545", + "feishu_webhook_url": "https://open.feishu.cn/open-apis/bot/v2/hook/2ee06084-a0ed-4c3f-9b0b-e264194f7ddd", + "feishu_secret": "", "shop_name": "益选便利店", "weekday_range": [ 2900, @@ -8,5 +10,6 @@ "weekend_range": [ 1600, 2000 - ] -} \ No newline at end of file + ], + "cutoff_hour": 11 +} diff --git a/data.db b/data.db new file mode 100644 index 0000000000000000000000000000000000000000..36876a7fed02eceb1b6f20f5dea9f53510d400fd GIT binary patch literal 20480 zcmeI(&u$Vy90%}OphcP@J(zH)Dbo`rZ4Cu&+MbH-szm;kE{UXv&B#v8Cd+QS>}nft zYA-&EXXC{e@F{!%OEIWIAR53kHPaU_*JX$16 
zL{Z2-rG${E*yqGPOc9ZZ4GZF38OzVgM#;VRcao`}B(``#Zr@FPPJR&?BnUtN0uX=z z1Rwwb2teRQ3B0Mt;^|CAdDr#WbBi}677|7$|D7^l0YUQd*57kGdRz;%+ zt!7i4k{(nZYo%%+~K!_qpBS6UCThJJGmivo7BdJ#%kY zlz2LuRjyw5$26E}UFt4>$!(uYg@`ns)btnPMO(GHWsoOf&C*^d2q9*pPDwa z!nrn_I$>P$atHq`m}b8jLb+?t@!ig}X<<8J;s%pz6!P83e7c%dh-n-AVh~2=drrTs zOL5S*q+)Pwe=GRGNs3)beI#N-f&c^{009U<00Izz00bZa0SG|gpAvYYB*<#Km?$oj zLcZ`|bE~kK-`W3ga<%g5=v!E`OY?=j{Py1c?I~3q$8~wfTM*}^q<#>wAwd8F5P$## zAOHafKmY;|fB*y_@ZSg|qpOMV|A3v~{(mI(m88Cl7bFNk00Izz00bZa0SG_<0uX=z z1g@pPVkDYLgip^bDbZ{~dJq8X|F30OanKNe00bZa0SG_<0uX=z1R!uD1p4d$Nj#Tc fE{<;H57+;9ne8)6A1v@^M%T0l`S7!&Cj))~T3Jf$;y9F! zP_eM^2M_}iTP0Q&7XATJ{{brlLPBEX&S{*wY57>ody;d$JKyWvV=uPfyUm*I6TInq zJ=4ca=py1cRKXY_Got!)}~S zCTt5=>$-MBYvFoB$MyAEZ5}5RmPsG}uCX~sd)=;Om_2vk`1pFQq3Y~NhwM!BdBp6y z4%W4e;1ILpyB@x$wpMSdt*j#E*gSuCn}mJxu&+ZW`h=dt56-Pu*EDwMomO>CZME@j zt&Ov`l}qNHU*_U7b93C@POxIjw7a{8N1hO8Kq8G;WaW{ee_s93l}A`k`P+8i*t8w9 zi&q+rnx@txa|YXtcw(u2ci?p-sLzqtCL`E_!>vRDvH`EvP`5k(a3-nQ!nFL>JAWawh$?dMHa zP7Y91@+bUhc2YW$ABRrTtI7)pNH`F_3t!H}C5Ai!_v!7T%c4$)L#2t|>0^c zW^%}7_gYBO1dRGeFLCWXq#pz40uZwYAkP7jfjW{R4YM60=dA-M11^&X j&{460)&jFG)7A_F4hs8|2jCsCfz%2ES%GJ>`|1T1>rNaw diff --git a/scripts/set_yesterday.py b/scripts/set_yesterday.py new file mode 100644 index 0000000..89ede2f --- /dev/null +++ b/scripts/set_yesterday.py @@ -0,0 +1,62 @@ +import os +import sqlite3 +from datetime import datetime, timedelta + +def _resolve_sqlite_path(url: str) -> str: + if not url.startswith('sqlite:///'): + raise ValueError('Only sqlite URLs are supported') + return url.replace('sqlite:///', '') + +def ensure_tables(conn: sqlite3.Connection) -> None: + cur = conn.cursor() + cur.execute( + """ + CREATE TABLE IF NOT EXISTS daily_revenue ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + date DATE NOT NULL UNIQUE, + amount REAL NOT NULL, + created_at DATETIME DEFAULT CURRENT_TIMESTAMP, + is_final INTEGER NOT NULL DEFAULT 0, + updated_at DATETIME DEFAULT CURRENT_TIMESTAMP, + source TEXT, + note TEXT + ) + """ + ) + cur.execute( + """ + CREATE TABLE IF NOT EXISTS audit_log ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + date DATE NOT NULL, + old_amount REAL, + new_amount REAL, + reason TEXT, + actor TEXT, + type TEXT, + created_at DATETIME DEFAULT CURRENT_TIMESTAMP + ) + """ + ) + conn.commit() + +def set_yesterday(amount: float) -> None: + url = os.getenv('DATABASE_URL', 'sqlite:///data.db') + path = _resolve_sqlite_path(url) + os.makedirs(os.path.dirname(path) or '.', exist_ok=True) + conn = sqlite3.connect(path) + ensure_tables(conn) + cur = conn.cursor() + yday = (datetime.now().date() - timedelta(days=1)).isoformat() + cur.execute('SELECT amount FROM daily_revenue WHERE date=?', (yday,)) + row = cur.fetchone() + old = row[0] if row else None + if row: + cur.execute('UPDATE daily_revenue SET amount=?, is_final=1, source=? , updated_at=CURRENT_TIMESTAMP WHERE date=?', (amount, 'correct', yday)) + else: + cur.execute('INSERT INTO daily_revenue(date, amount, is_final, source) VALUES(?, ?, 1, ?)', (yday, amount, 'correct')) + cur.execute('INSERT INTO audit_log(date, old_amount, new_amount, reason, actor, type) VALUES(?, ?, ?, ?, ?, ?)', (yday, old, amount, 'manual_correct', 'admin', 'correct')) + conn.commit() + conn.close() + +if __name__ == '__main__': + set_yesterday(3629.76)
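
A note on the new database-URI block in backend/app.py: db_path is already absolute on every platform, so the POSIX branch's f"sqlite:////{db_path}" ends up with a doubled slash at the start of the filesystem path (the prefix contributes four slashes and the absolute path a fifth). SQLite generally tolerates that, but the platform split may not be needed at all. Below is a minimal cross-platform sketch; it assumes only the backend/ plus data/ layout shown in this patch and is not a drop-in replacement.

# Sketch: a single sqlite URI construction for both Windows and POSIX,
# assuming the repo layout from this patch (backend/app.py next to data/).
from pathlib import Path

BASE_DIR = Path(__file__).resolve().parent.parent      # repo root, as in the patch
DB_PATH = BASE_DIR / "data" / "data.db"

# as_posix() renders C:\repo\data\data.db as C:/repo/data/data.db, which the
# sqlite3 driver accepts, and leaves /repo/data/data.db unchanged, so one
# format string with the standard three-slash prefix covers both platforms.
DB_PATH.parent.mkdir(parents=True, exist_ok=True)
SQLALCHEMY_DATABASE_URI = f"sqlite:///{DB_PATH.as_posix()}"

if __name__ == "__main__":
    print(SQLALCHEMY_DATABASE_URI)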
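
The scheduler now reads cutoff_hour from config.json (set to 11 in this change) for both the cron trigger and the startup backfill in settle_past_days, and the new /api/health and /api/audit routes make it possible to check that a given day was actually finalized. The sketch below is a quick smoke test against a locally running instance; the base URL and the way the client obtains the admin token are assumptions, not anything this patch defines.

# Smoke test for the endpoints added in this patch: /api/health, /api/audit,
# and /api/admin/test_push. BASE_URL and the ADMIN_TOKEN env var are assumed.
import os

import requests

BASE = os.getenv("BASE_URL", "http://127.0.0.1:5000")
HEADERS = {}
if os.getenv("ADMIN_TOKEN"):
    HEADERS["X-Admin-Token"] = os.environ["ADMIN_TOKEN"]

# 1. Health: server time, cutoff_hour, and whether today is finalized.
print(requests.get(f"{BASE}/api/health", timeout=5).json())

# 2. Audit trail: the most recent generate/correct/import entries.
print(requests.get(f"{BASE}/api/audit", params={"limit": 5}, timeout=5).json())

# 3. Trigger a test push to the configured Feishu webhook.
resp = requests.post(
    f"{BASE}/api/admin/test_push",
    json={"amount": 1234.56, "reason": "manual_test"},
    headers=HEADERS,
    timeout=10,
)
print(resp.status_code, resp.json())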
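
On the signing branch of push_feishu: the patch uses the secret as the HMAC key and timestamp + "\n" + secret as the message. The sample Feishu publishes for custom-bot signature verification is, to my knowledge, the other way around: timestamp + "\n" + secret is the key and the HMAC message body is empty. With feishu_secret left blank in config.json the branch never runs, so nothing breaks today; if signing is enabled later and the webhook starts rejecting the card, the variant below is the one to try. This is a sketch of the published sample, not a verified drop-in for this repository.

# Sketch of the signature scheme from Feishu's custom-bot sample code:
# the key is "<timestamp>\n<secret>" and the HMAC message body is empty.
import base64
import hashlib
import hmac
import time


def feishu_sign(secret: str, timestamp: str) -> str:
    string_to_sign = f"{timestamp}\n{secret}"
    digest = hmac.new(string_to_sign.encode("utf-8"), digestmod=hashlib.sha256).digest()
    return base64.b64encode(digest).decode("utf-8")


if __name__ == "__main__":
    ts = str(int(time.time()))
    # The resulting pair goes into the payload the same way push_feishu does:
    # payload.update({"timestamp": ts, "sign": feishu_sign(secret, ts)})
    print(ts, feishu_sign("example-secret", ts))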
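
One ordering caveat: the file keeps the early no-op `def push_feishu(...): return` and appends the real implementation after the `if __name__ == "__main__":` block. Under an importing launcher (flask run, gunicorn, tests) the later definition rebinds the name before any request arrives, so pushes work; when the module is executed directly, app.run() blocks before the interpreter reaches the real definition, and the scheduler and admin routes keep calling the stub. A standalone toy illustration of that binding behaviour follows (toy names, nothing from this repository).

# Toy reproduction: a def placed after a blocking __main__ block never
# rebinds the name while the process is serving, so callers keep the stub.
import time


def notify(msg: str) -> str:            # early stub, like the first push_feishu
    return f"stub: dropped {msg!r}"


def serve() -> None:
    # Stand-in for app.run(): everything below this call waits until shutdown.
    print(notify("daily report"))       # prints the stub result
    time.sleep(0.1)


if __name__ == "__main__":
    serve()


def notify(msg: str) -> str:            # late "real" implementation
    return f"sent: {msg!r}"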
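
Finally, scripts/set_yesterday.py resolves DATABASE_URL (default sqlite:///data.db) relative to whatever directory it is run from, while backend/app.py now always opens <repo>/data/data.db by absolute path; the three database files touched by this commit (data.db, data/data.db, instance/data.db) look like the result of exactly that mismatch. The sketch below points the script at the same file the server uses; treating that file as the single source of truth is my assumption.

# Sketch for scripts/set_yesterday.py: derive the same absolute SQLite path
# backend/app.py now uses, so manual corrections hit the live database.
import os
import sqlite3

REPO_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
DB_PATH = os.path.join(REPO_ROOT, "data", "data.db")   # mirrors backend/app.py


def connect() -> sqlite3.Connection:
    os.makedirs(os.path.dirname(DB_PATH), exist_ok=True)
    return sqlite3.connect(DB_PATH)


if __name__ == "__main__":
    conn = connect()
    try:
        # Assumes the app has already created daily_revenue on first start.
        rows = conn.execute(
            "SELECT date, amount, is_final FROM daily_revenue "
            "ORDER BY date DESC LIMIT 5"
        ).fetchall()
        print(f"Using {DB_PATH}")
        for row in rows:
            print(row)
    finally:
        conn.close()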