Fix the bug where data was not generated at the scheduled hour; add bot push notifications
This commit is contained in:
parent 339f8211e4
commit 1b1daa581e
3  app.log
@@ -1 +1,4 @@
 准备发送消息: 【益选便利店】2025-12-07的营业额:2408.7
+准备发送消息: 【益选便利店】2025-12-08的营业额:3402.6
+准备发送消息: 【益选便利店】2025-12-08的营业额:3629.76
+准备发送消息: 【益选便利店】2025-12-06的营业额:1803.09
225  backend/app.py
@@ -14,32 +14,31 @@ import json
 import time
 import csv
 import io
+import requests
+import hmac
+import hashlib
+import base64
 
 load_dotenv()
 app = Flask(__name__, static_folder="../frontend", static_url_path="/static")
 CORS(app)
-app.config['SQLALCHEMY_DATABASE_URI'] = os.getenv("DATABASE_URL", "sqlite:///data/data.db")
+# 强制使用绝对路径解决 Windows 路径问题
+base_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
+db_path = os.path.join(base_dir, "data", "data.db")
+if os.name == 'nt':
+    # Windows 需要转义反斜杠,或者使用 4 个斜杠 + 驱动器号
+    # SQLAlchemy 在 Windows 上通常接受 sqlite:///C:\path\to\file
+    app.config['SQLALCHEMY_DATABASE_URI'] = f"sqlite:///{db_path}"
+else:
+    app.config['SQLALCHEMY_DATABASE_URI'] = f"sqlite:////{db_path}"
 
 def _ensure_sqlite_dir(url):
-    if not url.startswith('sqlite:'):
-        return
-    p = url
-    if p.startswith('sqlite:////'):
-        db_path = p.replace('sqlite:////', '')
-        if db_path[1:3] == ':/' or db_path[1:3] == ':\\':
-            # path like 'E:/...' with a leading slash
-            pass
-        elif db_path[0:2] == ':/':
-            # unlikely
-            db_path = db_path[1:]
-        # Normalize Windows backslashes
-        db_path = db_path.replace('/', os.sep)
-    elif p.startswith('sqlite:///'):
-        db_path = os.path.join(os.getcwd(), p.replace('sqlite:///', ''))
-    else:
-        return
+    # 已通过绝对路径计算,直接检查 data 目录
     d = os.path.dirname(db_path)
-    if d and not os.path.exists(d):
+    if not os.path.exists(d):
         os.makedirs(d, exist_ok=True)
 
 _ensure_sqlite_dir(app.config['SQLALCHEMY_DATABASE_URI'])
 db = SQLAlchemy(app)
 
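For context on the slash counts in the hunk above: SQLAlchemy's SQLite URLs use three slashes plus a drive letter for absolute Windows paths and four slashes for absolute POSIX paths. The following is a minimal sketch, not part of the commit, showing that one expression covers both cases; sqlite_uri and the example path are illustrative only.

import os

def sqlite_uri(db_path: str) -> str:
    """Build a SQLAlchemy SQLite URL from a filesystem path (illustrative helper)."""
    abs_path = os.path.abspath(db_path)
    # "sqlite:///" + an absolute path yields the documented forms on both platforms:
    #   POSIX:   sqlite:////srv/app/data/data.db   (four slashes: scheme plus leading "/")
    #   Windows: sqlite:///C:\app\data\data.db     (three slashes plus drive letter)
    return "sqlite:///" + abs_path

print(sqlite_uri(os.path.join("data", "data.db")))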
@@ -79,6 +78,9 @@ with app.app_context():
         db.session.execute(text('ALTER TABLE daily_revenue ADD COLUMN note TEXT'))
         db.session.commit()
 
+def push_feishu(date_str: str, amount: float, reason: str):
+    return
+
 def generate_mock_revenue():
     """保持原有逻辑:生成当日模拟营业额"""
     base = random.uniform(8000, 15000)
@@ -90,25 +92,36 @@ def _append_log_line(date_str: str, amount: float, shop_name: str):
     line = f"准备发送消息: 【{shop_name}】{date_str}的营业额:{amount}"
     with open(log_path, 'a', encoding='utf-8') as f:
         f.write(line + "\n")
+    try:
+        print(line, flush=True)
+    except Exception:
+        pass
 
-def daily_job():
-    """定时任务:在本地 23:00 生成并定版当日数据(不重复生成)"""
-    cfg = load_config()
-    shop_name = cfg.get("shop_name", "益选便利店")
-    today = datetime.now().date()
-    existing = DailyRevenue.query.filter_by(date=today).first()
-    if existing:
-        if not existing.is_final:
-            existing.is_final = True
-            existing.source = existing.source or 'generator'
-            db.session.commit()
-        return
-    amount = gen_amount_for_date(today, cfg)
-    rev = DailyRevenue(date=today, amount=amount, is_final=True, source='generator')
-    db.session.add(rev)
-    db.session.add(AuditLog(date=today, old_amount=None, new_amount=amount, reason='daily_generate', actor='system', type='generate'))
-    db.session.commit()
-    _append_log_line(today.isoformat(), amount, shop_name)
+def daily_job(target_date=None):
+    """定时任务:生成并定版指定日期的数据(默认当日)"""
+    with app.app_context():
+        cfg = load_config()
+        shop_name = cfg.get("shop_name", "益选便利店")
+        if target_date is None:
+            target_date = datetime.now().date()
+
+        existing = DailyRevenue.query.filter_by(date=target_date).first()
+        if existing:
+            if not existing.is_final:
+                existing.is_final = True
+                existing.source = existing.source or 'generator'
+                db.session.commit()
+            # 补推消息
+            push_feishu(target_date.isoformat(), existing.amount, "daily_finalize")
+            return
+
+        amount = gen_amount_for_date(target_date, cfg)
+        rev = DailyRevenue(date=target_date, amount=amount, is_final=True, source='generator')
+        db.session.add(rev)
+        db.session.add(AuditLog(date=target_date, old_amount=None, new_amount=amount, reason='daily_generate', actor='system', type='generate'))
+        db.session.commit()
+        _append_log_line(target_date.isoformat(), amount, shop_name)
+        push_feishu(target_date.isoformat(), amount, "daily_generate")
 
 def settle_today_if_due():
     cfg = load_config()
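Because daily_job now opens its own app context and takes a target date, a past day can be regenerated and pushed by hand. A minimal sketch, not part of the commit, assuming it is run from the backend/ directory so that "app" resolves to backend/app.py:

# Illustrative backfill call; the date is just an example taken from app.log.
from datetime import date

from app import daily_job

daily_job(date(2025, 12, 6))  # regenerate, finalize and push that day if it is missing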
@@ -117,9 +130,20 @@ def settle_today_if_due():
         cutoff = int(cutoff)
     except Exception:
         cutoff = 23
-    if datetime.now().hour < cutoff:
-        return
-    daily_job()
+    # 只有当前时间 >= cutoff 才尝试结算今天
+    if datetime.now().hour >= cutoff:
+        daily_job()
 
+def settle_past_days():
+    """启动检查:补录过去3天未定版的数据(防止服务器宕机漏单)"""
+    with app.app_context():
+        today = datetime.now().date()
+        for i in range(1, 4):
+            d = today - timedelta(days=i)
+            existing = DailyRevenue.query.filter_by(date=d).first()
+            if not existing or not existing.is_final:
+                print(f"补录数据: {d}")
+                daily_job(d)
+
 # ---- 日志解析与聚合 ----
 def load_config():
@@ -253,6 +277,35 @@ def api_revenue():
     rows = DailyRevenue.query.filter(DailyRevenue.date >= start).order_by(DailyRevenue.date.desc()).all()
     return jsonify([{"date": r.date.isoformat(), "amount": r.amount} for r in rows])
 
+@app.route("/api/audit")
+def api_audit():
+    limit = int(request.args.get("limit", 20))
+    rows = AuditLog.query.order_by(AuditLog.created_at.desc()).limit(limit).all()
+    return jsonify([
+        {
+            "date": r.date.isoformat(),
+            "old_amount": r.old_amount,
+            "new_amount": r.new_amount,
+            "reason": r.reason,
+            "actor": r.actor,
+            "type": r.type,
+            "created_at": r.created_at.isoformat(timespec='seconds')
+        } for r in rows
+    ])
+
+@app.route("/api/health")
+def api_health():
+    cfg = load_config()
+    cutoff = int(cfg.get("cutoff_hour", 23))
+    today_local = datetime.now().date()
+    r_today = DailyRevenue.query.filter_by(date=today_local).first()
+    return jsonify({
+        "server_now": datetime.now().isoformat(timespec='seconds'),
+        "cutoff_hour": cutoff,
+        "today_finalized": bool(r_today and r_today.is_final),
+        "today_amount": (r_today.amount if r_today else None)
+    })
+
 @app.route("/api/export")
 def api_export():
     """CSV导出"""
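A quick way to exercise the two new read-only endpoints once the server is running; a sketch, not part of the commit, assuming the default local address implied by app.run (PORT defaults to 5000):

# Illustrative client calls against the new endpoints.
import requests

base = "http://127.0.0.1:5000"  # assumed local dev address

print(requests.get(f"{base}/api/health", timeout=5).json())
# expected keys: server_now, cutoff_hour, today_finalized, today_amount

print(requests.get(f"{base}/api/audit", params={"limit": 5}, timeout=5).json())
# last 5 audit entries, newest first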
@@ -291,8 +344,21 @@ def admin_correct():
     db.session.add(AuditLog(date=d, old_amount=old, new_amount=new_amt, reason=reason, actor=actor, type='correct'))
     db.session.commit()
     _append_log_line(d.isoformat(), new_amt, load_config().get('shop_name', '益选便利店'))
+    push_feishu(d.isoformat(), new_amt, reason)
     return jsonify({"ok": True})
 
+@app.route('/api/admin/test_push', methods=['POST'])
+def admin_test_push():
+    token = os.getenv('ADMIN_TOKEN')
+    if token and request.headers.get('X-Admin-Token') != token:
+        return jsonify({"error": "unauthorized"}), 401
+    payload = request.get_json(silent=True) or {}
+    ds = payload.get('date') or datetime.now().date().isoformat()
+    amt = float(payload.get('amount') or 1234.56)
+    reason = payload.get('reason') or 'manual_test'
+    push_feishu(ds, amt, reason)
+    return jsonify({"ok": True, "pushed": {"date": ds, "amount": amt, "reason": reason}})
+
 def import_csv_text(text: str, actor: str = 'admin'):
     buf = io.StringIO(text)
     reader = csv.reader(buf)
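The push path can be tested end to end through the new admin endpoint; a sketch, not part of the commit, assuming the same local address and that ADMIN_TOKEN is set in the environment (the header is only checked when a token is configured):

# Illustrative test call for the new /api/admin/test_push route.
import os
import requests

resp = requests.post(
    "http://127.0.0.1:5000/api/admin/test_push",            # assumed local dev address
    headers={"X-Admin-Token": os.getenv("ADMIN_TOKEN", "")},
    json={"date": "2025-12-08", "amount": 3629.76, "reason": "manual_test"},
    timeout=5,
)
print(resp.status_code, resp.json())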
@@ -320,6 +386,7 @@ def import_csv_text(text: str, actor: str = 'admin'):
         r = DailyRevenue(date=d, amount=amt, is_final=True, source='import_csv')
         db.session.add(r)
         db.session.add(AuditLog(date=d, old_amount=old, new_amount=amt, reason='import_csv', actor=actor, type='import_log'))
+        push_feishu(d.isoformat(), amt, 'import_csv')
         imported += 1
     db.session.commit()
     return imported
@@ -363,11 +430,16 @@ def sync_log_to_db():
 if __name__ == "__main__":
     local_tz = get_localzone()
     scheduler = BackgroundScheduler(timezone=local_tz)
-    scheduler.add_job(daily_job, "cron", hour=23, minute=0)
+    try:
+        cutoff = int(load_config().get("cutoff_hour", 23))
+    except Exception:
+        cutoff = 23
+    scheduler.add_job(daily_job, "cron", hour=cutoff, minute=0)
     scheduler.start()
     with app.app_context():
         sync_log_to_db()
         auto_import_csv_on_start()
+        settle_past_days()
         settle_today_if_due()
     app.run(host="0.0.0.0", port=int(os.getenv("PORT", "5000")))
 
@@ -383,3 +455,74 @@ def sse_events():
             yield "data: {\"type\": \"force_refresh\"}\n\n"
             time.sleep(30)
     return Response(stream_with_context(event_stream()), mimetype='text/event-stream')
+
+def push_feishu(date_str: str, amount: float, reason: str):
+    cfg = load_config()
+    url = cfg.get("feishu_webhook_url") or cfg.get("webhook_url")
+    if not url:
+        return
+    shop = cfg.get("shop_name", "益选便利店")
+    try:
+        d = datetime.strptime(date_str, '%Y-%m-%d').date()
+    except Exception:
+        d = datetime.now().date()
+    y = d - timedelta(days=1)
+    r = DailyRevenue.query.filter_by(date=y).first()
+    y_amt = (r.amount if (r and r.is_final) else None)
+    arrow = ''
+    diff_str = ''
+    pct_str = ''
+    if isinstance(y_amt, (int, float)):
+        diff = amount - y_amt
+        arrow = '🔺' if diff >= 0 else '🔻'
+        diff_str = f"{'+' if diff >= 0 else '-'}{abs(diff):.2f}"
+        if y_amt != 0:
+            pct = abs(diff / y_amt) * 100.0
+            pct_str = f"({'+' if diff >= 0 else '-'}{pct:.2f}%)"
+    today_line = f"**今日**:¥{amount:.2f}"
+    if isinstance(y_amt, (int, float)):
+        today_line += f" {arrow} {diff_str} {pct_str}".strip()
+    y_line = f"**昨日**:{('暂无数据' if y_amt is None else '¥' + format(y_amt, '.2f'))}"
+    card = {
+        "config": {"wide_screen_mode": True},
+        "elements": [
+            {"tag": "div", "text": {"tag": "lark_md", "content": f"📊 **{shop}** 营业额通知"}},
+            {"tag": "hr"},
+            {"tag": "div", "text": {"tag": "lark_md", "content": f"**日期**:{date_str}"}},
+            {"tag": "div", "text": {"tag": "lark_md", "content": today_line}},
+            {"tag": "div", "text": {"tag": "lark_md", "content": y_line}},
+            {"tag": "note", "elements": [
+                {"tag": "plain_text", "content": f"原因:{reason} | 时间:{datetime.now().isoformat(timespec='seconds')}"}
+            ]}
+        ]
+    }
+    payload = {"msg_type": "interactive", "card": card}
+    secret = cfg.get("feishu_secret")
+    if secret:
+        ts = str(int(time.time()))
+        sign_src = ts + "\n" + secret
+        sign = base64.b64encode(hmac.new(secret.encode(), sign_src.encode(), digestmod=hashlib.sha256).digest()).decode()
+        payload.update({"timestamp": ts, "sign": sign})
+    def _log(s: str):
+        p = os.path.join(os.path.dirname(__file__), "..", "app.log")
+        with open(p, 'a', encoding='utf-8') as f:
+            f.write(s + "\n")
+        try:
+            print(s, flush=True)
+        except Exception:
+            pass
+    def _post_json(u: str, payload_obj: dict):
+        body = json.dumps(payload_obj, ensure_ascii=False).encode('utf-8')
+        return requests.post(u, data=body, headers={'Content-Type': 'application/json; charset=utf-8'}, timeout=5)
+    try:
+        resp = _post_json(url, payload)
+        ok = (200 <= resp.status_code < 300)
+        _log(f"飞书推送卡片{'成功' if ok else '失败'}: status={resp.status_code} {resp.text[:200]}")
+        if not ok:
+            text = f"📊 {shop}\n日期:{date_str}\n今日:¥{amount:.2f}"
+            if isinstance(y_amt, (int, float)):
+                text += f" {arrow} {diff_str} {pct_str}".strip()
+            text += f"\n原因:{reason}"
+            resp2 = _post_json(url, {"msg_type":"text","content":{"text": text}})
+            _log(f"飞书推送文本{'成功' if (200 <= resp2.status_code < 300) else '失败'}: status={resp2.status_code} {resp2.text[:200]}")
+    except Exception as e:
+        _log(f"飞书推送异常: {str(e)[:200]}")
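One detail worth verifying against Feishu's custom-bot documentation: the sample there derives the HMAC key from the whole "timestamp\nsecret" string and signs an empty message, whereas the code above keys on the secret and signs sign_src. A minimal sketch of the documented variant, not part of the commit, in case signed pushes come back with a signature verification error:

# Reference sketch of the Feishu custom-bot signature as shown in its docs (illustrative).
import base64
import hashlib
import hmac
import time

def gen_feishu_sign(timestamp: str, secret: str) -> str:
    # Key is "timestamp\nsecret"; the signed message is left empty.
    string_to_sign = f"{timestamp}\n{secret}"
    digest = hmac.new(string_to_sign.encode("utf-8"), digestmod=hashlib.sha256).digest()
    return base64.b64encode(digest).decode("utf-8")

print(gen_feishu_sign(str(int(time.time())), "example-secret"))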
(requirements file)
@@ -5,3 +5,4 @@ APScheduler==3.10.4
 python-dotenv==1.0.0
 tzlocal==5.2
 tzdata==2025.1
+requests==2.32.3
(shop config JSON)
@@ -1,5 +1,7 @@
 {
     "webhook_url": "https://api.hiflow.tencent.com/engine/webhook/31/1869391857524076545",
+    "feishu_webhook_url": "https://open.feishu.cn/open-apis/bot/v2/hook/2ee06084-a0ed-4c3f-9b0b-e264194f7ddd",
+    "feishu_secret": "",
     "shop_name": "益选便利店",
     "weekday_range": [
         2900,
@@ -8,5 +10,6 @@
     "weekend_range": [
         1600,
         2000
-    ]
-}
+    ],
+    "cutoff_hour": 11
+}
BIN  data/data.db  Binary file not shown.
BIN  instance/data.db  Binary file not shown.
62  scripts/set_yesterday.py  Normal file
@@ -0,0 +1,62 @@
+import os
+import sqlite3
+from datetime import datetime, timedelta
+
+def _resolve_sqlite_path(url: str) -> str:
+    if not url.startswith('sqlite:///'):
+        raise ValueError('Only sqlite URLs are supported')
+    return url.replace('sqlite:///', '')
+
+def ensure_tables(conn: sqlite3.Connection) -> None:
+    cur = conn.cursor()
+    cur.execute(
+        """
+        CREATE TABLE IF NOT EXISTS daily_revenue (
+            id INTEGER PRIMARY KEY AUTOINCREMENT,
+            date DATE NOT NULL UNIQUE,
+            amount REAL NOT NULL,
+            created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
+            is_final INTEGER NOT NULL DEFAULT 0,
+            updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
+            source TEXT,
+            note TEXT
+        )
+        """
+    )
+    cur.execute(
+        """
+        CREATE TABLE IF NOT EXISTS audit_log (
+            id INTEGER PRIMARY KEY AUTOINCREMENT,
+            date DATE NOT NULL,
+            old_amount REAL,
+            new_amount REAL,
+            reason TEXT,
+            actor TEXT,
+            type TEXT,
+            created_at DATETIME DEFAULT CURRENT_TIMESTAMP
+        )
+        """
+    )
+    conn.commit()
+
+def set_yesterday(amount: float) -> None:
+    url = os.getenv('DATABASE_URL', 'sqlite:///data.db')
+    path = _resolve_sqlite_path(url)
+    os.makedirs(os.path.dirname(path) or '.', exist_ok=True)
+    conn = sqlite3.connect(path)
+    ensure_tables(conn)
+    cur = conn.cursor()
+    yday = (datetime.now().date() - timedelta(days=1)).isoformat()
+    cur.execute('SELECT amount FROM daily_revenue WHERE date=?', (yday,))
+    row = cur.fetchone()
+    old = row[0] if row else None
+    if row:
+        cur.execute('UPDATE daily_revenue SET amount=?, is_final=1, source=? , updated_at=CURRENT_TIMESTAMP WHERE date=?', (amount, 'correct', yday))
+    else:
+        cur.execute('INSERT INTO daily_revenue(date, amount, is_final, source) VALUES(?, ?, 1, ?)', (yday, amount, 'correct'))
+    cur.execute('INSERT INTO audit_log(date, old_amount, new_amount, reason, actor, type) VALUES(?, ?, ?, ?, ?, ?)', (yday, old, amount, 'manual_correct', 'admin', 'correct'))
+    conn.commit()
+    conn.close()
+
+if __name__ == '__main__':
+    set_yesterday(3629.76)
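The script is self-contained (plain sqlite3, no Flask), so correcting yesterday's figure does not require the server. A sketch, not part of the commit, assuming it is run from the repository root and that DATABASE_URL should point at the same file the app uses (the path is an assumption about the deployment layout):

# Illustrative usage of scripts/set_yesterday.py from Python.
import os

os.environ["DATABASE_URL"] = "sqlite:///data/data.db"  # assumed to mirror the app's data file

from scripts.set_yesterday import set_yesterday

set_yesterday(2408.70)  # overwrite or insert yesterday's revenue and record an audit row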