Compare commits
No commits in common. "d99a92b6dcf408d7a9381b8a8314393c747e7024" and "7498da05d4317c4ea31deb1bcdb214c74c486a3d" have entirely different histories.
d99a92b6dc...7498da05d4
@@ -1,14 +0,0 @@
-.git/
-.venv/
-venv/
-__pycache__/
-*.pyc
-.env
-data/
-instance/
-*.db
-*.sqlite
-app.log
-node_modules/
-.trae/
-docs/
Dockerfile (20 changed lines)
@@ -1,22 +1,20 @@
 # Multi-stage build: frontend static assets → backend image
-FROM python:3.11-alpine
+FROM python:3.11-slim
 WORKDIR /app
 ENV PYTHONDONTWRITEBYTECODE=1 \
     PYTHONUNBUFFERED=1 \
-    TZ=Asia/Shanghai \
-    PIP_INDEX_URL=https://mirrors.cloud.tencent.com/pypi/simple \
-    PIP_DISABLE_PIP_VERSION_CHECK=1 \
-    AUTO_IMPORT_ON_START=1
-RUN apk add --no-cache tzdata && \
-    cp /usr/share/zoneinfo/Asia/Shanghai /etc/localtime && echo "Asia/Shanghai" > /etc/timezone
+    TZ=Asia/Shanghai
+RUN sed -i 's|deb.debian.org|mirrors.cloud.tencent.com|g' /etc/apt/sources.list && \
+    sed -i 's|security.debian.org|mirrors.cloud.tencent.com|g' /etc/apt/sources.list && \
+    apt-get update -o Acquire::Retries=3 -o Acquire::http::Timeout=30 && \
+    apt-get install -y --no-install-recommends tzdata && \
+    ln -fs /usr/share/zoneinfo/Asia/Shanghai /etc/localtime && dpkg-reconfigure -f noninteractive tzdata && \
+    rm -rf /var/lib/apt/lists/*
 COPY backend/requirements.txt .
-RUN python -m pip install --no-cache-dir --upgrade pip -i $PIP_INDEX_URL && \
-    pip install --no-cache-dir -r requirements.txt -i $PIP_INDEX_URL
+RUN pip install --no-cache-dir -i https://mirrors.cloud.tencent.com/pypi/simple -r requirements.txt
 COPY backend/ ./backend/
 COPY frontend/ ./frontend/
 COPY config.json ./config.json
-COPY seed/ /app/init/
-RUN mkdir -p /app/data
 VOLUME ["/app/data"]
 EXPOSE 57778
 CMD ["python", "backend/app.py"]
README.md (26 changed lines)
@@ -27,7 +27,6 @@ docker-compose up -d
 .
 ├── backend/             # Flask API + APScheduler
 ├── frontend/            # Single-page HTML (Tailwind + Chart.js)
-├── seed/                # Seed data CSV (revenue.sample.csv), copied to /app/init/ at build time
 ├── data/                # SQLite data volume (auto-created/mounted)
 ├── Dockerfile           # Production image (adapted for Tencent Cloud)
 ├── docker-compose.yml   # One-command deployment
@@ -49,31 +48,6 @@ curl -X PUT http://localhost:5000/api/admin/turnover \
   -d '{"date":"2025-12-06","amount":3123.45,"reason":"adjustment entry"}'
 ```
 
-## Data seeding and migration
-- Automatic import on first start (when the DB is empty):
-  1. If `/app/data/import.csv` exists, it is imported first
-  2. Otherwise the in-image `/app/init/revenue.csv` is imported (`seed/revenue.sample.csv` can be renamed to `revenue.csv`)
-- CSV format:
-  ```
-  date,amount
-  2025-12-01,12345.67
-  2025-12-02,11890.12
-  ```
-- Manual import endpoint:
-  ```bash
-  curl -X POST http://<server>:57778/api/admin/import \
-    -H "X-Admin-Token: $ADMIN_TOKEN" -H "Content-Type: text/csv" \
-    --data-binary @revenue.csv
-  ```
-
-## Environment optimisations (optimal setup)
-- Base image switched to `python:3.11-alpine` for faster builds and a smaller image
-- Dependencies are installed via the Tencent Cloud PyPI mirror: `PIP_INDEX_URL=https://mirrors.cloud.tencent.com/pypi/simple`
-- Automatic data import: with the environment variable `AUTO_IMPORT_ON_START=1`, data is imported on startup in this order:
-  1. `/app/data/import.csv` (host path `./data/import.csv`)
-  2. `/app/init/revenue.csv` (repo `seed/revenue.csv`)
-  3. `/app/init/revenue.sample.csv`
-
 ## Production recommendations
 - Use a reverse proxy (Nginx) to expose port `5000` and enable gzip
 - Keep the service stable via `restart: unless-stopped` in `docker-compose`
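The README section removed above documents the manual CSV import endpoint (its server-side handler, `admin_import`, is dropped in the backend/app.py hunk further down). For reference, a minimal sketch of the same call made from Python instead of curl, assuming the old `/api/admin/import` route is still deployed and the `requests` package is available; host, port, file name and token follow the removed README example:

```python
# Hypothetical client-side sketch of the manual CSV import described in the
# removed README section. Assumes the old /api/admin/import route is deployed
# and ADMIN_TOKEN matches the value the server checks via the X-Admin-Token header.
import os
import requests

ADMIN_TOKEN = os.environ["ADMIN_TOKEN"]
URL = "http://localhost:57778/api/admin/import"   # "<server>:57778" in the README example

with open("revenue.csv", encoding="utf-8") as f:
    csv_text = f.read()                            # raw "date,amount" rows

resp = requests.post(
    URL,
    data=csv_text.encode("utf-8"),
    headers={"X-Admin-Token": ADMIN_TOKEN, "Content-Type": "text/csv"},
)
resp.raise_for_status()
print(resp.json())   # e.g. {"ok": true, "imported": 5}
```

Per the removed `admin_import` and `import_csv_text` code, the token check is only enforced when `ADMIN_TOKEN` is set in the server environment, the `date,amount` header row is skipped, and rows dated in the future are silently ignored.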
backend/app.py
@@ -12,27 +12,11 @@ import random
 import os
 import json
 import time
-import csv
-import io
 
 load_dotenv()
 app = Flask(__name__, static_folder="../frontend", static_url_path="/static")
 CORS(app)
 app.config['SQLALCHEMY_DATABASE_URI'] = os.getenv("DATABASE_URL", "sqlite:///data.db")
-def _ensure_sqlite_dir(url):
-    if not url.startswith('sqlite:'):
-        return
-    p = url
-    if p.startswith('sqlite:////'):
-        db_path = p.replace('sqlite:////', '/')
-    elif p.startswith('sqlite:///'):
-        db_path = os.path.join(os.getcwd(), p.replace('sqlite:///', ''))
-    else:
-        return
-    d = os.path.dirname(db_path)
-    if d and not os.path.exists(d):
-        os.makedirs(d, exist_ok=True)
-_ensure_sqlite_dir(app.config['SQLALCHEMY_DATABASE_URI'])
 db = SQLAlchemy(app)
 
 class DailyRevenue(db.Model):
@@ -274,65 +258,6 @@ def admin_correct():
     _append_log_line(d.isoformat(), new_amt, load_config().get('shop_name', '益选便利店'))
     return jsonify({"ok": True})
 
-def import_csv_text(text: str, actor: str = 'admin'):
-    buf = io.StringIO(text)
-    reader = csv.reader(buf)
-    imported = 0
-    now = datetime.now().date()
-    for row in reader:
-        if not row:
-            continue
-        if row[0] == 'date':
-            continue
-        try:
-            d = datetime.strptime(row[0].strip(), '%Y-%m-%d').date()
-            if d > now:
-                continue
-            amt = float(row[1].strip())
-        except Exception:
-            continue
-        r = DailyRevenue.query.filter_by(date=d).first()
-        old = r.amount if r else None
-        if r:
-            r.amount = amt
-            r.is_final = True
-            r.source = 'import_csv'
-        else:
-            r = DailyRevenue(date=d, amount=amt, is_final=True, source='import_csv')
-            db.session.add(r)
-        db.session.add(AuditLog(date=d, old_amount=old, new_amount=amt, reason='import_csv', actor=actor, type='import_log'))
-        imported += 1
-    db.session.commit()
-    return imported
-
-@app.route('/api/admin/import', methods=['POST'])
-def admin_import():
-    token = os.getenv('ADMIN_TOKEN')
-    if token and request.headers.get('X-Admin-Token') != token:
-        return jsonify({"error": "unauthorized"}), 401
-    raw = request.get_data(as_text=True)
-    if not raw:
-        return jsonify({"error": "empty"}), 400
-    imported = import_csv_text(raw, actor='admin')
-    return jsonify({"ok": True, "imported": imported})
-
-def auto_import_csv_on_start():
-    with app.app_context():
-        flag = os.getenv('AUTO_IMPORT_ON_START', '1')
-        if str(flag) == '0':
-            return
-        paths = [
-            os.path.join("/app", "data", "import.csv"),
-            os.path.join(os.path.dirname(__file__), "..", "init", "revenue.csv"),
-            os.path.join(os.path.dirname(__file__), "..", "init", "revenue.sample.csv"),
-        ]
-        for p in paths:
-            if os.path.exists(p):
-                with open(p, "r", encoding="utf-8") as f:
-                    text = f.read()
-                import_csv_text(text, actor='bootstrap')
-                break
-
 def sync_log_to_db():
     """On startup, sync entries missing from the DB out of app.log (dates before today only)"""
     log_map = parse_app_log()
@@ -354,7 +279,6 @@ if __name__ == "__main__":
     scheduler.start()
     with app.app_context():
         sync_log_to_db()
-        auto_import_csv_on_start()
     app.run(host="0.0.0.0", port=int(os.getenv("PORT", "5000")))
 
 @app.route('/api/events')
docker-compose.yml
@@ -5,11 +5,10 @@ services:
     ports:
       - "57778:57778"
     environment:
-      - DATABASE_URL=sqlite:////app/data/data.db
+      - DATABASE_URL=sqlite:///data/data.db
       - TZ=Asia/Shanghai
       - ADMIN_TOKEN=${ADMIN_TOKEN}
       - PORT=57778
-      - AUTO_IMPORT_ON_START=1
     volumes:
-      - ./data:/app/data:Z
+      - ./data:/app/data
     restart: unless-stopped
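A note on the `DATABASE_URL` change above: in a SQLAlchemy sqlite URL, three slashes carry a relative path and four slashes an absolute one, which is exactly the distinction the removed `_ensure_sqlite_dir` helper in backend/app.py used to handle. A minimal sketch (assuming SQLAlchemy is installed) showing how the two values in this hunk resolve:

```python
# Sketch: how SQLAlchemy parses the two DATABASE_URL values from this hunk.
# Three slashes -> relative path (opened relative to the process working directory);
# four slashes -> absolute path.
from sqlalchemy.engine import make_url

new_url = make_url("sqlite:///data/data.db")        # value after this change (relative)
old_url = make_url("sqlite:////app/data/data.db")   # value before this change (absolute)

print(new_url.database)   # 'data/data.db'
print(old_url.database)   # '/app/data/data.db'
```

Because the image sets `WORKDIR /app` and runs `python backend/app.py` from there, the relative form still resolves to `/app/data/data.db` on the mounted `./data` volume, so both URLs point at the same file as long as the working directory stays `/app`.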
@@ -1,6 +0,0 @@
-date,amount
-2025-12-01,12345.67
-2025-12-02,11890.12
-2025-12-03,13200.00
-2025-12-04,11110.50
-2025-12-05,14005.88
@@ -1,6 +0,0 @@
-date,amount
-2025-12-01,12345.67
-2025-12-02,11890.12
-2025-12-03,13200.00
-2025-12-04,11110.50
-2025-12-05,14005.88