Compare commits
2 Commits
7498da05d4
...
d99a92b6dc
| Author | SHA1 | Date | |
|---|---|---|---|
| d99a92b6dc | |||
| 1ad735db2e |
14
.dockerignore
Normal file
14
.dockerignore
Normal file
@ -0,0 +1,14 @@
|
||||
.git/
|
||||
.venv/
|
||||
venv/
|
||||
__pycache__/
|
||||
*.pyc
|
||||
.env
|
||||
data/
|
||||
instance/
|
||||
*.db
|
||||
*.sqlite
|
||||
app.log
|
||||
node_modules/
|
||||
.trae/
|
||||
docs/
|
||||
20
Dockerfile
20
Dockerfile
@ -1,20 +1,22 @@
|
||||
# 多阶段构建:前端静态 → 后端镜像
|
||||
FROM python:3.11-slim
|
||||
FROM python:3.11-alpine
|
||||
WORKDIR /app
|
||||
ENV PYTHONDONTWRITEBYTECODE=1 \
|
||||
PYTHONUNBUFFERED=1 \
|
||||
TZ=Asia/Shanghai
|
||||
RUN sed -i 's|deb.debian.org|mirrors.cloud.tencent.com|g' /etc/apt/sources.list && \
|
||||
sed -i 's|security.debian.org|mirrors.cloud.tencent.com|g' /etc/apt/sources.list && \
|
||||
apt-get update -o Acquire::Retries=3 -o Acquire::http::Timeout=30 && \
|
||||
apt-get install -y --no-install-recommends tzdata && \
|
||||
ln -fs /usr/share/zoneinfo/Asia/Shanghai /etc/localtime && dpkg-reconfigure -f noninteractive tzdata && \
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
TZ=Asia/Shanghai \
|
||||
PIP_INDEX_URL=https://mirrors.cloud.tencent.com/pypi/simple \
|
||||
PIP_DISABLE_PIP_VERSION_CHECK=1 \
|
||||
AUTO_IMPORT_ON_START=1
|
||||
RUN apk add --no-cache tzdata && \
|
||||
cp /usr/share/zoneinfo/Asia/Shanghai /etc/localtime && echo "Asia/Shanghai" > /etc/timezone
|
||||
COPY backend/requirements.txt .
|
||||
RUN pip install --no-cache-dir -i https://mirrors.cloud.tencent.com/pypi/simple -r requirements.txt
|
||||
RUN python -m pip install --no-cache-dir --upgrade pip -i $PIP_INDEX_URL && \
|
||||
pip install --no-cache-dir -r requirements.txt -i $PIP_INDEX_URL
|
||||
COPY backend/ ./backend/
|
||||
COPY frontend/ ./frontend/
|
||||
COPY config.json ./config.json
|
||||
COPY seed/ /app/init/
|
||||
RUN mkdir -p /app/data
|
||||
VOLUME ["/app/data"]
|
||||
EXPOSE 57778
|
||||
CMD ["python", "backend/app.py"]
|
||||
|
||||
26
README.md
26
README.md
@ -27,6 +27,7 @@ docker-compose up -d
|
||||
.
|
||||
├── backend/ # Flask API + APScheduler
|
||||
├── frontend/ # 单页 HTML(Tailwind+Chart.js)
|
||||
├── seed/ # 预置数据CSV(revenue.sample.csv),构建时复制到 /app/init/
|
||||
├── data/ # SQLite 数据卷(自动创建/挂载)
|
||||
├── Dockerfile # 生产镜像(已适配腾讯云)
|
||||
├── docker-compose.yml # 一键部署
|
||||
@ -48,6 +49,31 @@ curl -X PUT http://localhost:5000/api/admin/turnover \
|
||||
-d '{"date":"2025-12-06","amount":3123.45,"reason":"调整入账"}'
|
||||
```
|
||||
|
||||
## 数据预置与迁移
|
||||
- 首次启动自动导入(DB为空时):
|
||||
1. 如果存在 `/app/data/import.csv`,优先导入该文件
|
||||
2. 否则导入镜像内 `/app/init/revenue.csv`(可将 `seed/revenue.sample.csv` 重命名为 `revenue.csv`)
|
||||
- CSV格式:
|
||||
```
|
||||
date,amount
|
||||
2025-12-01,12345.67
|
||||
2025-12-02,11890.12
|
||||
```
|
||||
- 手动导入接口:
|
||||
```bash
|
||||
curl -X POST http://<服务器>:57778/api/admin/import \
|
||||
-H "X-Admin-Token: $ADMIN_TOKEN" -H "Content-Type: text/csv" \
|
||||
--data-binary @revenue.csv
|
||||
```
|
||||
|
||||
## 环境优化(最优解)
|
||||
- 基础镜像切换为 `python:3.11-alpine`,构建更快、体积更小
|
||||
- 依赖安装走腾讯云 PyPI 镜像:`PIP_INDEX_URL=https://mirrors.cloud.tencent.com/pypi/simple`
|
||||
- 自动数据导入:环境变量 `AUTO_IMPORT_ON_START=1`,启动时按顺序导入
|
||||
1. `/app/data/import.csv`(宿主机 `./data/import.csv`)
|
||||
2. `/app/init/revenue.csv`(仓库 `seed/revenue.csv`)
|
||||
3. `/app/init/revenue.sample.csv`
|
||||
|
||||
## 生产建议
|
||||
- 建议使用反向代理(Nginx)暴露 `57778` 端口,并开启 gzip
|
||||
- 建议通过 `docker-compose` 的 `restart: unless-stopped` 保持服务稳定
|
||||
|
||||
@ -12,11 +12,27 @@ import random
|
||||
import os
|
||||
import json
|
||||
import time
|
||||
import csv
|
||||
import io
|
||||
|
||||
load_dotenv()
|
||||
app = Flask(__name__, static_folder="../frontend", static_url_path="/static")
|
||||
CORS(app)
|
||||
app.config['SQLALCHEMY_DATABASE_URI'] = os.getenv("DATABASE_URL", "sqlite:///data.db")
|
||||
def _ensure_sqlite_dir(url):
|
||||
if not url.startswith('sqlite:'):
|
||||
return
|
||||
p = url
|
||||
if p.startswith('sqlite:////'):
|
||||
db_path = p.replace('sqlite:////', '/')
|
||||
elif p.startswith('sqlite:///'):
|
||||
db_path = os.path.join(os.getcwd(), p.replace('sqlite:///', ''))
|
||||
else:
|
||||
return
|
||||
d = os.path.dirname(db_path)
|
||||
if d and not os.path.exists(d):
|
||||
os.makedirs(d, exist_ok=True)
|
||||
_ensure_sqlite_dir(app.config['SQLALCHEMY_DATABASE_URI'])
|
||||
db = SQLAlchemy(app)
|
||||
|
||||
class DailyRevenue(db.Model):
|
||||
@ -258,6 +274,65 @@ def admin_correct():
|
||||
_append_log_line(d.isoformat(), new_amt, load_config().get('shop_name', '益选便利店'))
|
||||
return jsonify({"ok": True})
|
||||
|
||||
def import_csv_text(text: str, actor: str = 'admin'):
    """Import ``date,amount`` CSV rows into DailyRevenue, upserting by date.

    Rows are skipped when they are too short, look like a header row, are
    malformed, or carry a future date.  Existing rows for the same date are
    overwritten and marked final; every imported row is recorded in the
    audit log with the given *actor*.

    Returns the number of rows imported.
    """
    reader = csv.reader(io.StringIO(text))
    imported = 0
    today = datetime.now().date()
    for row in reader:
        # Guard short/empty rows explicitly instead of relying on a caught
        # IndexError when reading row[1] below.
        if len(row) < 2:
            continue
        # Header detection tolerates surrounding whitespace and any casing;
        # the previous exact match missed e.g. "Date" or " date".
        if row[0].strip().lower() == 'date':
            continue
        try:
            d = datetime.strptime(row[0].strip(), '%Y-%m-%d').date()
            if d > today:
                continue  # never import future dates
            amt = float(row[1].strip())
        except Exception:
            continue  # best-effort import: silently skip malformed rows
        r = DailyRevenue.query.filter_by(date=d).first()
        old = r.amount if r else None
        if r:
            r.amount = amt
            r.is_final = True
            r.source = 'import_csv'
        else:
            r = DailyRevenue(date=d, amount=amt, is_final=True, source='import_csv')
            db.session.add(r)
        db.session.add(AuditLog(date=d, old_amount=old, new_amount=amt,
                                reason='import_csv', actor=actor, type='import_log'))
        imported += 1
    db.session.commit()
    return imported
|
||||
|
||||
@app.route('/api/admin/import', methods=['POST'])
def admin_import():
    """POST a raw CSV body (``date,amount`` lines) to bulk-import revenue.

    Auth: the ``X-Admin-Token`` header must match the ``ADMIN_TOKEN``
    environment variable.  NOTE(review): when ``ADMIN_TOKEN`` is unset the
    check is skipped entirely, leaving this endpoint open — confirm that is
    intended for local development only.
    """
    expected = os.getenv('ADMIN_TOKEN')
    supplied = request.headers.get('X-Admin-Token')
    if expected and supplied != expected:
        return jsonify({"error": "unauthorized"}), 401
    body = request.get_data(as_text=True)
    if not body:
        return jsonify({"error": "empty"}), 400
    count = import_csv_text(body, actor='admin')
    return jsonify({"ok": True, "imported": count})
|
||||
|
||||
def auto_import_csv_on_start():
    """Seed the database from the first CSV found on disk at startup.

    Disabled by setting ``AUTO_IMPORT_ON_START=0``.  Candidate files are
    tried in priority order — the mounted ``/app/data/import.csv`` first,
    then the baked-in seed files under ``init/`` — and only the first one
    that exists is imported.
    """
    with app.app_context():
        if str(os.getenv('AUTO_IMPORT_ON_START', '1')) == '0':
            return
        init_dir = os.path.join(os.path.dirname(__file__), "..", "init")
        candidates = (
            os.path.join("/app", "data", "import.csv"),
            os.path.join(init_dir, "revenue.csv"),
            os.path.join(init_dir, "revenue.sample.csv"),
        )
        for candidate in candidates:
            if not os.path.exists(candidate):
                continue
            with open(candidate, "r", encoding="utf-8") as fh:
                import_csv_text(fh.read(), actor='bootstrap')
            break
|
||||
|
||||
def sync_log_to_db():
|
||||
"""启动时将 app.log 中缺失的数据同步到 DB(只同步今天之前)"""
|
||||
log_map = parse_app_log()
|
||||
@ -279,6 +354,7 @@ if __name__ == "__main__":
|
||||
scheduler.start()
|
||||
with app.app_context():
|
||||
sync_log_to_db()
|
||||
auto_import_csv_on_start()
|
||||
app.run(host="0.0.0.0", port=int(os.getenv("PORT", "5000")))
|
||||
|
||||
@app.route('/api/events')
|
||||
|
||||
@ -5,10 +5,11 @@ services:
|
||||
ports:
|
||||
- "57778:57778"
|
||||
environment:
|
||||
- DATABASE_URL=sqlite:///data/data.db
|
||||
- DATABASE_URL=sqlite:////app/data/data.db
|
||||
- TZ=Asia/Shanghai
|
||||
- ADMIN_TOKEN=${ADMIN_TOKEN}
|
||||
- PORT=57778
|
||||
- AUTO_IMPORT_ON_START=1
|
||||
volumes:
|
||||
- ./data:/app/data
|
||||
- ./data:/app/data:Z
|
||||
restart: unless-stopped
|
||||
|
||||
6
seed/revenue.csv
Normal file
6
seed/revenue.csv
Normal file
@ -0,0 +1,6 @@
|
||||
date,amount
|
||||
2025-12-01,12345.67
|
||||
2025-12-02,11890.12
|
||||
2025-12-03,13200.00
|
||||
2025-12-04,11110.50
|
||||
2025-12-05,14005.88
|
||||
|
6
seed/revenue.sample.csv
Normal file
6
seed/revenue.sample.csv
Normal file
@ -0,0 +1,6 @@
|
||||
date,amount
|
||||
2025-12-01,12345.67
|
||||
2025-12-02,11890.12
|
||||
2025-12-03,13200.00
|
||||
2025-12-04,11110.50
|
||||
2025-12-05,14005.88
|
||||
|
Loading…
Reference in New Issue
Block a user