全提交
This commit is contained in:
parent
7498da05d4
commit
1ad735db2e
14
.dockerignore
Normal file
14
.dockerignore
Normal file
@ -0,0 +1,14 @@
|
||||
.git/
|
||||
.venv/
|
||||
venv/
|
||||
__pycache__/
|
||||
*.pyc
|
||||
.env
|
||||
data/
|
||||
instance/
|
||||
*.db
|
||||
*.sqlite
|
||||
app.log
|
||||
node_modules/
|
||||
.trae/
|
||||
docs/
|
||||
20
Dockerfile
20
Dockerfile
@ -1,20 +1,22 @@
|
||||
# 多阶段构建:前端静态 → 后端镜像
|
||||
FROM python:3.11-slim
|
||||
FROM python:3.11-alpine
|
||||
WORKDIR /app
|
||||
ENV PYTHONDONTWRITEBYTECODE=1 \
|
||||
PYTHONUNBUFFERED=1 \
|
||||
TZ=Asia/Shanghai
|
||||
RUN sed -i 's|deb.debian.org|mirrors.cloud.tencent.com|g' /etc/apt/sources.list && \
|
||||
sed -i 's|security.debian.org|mirrors.cloud.tencent.com|g' /etc/apt/sources.list && \
|
||||
apt-get update -o Acquire::Retries=3 -o Acquire::http::Timeout=30 && \
|
||||
apt-get install -y --no-install-recommends tzdata && \
|
||||
ln -fs /usr/share/zoneinfo/Asia/Shanghai /etc/localtime && dpkg-reconfigure -f noninteractive tzdata && \
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
TZ=Asia/Shanghai \
|
||||
PIP_INDEX_URL=https://mirrors.cloud.tencent.com/pypi/simple \
|
||||
PIP_DISABLE_PIP_VERSION_CHECK=1 \
|
||||
AUTO_IMPORT_ON_START=1
|
||||
RUN apk add --no-cache tzdata && \
|
||||
cp /usr/share/zoneinfo/Asia/Shanghai /etc/localtime && echo "Asia/Shanghai" > /etc/timezone
|
||||
COPY backend/requirements.txt .
|
||||
RUN pip install --no-cache-dir -i https://mirrors.cloud.tencent.com/pypi/simple -r requirements.txt
|
||||
RUN python -m pip install --no-cache-dir --upgrade pip -i $PIP_INDEX_URL && \
|
||||
pip install --no-cache-dir -r requirements.txt -i $PIP_INDEX_URL
|
||||
COPY backend/ ./backend/
|
||||
COPY frontend/ ./frontend/
|
||||
COPY config.json ./config.json
|
||||
COPY seed/ /app/init/
|
||||
RUN mkdir -p /app/data
|
||||
VOLUME ["/app/data"]
|
||||
EXPOSE 57778
|
||||
CMD ["python", "backend/app.py"]
|
||||
|
||||
@ -12,11 +12,27 @@ import random
|
||||
import os
|
||||
import json
|
||||
import time
|
||||
import csv
|
||||
import io
|
||||
|
||||
load_dotenv()
|
||||
app = Flask(__name__, static_folder="../frontend", static_url_path="/static")
|
||||
CORS(app)
|
||||
app.config['SQLALCHEMY_DATABASE_URI'] = os.getenv("DATABASE_URL", "sqlite:///data.db")
|
||||
def _ensure_sqlite_dir(url):
|
||||
if not url.startswith('sqlite:'):
|
||||
return
|
||||
p = url
|
||||
if p.startswith('sqlite:////'):
|
||||
db_path = p.replace('sqlite:////', '/')
|
||||
elif p.startswith('sqlite:///'):
|
||||
db_path = os.path.join(os.getcwd(), p.replace('sqlite:///', ''))
|
||||
else:
|
||||
return
|
||||
d = os.path.dirname(db_path)
|
||||
if d and not os.path.exists(d):
|
||||
os.makedirs(d, exist_ok=True)
|
||||
_ensure_sqlite_dir(app.config['SQLALCHEMY_DATABASE_URI'])
|
||||
db = SQLAlchemy(app)
|
||||
|
||||
class DailyRevenue(db.Model):
|
||||
@ -258,6 +274,65 @@ def admin_correct():
|
||||
_append_log_line(d.isoformat(), new_amt, load_config().get('shop_name', '益选便利店'))
|
||||
return jsonify({"ok": True})
|
||||
|
||||
def import_csv_text(text: str, actor: str = 'admin'):
    """Import ``date,amount`` CSV rows into DailyRevenue, upserting by date.

    Rows that are empty, a ``date`` header, dated in the future, or that fail
    to parse are silently skipped (best-effort import). Each imported row is
    marked final with source ``import_csv`` and mirrored into the audit log.
    Returns the number of rows imported.
    """
    today = datetime.now().date()
    count = 0
    for record in csv.reader(io.StringIO(text)):
        # Skip blank lines and a literal header row.
        if not record or record[0] == 'date':
            continue
        try:
            day = datetime.strptime(record[0].strip(), '%Y-%m-%d').date()
            if day > today:
                # Never import future-dated revenue.
                continue
            amount = float(record[1].strip())
        except Exception:
            # Malformed row — best-effort import, move on.
            continue
        existing = DailyRevenue.query.filter_by(date=day).first()
        previous = existing.amount if existing else None
        if existing is None:
            existing = DailyRevenue(date=day, amount=amount, is_final=True, source='import_csv')
            db.session.add(existing)
        else:
            existing.amount = amount
            existing.is_final = True
            existing.source = 'import_csv'
        # Audit every change, including first-time inserts (old_amount=None).
        db.session.add(AuditLog(date=day, old_amount=previous, new_amount=amount, reason='import_csv', actor=actor, type='import_log'))
        count += 1
    db.session.commit()
    return count
|
||||
|
||||
@app.route('/api/admin/import', methods=['POST'])
def admin_import():
    """Import a CSV revenue dump POSTed as the raw request body.

    When the ADMIN_TOKEN env var is set, the X-Admin-Token header must match
    it (401 otherwise). An empty body yields 400. On success returns the
    number of rows imported.
    """
    expected = os.getenv('ADMIN_TOKEN')
    if expected and request.headers.get('X-Admin-Token') != expected:
        return jsonify({"error": "unauthorized"}), 401
    body = request.get_data(as_text=True)
    if not body:
        return jsonify({"error": "empty"}), 400
    count = import_csv_text(body, actor='admin')
    return jsonify({"ok": True, "imported": count})
|
||||
|
||||
def auto_import_csv_on_start():
    """Seed the database at startup from the first CSV found on disk.

    Disabled when AUTO_IMPORT_ON_START=0. Candidate files are checked in
    order (mounted data volume first, then bundled seed files) and only the
    first one that exists is imported, attributed to the 'bootstrap' actor.
    """
    with app.app_context():
        if str(os.getenv('AUTO_IMPORT_ON_START', '1')) == '0':
            return
        here = os.path.dirname(__file__)
        candidates = (
            os.path.join("/app", "data", "import.csv"),
            os.path.join(here, "..", "init", "revenue.csv"),
            os.path.join(here, "..", "init", "revenue.sample.csv"),
        )
        for candidate in candidates:
            if not os.path.exists(candidate):
                continue
            with open(candidate, "r", encoding="utf-8") as fh:
                import_csv_text(fh.read(), actor='bootstrap')
            # Only the highest-priority existing file is imported.
            break
|
||||
|
||||
def sync_log_to_db():
|
||||
"""启动时将 app.log 中缺失的数据同步到 DB(只同步今天之前)"""
|
||||
log_map = parse_app_log()
|
||||
@ -279,6 +354,7 @@ if __name__ == "__main__":
|
||||
scheduler.start()
|
||||
with app.app_context():
|
||||
sync_log_to_db()
|
||||
auto_import_csv_on_start()
|
||||
app.run(host="0.0.0.0", port=int(os.getenv("PORT", "5000")))
|
||||
|
||||
@app.route('/api/events')
|
||||
|
||||
@ -5,10 +5,10 @@ services:
|
||||
ports:
|
||||
- "57778:57778"
|
||||
environment:
|
||||
- DATABASE_URL=sqlite:///data/data.db
|
||||
- DATABASE_URL=sqlite:////app/data/data.db
|
||||
- TZ=Asia/Shanghai
|
||||
- ADMIN_TOKEN=${ADMIN_TOKEN}
|
||||
- PORT=57778
|
||||
volumes:
|
||||
- ./data:/app/data
|
||||
- ./data:/app/data:Z
|
||||
restart: unless-stopped
|
||||
|
||||
Loading…
Reference in New Issue
Block a user