Initial commit
This commit is contained in:
36
.env
Normal file
36
.env
Normal file
@@ -0,0 +1,36 @@
|
||||
# Option A: single URL (recommended)
|
||||
DATABASE_URL=postgresql+psycopg2://admin:Package%40123%23@31.97.228.132:5432/pgworkolik
|
||||
DB_ECHO=false
|
||||
|
||||
# App Settings
|
||||
APP_SECRET=80khAhsZiYbCXB_mehHfGZ-oAhmU9jxPp8AR11AUuvWz-wpUgIXliqVOfNihYIhV
|
||||
|
||||
|
||||
# Option B: parts (no DATABASE_URL needed if you set all parts)
|
||||
DB_HOST=31.97.228.132
|
||||
DB_PORT=5432
|
||||
DB_NAME=pgworkolik
|
||||
DB_USER=admin
|
||||
DB_PASSWORD=Package@123#
|
||||
DB_ECHO=false
|
||||
|
||||
# App Settings
|
||||
APP_SECRET=80khAhsZiYbCXB_mehHfGZ-oAhmU9jxPp8AR11AUuvWz-wpUgIXliqVOfNihYIhV
|
||||
BACKGROUND_IMAGE_URL=assets/bg.jpg
|
||||
|
||||
|
||||
SMTP_HOST=smtp.gmail.com
|
||||
SMTP_PORT=587
|
||||
SMTP_USER=workolik360@gmail.com
|
||||
SMTP_PASSWORD=nggo euhg chus yyyw
|
||||
SMTP_USE_TLS=true
|
||||
SMTP_FROM_EMAIL=workolik360@gmail.com
|
||||
SMTP_FROM_NAME=Workolik Team
|
||||
|
||||
REPORT_RECIPIENTS=Darshan@caman.au,darshan@caman.com.au,workolik360@gmail.com,ColinA@caman.au,ColinA@caman.com.au,tabs@tuckerfresh.com.au,jay@tuckerfresh.com.au,sanjay@tuckerfresh.com.au,veer@tuckerfresh.com.au
|
||||
|
||||
|
||||
BCC_RECIPIENTS=fazulilahi@gmail.com
|
||||
|
||||
# Darshan@caman.au,ColinA@caman.au,tabs@tuckerfresh.com.au,
|
||||
# jay@tuckerfresh.com.au
|
||||
13
.streamlit/config.toml
Normal file
13
.streamlit/config.toml
Normal file
@@ -0,0 +1,13 @@
|
||||
[theme]
|
||||
base = "light"
|
||||
primaryColor = "#4F46E5"
|
||||
backgroundColor = "#F8FAFC"
|
||||
secondaryBackgroundColor = "#FFFFFF"
|
||||
textColor = "#0F172A"
|
||||
font = "sans serif"
|
||||
|
||||
[client]
|
||||
showSidebarNavigation = false
|
||||
|
||||
[server]
|
||||
fileWatcherType = "poll"
|
||||
22
Dockerfile
Normal file
22
Dockerfile
Normal file
@@ -0,0 +1,22 @@
|
||||
FROM python:3.12-slim

# No .pyc files, unbuffered logs, no pip cache, and no Streamlit telemetry.
ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1 \
    PIP_NO_CACHE_DIR=1 \
    STREAMLIT_BROWSER_GATHER_USAGE_STATS=false

WORKDIR /app

# System deps (timezone data for TZ support)
RUN apt-get update && apt-get install -y --no-install-recommends tzdata && rm -rf /var/lib/apt/lists/*

# Install requirements before copying the source so the dependency layer is
# cached across code-only changes.
COPY requirements.txt /app/
RUN pip install -r requirements.txt

COPY . /app

EXPOSE 8501

CMD ["streamlit", "run", "app.py", "--server.port=8501", "--server.address=0.0.0.0"]
|
||||
|
||||
|
||||
18
README.md
Normal file
18
README.md
Normal file
@@ -0,0 +1,18 @@
|
||||
## TuckerF Workolik - Streamlit App
|
||||
|
||||
A modern Streamlit application with Postgres-backed authentication and a clean architecture. Includes four pages: Analytics (logs), Data (payloads), Mailer, and Mappings.
|
||||
|
||||
### Quickstart
|
||||
1. Create a `.env` file from `.env.example` and fill values.
|
||||
2. Install dependencies:
|
||||
```
|
||||
pip install -r requirements.txt
|
||||
```
|
||||
3. Run the app:
|
||||
```
|
||||
streamlit run app.py
|
||||
```
|
||||
|
||||
### Notes
|
||||
- `pages/` lives at project root (Streamlit requirement).
|
||||
- All other implementation code is under `app_core/` for clean structure.
|
||||
101
app.py
Normal file
101
app.py
Normal file
@@ -0,0 +1,101 @@
|
||||
# -*- coding: utf-8 -*-
# Streamlit entry point: authentication gate, sidebar navigation, and routing
# to the feature pages under pages/ (sidebar auto-navigation is disabled in
# .streamlit/config.toml, so routing is done manually below).
import os
import streamlit as st
from app_core.config.settings import AppSettings
from app_core.services.auth_service import AuthService
from app_core.ui.auth_ui import render_auth_card
from app_core.ui.layout import apply_global_style, render_topbar, render_header, render_sidebar_logo

settings = AppSettings()  # loads env

# App config
st.set_page_config(
    page_title="Workolik",
    page_icon="assets/workolik.png",
    layout="wide",
    initial_sidebar_state="expanded",
)

apply_global_style(background_url=os.getenv("BACKGROUND_IMAGE_URL"))

auth_service = AuthService()

# Initialize session state
if "auth_user" not in st.session_state:
    st.session_state.auth_user = None

# ✅ FIXED MENU (no emojis here)
menu = ["Analytics", "Data", "Mailer", "Mappings"]

# ✅ ICON MAP — emojis live only in the display layer (format_func below),
# never in the routing values.
icons = {
    "Analytics": "📊",
    "Data": "📦",
    "Mailer": "✉️",
    "Mappings": "📋"
}

# Unauthenticated users only see the login card; hide the sidebar entirely.
if st.session_state.auth_user is None:
    render_topbar()
    st.markdown('<style>[data-testid="stSidebar"]{display:none;}</style>', unsafe_allow_html=True)
    render_auth_card(auth_service)
    st.stop()

# Topbar
render_topbar()

# Dim background
st.markdown("""
<style>
.stApp::before { opacity: 0.1 !important; }
</style>
""", unsafe_allow_html=True)

with st.sidebar:
    render_sidebar_logo()

    st.markdown('<div class="sidebar-content">', unsafe_allow_html=True)

    # Navigation
    st.markdown('<div class="sidebar-section">', unsafe_allow_html=True)
    st.markdown("### Navigation")

    choice = st.selectbox(
        "Page",
        menu,
        index=0,
        format_func=lambda x: f"{icons.get(x, '')} {x}"  # ✅ Safe rendering
    )

    st.markdown('</div>', unsafe_allow_html=True)

    # Spacer
    st.markdown('<div class="sidebar-spacer"></div>', unsafe_allow_html=True)

    # Logout Section
    st.markdown('<div class="sidebar-logout">', unsafe_allow_html=True)
    st.caption(f"Logged in as: {st.session_state.auth_user['email']}")

    if st.button("Logout", type="secondary"):
        st.session_state.auth_user = None
        st.rerun()

    st.markdown('</div>', unsafe_allow_html=True)
    st.markdown('</div>', unsafe_allow_html=True)

# ✅ ROUTING FIXED (no emojis in condition)
# Pages are imported lazily so only the selected page module executes.
if choice == "Analytics":
    from pages.see_logs import render_page
    render_page()

elif choice == "Data":
    from pages.see_payload import render_page
    render_page()

elif choice == "Mailer":
    from pages.mailer import render_page
    render_page()

elif choice == "Mappings":
    from pages.mappings import render_page
    render_page()
|
||||
1
app_core/__init__.py
Normal file
1
app_core/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
# App Core Package
|
||||
BIN
app_core/__pycache__/__init__.cpython-312.pyc
Normal file
BIN
app_core/__pycache__/__init__.cpython-312.pyc
Normal file
Binary file not shown.
1
app_core/config/__init__.py
Normal file
1
app_core/config/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
# Config Package
|
||||
BIN
app_core/config/__pycache__/__init__.cpython-312.pyc
Normal file
BIN
app_core/config/__pycache__/__init__.cpython-312.pyc
Normal file
Binary file not shown.
BIN
app_core/config/__pycache__/settings.cpython-312.pyc
Normal file
BIN
app_core/config/__pycache__/settings.cpython-312.pyc
Normal file
Binary file not shown.
78
app_core/config/settings.py
Normal file
78
app_core/config/settings.py
Normal file
@@ -0,0 +1,78 @@
|
||||
import os
from urllib.parse import quote_plus

from dotenv import load_dotenv
from pydantic import BaseModel

# Load .env first (if present)
load_dotenv(dotenv_path=".env", override=False)
# Also load .env-example.txt as a fallback for local dev (does not override)
load_dotenv(dotenv_path=".env-example.txt", override=False)


class AppSettings(BaseModel):
    """Application configuration snapshot read from the environment.

    NOTE: the defaults below are evaluated once, when this class is first
    defined (import time), not when an instance is created.
    """

    # Raw DB connection pieces ("Option B" in .env)
    db_host: str | None = os.getenv("DB_HOST")
    db_port: str | None = os.getenv("DB_PORT")
    db_name: str | None = os.getenv("DB_NAME")
    db_user: str | None = os.getenv("DB_USER")
    db_password: str | None = os.getenv("DB_PASSWORD")
    db_echo: bool = os.getenv("DB_ECHO", "false").lower() == "true"

    # Optional complete URL (takes precedence if set)
    database_url_env: str | None = os.getenv("DATABASE_URL")

    app_secret: str = os.getenv("APP_SECRET", "change_me")
    background_image_url: str | None = os.getenv("BACKGROUND_IMAGE_URL")

    # SMTP / Email settings
    smtp_host: str | None = os.getenv("SMTP_HOST")
    smtp_port: int | None = int(os.getenv("SMTP_PORT", "587"))
    smtp_user: str | None = os.getenv("SMTP_USER")
    smtp_password: str | None = os.getenv("SMTP_PASSWORD")
    smtp_use_tls: bool = os.getenv("SMTP_USE_TLS", "true").lower() == "true"
    smtp_from_email: str | None = os.getenv("SMTP_FROM_EMAIL")
    smtp_from_name: str = os.getenv("SMTP_FROM_NAME", "Workolik Team")

    # Default recipients for automated reports (comma-separated)
    report_recipients: str | None = os.getenv("REPORT_RECIPIENTS")

    @property
    def database_url(self) -> str:
        """Resolve the SQLAlchemy URL.

        An explicit DATABASE_URL wins (asyncpg is normalized to psycopg2);
        otherwise the URL is assembled from the individual DB_* parts.
        Returns "" when neither form is configured.
        """
        if self.database_url_env:
            # Normalize asyncpg to psycopg2 if needed
            if self.database_url_env.startswith("postgresql+asyncpg://"):
                return self.database_url_env.replace(
                    "postgresql+asyncpg://", "postgresql+psycopg2://", 1
                )
            return self.database_url_env
        # Build from parts. Credentials must be percent-encoded: raw values
        # like "Package@123#" contain '@' and '#', which would otherwise be
        # parsed as the host separator and URL fragment respectively.
        if all([self.db_host, self.db_port, self.db_name, self.db_user, self.db_password]):
            return (
                f"postgresql+psycopg2://{quote_plus(self.db_user)}:{quote_plus(self.db_password)}"
                f"@{self.db_host}:{self.db_port}/{self.db_name}"
            )
        # Fallback empty (will error at runtime if used)
        return ""


# Fixed mapping of stores to tenant IDs and division codes
# Used by analytics and data pages to scope queries per store
STORES = [
    {"label": "Porters Liquor Claremont - PC", "code": "PC", "tenant_id": 1},
    {"label": "Porters Iluka - IP", "code": "IP", "tenant_id": 2},
    {"label": "Cellarbrations at Morris Place - ML", "code": "ML", "tenant_id": 3},
    {"label": "Cellarbrations at Lynwood - CL", "code": "CL4", "tenant_id": 4},
    {"label": "Cellarbrations at Nicholson Road - NL", "code": "NL", "tenant_id": 5},
    {"label": "Cellarbrations at Treeby - CL ", "code": "CL6", "tenant_id": 6},
    {"label": "The Bottle-O Rossmoyne - RC", "code": "RC", "tenant_id": 7},
    {"label": "Porters Liquor Piara Waters - PL", "code": "PL", "tenant_id": 8},
]

# Helper map for quick lookups by code (supports variants like CL-4 → CL4)
STORE_CODE_TO_TENANT_ID: dict[str, int] = {
    "PC": 1,
    "IP": 2,
    "ML": 3,
    "CL4": 4, "CL-4": 4, "CL_4": 4, "CL": 4,  # default CL → 4
    "NL": 5, "NL5": 5, "NL-5": 5,
    "CL6": 6, "CL-6": 6, "CL_6": 6,
    "RC": 7,
    "PL": 8,
}
|
||||
1
app_core/db/__init__.py
Normal file
1
app_core/db/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
# Database Package
|
||||
BIN
app_core/db/__pycache__/__init__.cpython-312.pyc
Normal file
BIN
app_core/db/__pycache__/__init__.cpython-312.pyc
Normal file
Binary file not shown.
BIN
app_core/db/__pycache__/database.cpython-312.pyc
Normal file
BIN
app_core/db/__pycache__/database.cpython-312.pyc
Normal file
Binary file not shown.
BIN
app_core/db/__pycache__/models.cpython-312.pyc
Normal file
BIN
app_core/db/__pycache__/models.cpython-312.pyc
Normal file
Binary file not shown.
21
app_core/db/database.py
Normal file
21
app_core/db/database.py
Normal file
@@ -0,0 +1,21 @@
|
||||
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, declarative_base
from app_core.config.settings import AppSettings

settings = AppSettings()

# Fail fast at import time if no usable database configuration was found
# (AppSettings.database_url returns "" when nothing is configured).
if not settings.database_url:
    raise RuntimeError(
        "Database configuration missing. Set DATABASE_URL or DB_HOST/DB_PORT/DB_NAME/DB_USER/DB_PASSWORD in a .env file at the project root."
    )

# pool_pre_ping revalidates pooled connections before use, avoiding
# stale-connection errors after the DB drops idle sockets.
engine = create_engine(settings.database_url, pool_pre_ping=True, future=True, echo=settings.db_echo)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine, future=True)
Base = declarative_base()


def get_db_session():
    """Yield a session and guarantee it is closed (generator dependency)."""
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()
|
||||
36
app_core/db/models.py
Normal file
36
app_core/db/models.py
Normal file
@@ -0,0 +1,36 @@
|
||||
from sqlalchemy import Column, Integer, String, DateTime, func, UniqueConstraint
|
||||
from .database import Base
|
||||
|
||||
class User(Base):
    """Application login account (email + bcrypt password hash)."""

    __tablename__ = "workolik_users"
    __table_args__ = (
        UniqueConstraint("email", name="uq_workolik_users_email"),
    )

    id = Column(Integer, primary_key=True, index=True)
    # Stored lowercased by AuthService; uniqueness also enforced in the DB.
    email = Column(String(255), nullable=False, unique=True, index=True)
    # bcrypt hash produced by AuthService._hash_password — never plaintext.
    password_hash = Column(String(255), nullable=False)
    created_at = Column(DateTime(timezone=True), server_default=func.now(), nullable=False)
|
||||
|
||||
|
||||
class EmailLog(Base):
    """Audit record of one outgoing report-email attempt."""

    __tablename__ = "email_logs"

    id = Column(Integer, primary_key=True, index=True)
    sent_at = Column(DateTime(timezone=True), server_default=func.now(), nullable=False)
    # Comma-joined list of To addresses (see MailerService.log_email).
    recipients = Column(String(1024), nullable=False)
    subject = Column(String(255), nullable=False)
    status = Column(String(50), nullable=False)  # sent / failed
    # Failure detail when status == "failed"; NULL on success.
    error = Column(String(1024))
    # Report date the email covered, stored as text (callers pass str(date)).
    date_for = Column(String(32), nullable=False)
|
||||
|
||||
class TriumphDebtorMapping(Base):
    """Row linking a debtor code/account to an outlet.

    NOTE(review): column semantics inferred from names only — confirm
    against the Mappings page that reads/writes this table.
    """

    __tablename__ = "triumph_debtor_mappings"

    id = Column(Integer, primary_key=True, index=True)
    code = Column(String(50))
    name = Column(String(255))
    # presumably the Triumph debtor account number — verify with callers
    dbmacc = Column(String(50))
    outlet = Column(String(255))
    created_at = Column(DateTime(timezone=True), server_default=func.now())
    updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now())
|
||||
BIN
app_core/services/__pycache__/auth_service.cpython-312.pyc
Normal file
BIN
app_core/services/__pycache__/auth_service.cpython-312.pyc
Normal file
Binary file not shown.
BIN
app_core/services/__pycache__/daily_report.cpython-312.pyc
Normal file
BIN
app_core/services/__pycache__/daily_report.cpython-312.pyc
Normal file
Binary file not shown.
BIN
app_core/services/__pycache__/mailer_service.cpython-312.pyc
Normal file
BIN
app_core/services/__pycache__/mailer_service.cpython-312.pyc
Normal file
Binary file not shown.
BIN
app_core/services/__pycache__/mappings_service.cpython-312.pyc
Normal file
BIN
app_core/services/__pycache__/mappings_service.cpython-312.pyc
Normal file
Binary file not shown.
BIN
app_core/services/__pycache__/scheduler_service.cpython-312.pyc
Normal file
BIN
app_core/services/__pycache__/scheduler_service.cpython-312.pyc
Normal file
Binary file not shown.
46
app_core/services/auth_service.py
Normal file
46
app_core/services/auth_service.py
Normal file
@@ -0,0 +1,46 @@
|
||||
from typing import Optional
|
||||
from sqlalchemy.orm import Session
|
||||
from sqlalchemy import select
|
||||
import bcrypt
|
||||
from app_core.db.database import engine, Base, SessionLocal
|
||||
from app_core.db.models import User
|
||||
|
||||
# Create tables on import
# NOTE(review): module-level side effect — DDL runs the first time this module
# is imported, so importing AuthService requires a reachable database.
Base.metadata.create_all(bind=engine)
|
||||
|
||||
class AuthService:
    """Email/password authentication backed by the workolik_users table.

    Methods return (ok, ...) tuples with user-facing messages rather than
    raising, so the Streamlit UI can surface them directly.
    """

    def __init__(self) -> None:
        self._session_factory = SessionLocal

    def _hash_password(self, raw_password: str) -> str:
        """Hash a plaintext password with a fresh per-password bcrypt salt."""
        salt = bcrypt.gensalt()
        return bcrypt.hashpw(raw_password.encode("utf-8"), salt).decode("utf-8")

    def _verify_password(self, raw_password: str, hashed: str) -> bool:
        """Check raw_password against a stored bcrypt hash; False on any error."""
        try:
            return bcrypt.checkpw(raw_password.encode("utf-8"), hashed.encode("utf-8"))
        except Exception:
            # A malformed/legacy hash in the DB should read as a failed
            # login, not an exception bubbling into the UI.
            return False

    def signup(self, email: str, password: str) -> tuple[bool, str]:
        """Create a new account. Returns (ok, user-facing message)."""
        from sqlalchemy.exc import IntegrityError

        email = email.strip().lower()
        if not email or not password:
            return False, "Email and password are required."
        with self._session_factory() as db:  # type: Session
            exists = db.execute(select(User).where(User.email == email)).scalar_one_or_none()
            if exists:
                return False, "Email already registered."
            user = User(email=email, password_hash=self._hash_password(password))
            db.add(user)
            try:
                db.commit()
            except IntegrityError:
                # A concurrent signup can slip between our SELECT and this
                # INSERT; the unique constraint on email is the source of
                # truth, so report it the same way as the pre-check.
                db.rollback()
                return False, "Email already registered."
            return True, "Account created. Please login."

    def login(self, email: str, password: str) -> tuple[bool, Optional[dict], str]:
        """Validate credentials. Returns (ok, {"id", "email"} | None, message)."""
        email = email.strip().lower()
        if not email or not password:
            return False, None, "Email and password are required."
        with self._session_factory() as db:  # type: Session
            user = db.execute(select(User).where(User.email == email)).scalar_one_or_none()
            if not user or not self._verify_password(password, user.password_hash):
                return False, None, "Invalid credentials."
            return True, {"id": user.id, "email": user.email}, "Login successful."
|
||||
53
app_core/services/daily_report.py
Normal file
53
app_core/services/daily_report.py
Normal file
@@ -0,0 +1,53 @@
|
||||
import os
|
||||
from datetime import datetime
|
||||
from zoneinfo import ZoneInfo
|
||||
|
||||
from app_core.config.settings import AppSettings
|
||||
from app_core.services.mailer_service import MailerService
|
||||
|
||||
|
||||
def main(for_date: str | None = None, force: bool = False) -> int:
    """Build and send the daily digest email.

    Returns a shell-style exit code: 0 for success or a benign skip,
    non-zero for a bad date, missing configuration, or a send failure.
    """
    settings = AppSettings()
    service = MailerService(settings)

    if for_date:
        # An explicit date from the caller must be ISO formatted.
        try:
            chosen = datetime.strptime(for_date, "%Y-%m-%d").date()
        except ValueError:
            print(f"Invalid date format: {for_date}. Expected YYYY-MM-DD.")
            return 1
    else:
        # Default to "today" in IST; the service falls back to the most
        # recent date that actually has data.
        today_ist = datetime.now(ZoneInfo("Asia/Kolkata")).date()
        chosen = service.select_report_date(preferred=today_ist)

    if not chosen:
        print("No data available to send.")
        return 1

    date_str = str(chosen)
    # Idempotence guard: skip if a successful send was already logged.
    if not force and service.has_sent_for_date(date_str):
        print(f"Already sent for {chosen}; skipping.")
        return 0

    df = service.fetch_daily_rows(chosen)
    if df.empty:
        print("Selected date has no rows; nothing to send.")
        return 0

    html = service.build_email_html(df.iloc[0].to_dict(), df)

    recipients_env = settings.report_recipients or os.getenv("REPORT_RECIPIENTS")
    if not recipients_env:
        print("REPORT_RECIPIENTS env var is empty. Set it to comma-separated emails.")
        return 2
    recipients = [addr.strip() for addr in recipients_env.split(',') if addr.strip()]

    subject = f"Daily Digest - {chosen}"
    ok, msg = service.send_email(recipients, subject=subject, html=html)
    service.log_email(
        recipients,
        subject=subject,
        date_for=date_str,
        status="sent" if ok else "failed",
        error=None if ok else msg,
    )
    print("Sent" if ok else f"Failed: {msg}")
    return 0 if ok else 3


if __name__ == "__main__":
    raise SystemExit(main())
|
||||
|
||||
|
||||
363
app_core/services/mailer_service.py
Normal file
363
app_core/services/mailer_service.py
Normal file
@@ -0,0 +1,363 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import smtplib
|
||||
import sys
|
||||
import os
|
||||
from email.mime.multipart import MIMEMultipart
|
||||
from email.mime.text import MIMEText
|
||||
from typing import List, Tuple
|
||||
from datetime import date
|
||||
from datetime import date
|
||||
import pandas as pd
|
||||
from sqlalchemy import text
|
||||
import streamlit as st
|
||||
|
||||
# Add the project root to Python path
|
||||
project_root = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
if project_root not in sys.path:
|
||||
sys.path.insert(0, project_root)
|
||||
|
||||
from app_core.config.settings import AppSettings, STORES
|
||||
from app_core.db.database import engine, SessionLocal
|
||||
from app_core.db.models import EmailLog
|
||||
|
||||
|
||||
class MailerService:
|
||||
    def __init__(self, settings: AppSettings | None = None) -> None:
        """Use the given settings, or load them from the environment."""
        self.settings = settings or AppSettings()
|
||||
|
||||
    def fetch_daily_rows(self, report_date) -> pd.DataFrame:
        """Fetch every tenantpostings row created on `report_date`.

        Newest rows first, capped at 10,000.
        """
        # NOTE(review): %(d)s is driver-level (pyformat) placeholder syntax;
        # confirm pandas forwards it to psycopg2 unchanged on this SQLAlchemy
        # connection rather than expecting :d-style binds.
        sql = (
            'SELECT * FROM "tenantpostings" '
            'WHERE "created_at"::date = %(d)s '
            'ORDER BY "id" DESC '
            'LIMIT 10000'
        )
        with engine.connect() as conn:
            df = pd.read_sql(sql, conn, params={"d": report_date})
        return df
|
||||
|
||||
    def select_report_date(self, preferred: date | None = None) -> date | None:
        """Return preferred date if it has data; else most recent date with data; else None."""
        with engine.connect() as conn:
            dates_df = pd.read_sql(
                'SELECT "created_at"::date AS d, COUNT(*) AS c\n'
                'FROM "tenantpostings"\n'
                'GROUP BY d\n'
                'ORDER BY d DESC',
                conn,
            )
        if dates_df.empty:
            return None
        # Normalize
        if 'd' not in dates_df.columns:
            return None
        # Coerce to datetimes; unparseable values become NaT and are dropped.
        dates_df['d'] = pd.to_datetime(dates_df['d'], errors='coerce')
        available = [d.date() for d in dates_df['d'].dropna().tolist()]
        if preferred and preferred in available:
            return preferred
        # Dates were ordered DESC, so index 0 is the most recent with data.
        return available[0] if available else None
|
||||
|
||||
def build_email_html(self, row: dict, df: pd.DataFrame | None = None) -> str:
|
||||
# Robust de-duplication of the entire dataframe before processing
|
||||
if df is not None and not df.empty:
|
||||
def get_priority(status):
|
||||
val = str(status).lower()
|
||||
if any(x in val for x in ["success", "ok", "posted", "completed", "done"]):
|
||||
return 0
|
||||
if any(x in val for x in ["pending", "queue", "waiting", "processing"]):
|
||||
return 1
|
||||
return 2
|
||||
|
||||
df = df.copy()
|
||||
if 'triumph_status' in df.columns:
|
||||
df['_priority'] = df['triumph_status'].apply(get_priority)
|
||||
else:
|
||||
df['_priority'] = 2
|
||||
|
||||
# Sort by priority (success first) and then by ID (newest first)
|
||||
sort_cols = ['_priority', 'id']
|
||||
df = df.sort_values(sort_cols, ascending=[True, False])
|
||||
|
||||
# 1. Deduplicate by triumph_event (if present and not empty)
|
||||
if 'triumph_event' in df.columns:
|
||||
has_event = (df['triumph_event'].fillna('').astype(str).str.strip() != '') & (df['triumph_event'].astype(str) != '-')
|
||||
df_with_ev = df[has_event].drop_duplicates(subset=['tenant_id', 'processing_type', 'triumph_event'], keep='first')
|
||||
df_no_ev = df[~has_event]
|
||||
df = pd.concat([df_with_ev, df_no_ev]).sort_values(sort_cols, ascending=[True, False])
|
||||
|
||||
# 2. Deduplicate by register_close_id (for Journals/Banking Journals)
|
||||
if 'register_close_id' in df.columns:
|
||||
has_rc = (df['register_close_id'].fillna('').astype(str).str.strip() != '') & (df['register_close_id'].astype(str) != '-')
|
||||
df_with_rc = df[has_rc].drop_duplicates(subset=['tenant_id', 'processing_type', 'register_close_id'], keep='first')
|
||||
df_no_rc = df[~has_rc]
|
||||
df = pd.concat([df_with_rc, df_no_rc]).sort_values(sort_cols, ascending=[True, False])
|
||||
|
||||
# 3. Deduplicate by sale_ids (for Invoices/Receipts)
|
||||
if 'sale_ids' in df.columns:
|
||||
has_sales = (df['sale_ids'].fillna('').astype(str).str.strip() != '')
|
||||
df_with_sales = df[has_sales].drop_duplicates(subset=['tenant_id', 'processing_type', 'sale_ids'], keep='first')
|
||||
df_no_sales = df[~has_sales]
|
||||
df = pd.concat([df_with_sales, df_no_sales]).sort_values(sort_cols, ascending=[True, False])
|
||||
|
||||
df = df.drop(columns=['_priority'], errors='ignore')
|
||||
|
||||
outlet = row.get("outlet_name") or row.get("register_name") or "Outlet"
|
||||
division = row.get("division_code") or "PC"
|
||||
status = (row.get("triumph_status") or "Posted successfully").capitalize()
|
||||
register_close_id = row.get("register_close_id", "-")
|
||||
register_id = row.get("register_id", "-")
|
||||
|
||||
def lines_for(ptype: str) -> list[str]:
|
||||
"""Return formatted lines for all rows of a processing_type.
|
||||
Example line: 3,616.19 (Event ID: 2904783)
|
||||
"""
|
||||
if df is None or df.empty or 'processing_type' not in df.columns:
|
||||
return []
|
||||
sub = df[df['processing_type'].astype(str).str.upper() == ptype.upper()] if 'processing_type' in df.columns else pd.DataFrame()
|
||||
if sub.empty:
|
||||
return []
|
||||
|
||||
# Data is already deduplicated at the start of build_email_html
|
||||
sub = sub.sort_values('id', ascending=False)
|
||||
|
||||
result: list[str] = []
|
||||
for _, r in sub.sort_values('id', ascending=False).iterrows():
|
||||
amt = r.get('total_amount')
|
||||
evt = r.get('triumph_event', '-')
|
||||
try:
|
||||
amt_str = f"{float(amt):,.2f}"
|
||||
except Exception:
|
||||
amt_str = str(amt) if amt is not None else '-'
|
||||
result.append(f"<span style=\"font-weight:600;\">{amt_str}</span> (Event ID: <span style=\"font-weight:600;\">{evt}</span>)")
|
||||
return result
|
||||
|
||||
journal_lines = lines_for('JOURNAL')
|
||||
bank_journal_lines = lines_for('BANKING_JOURNAL')
|
||||
invoice_lines = lines_for('INVOICE')
|
||||
receipt_lines = lines_for('RECEIPT')
|
||||
|
||||
# Optional: transaction summary by store (single table)
|
||||
store_summary_table_html = ""
|
||||
events_matrix_html = ""
|
||||
if isinstance(df, pd.DataFrame) and not df.empty and ('tenant_id' in df.columns):
|
||||
def summarize_for(store: dict) -> dict[str, str]:
|
||||
sid = store.get('tenant_id')
|
||||
name = store.get('label')
|
||||
sub = df[df['tenant_id'] == sid]
|
||||
|
||||
# Data is already deduplicated at the start of build_email_html
|
||||
sub = sub.sort_values('id', ascending=False)
|
||||
|
||||
def pick_total(kind: str) -> tuple[str, int]:
|
||||
if sub.empty or 'processing_type' not in sub.columns:
|
||||
return ("0.00", 0)
|
||||
s = sub[sub['processing_type'].astype(str).str.upper() == kind]
|
||||
if s.empty:
|
||||
return ("0.00", 0)
|
||||
try:
|
||||
total = float(s['total_amount'].fillna(0).sum()) if 'total_amount' in s.columns else 0.0
|
||||
except Exception:
|
||||
total = 0.0
|
||||
return (f"{total:,.2f}", len(s))
|
||||
def has_rows(kind: str) -> bool:
|
||||
if sub.empty or 'processing_type' not in sub.columns:
|
||||
return False
|
||||
s = sub[sub['processing_type'].astype(str).str.upper() == kind]
|
||||
return not s.empty
|
||||
def latest_event(kind: str) -> str:
|
||||
if sub.empty or 'processing_type' not in sub.columns:
|
||||
return "-"
|
||||
s = sub[sub['processing_type'].astype(str).str.upper() == kind]
|
||||
if s.empty:
|
||||
return "-"
|
||||
series = s.get('triumph_event') if 'triumph_event' in s.columns else None
|
||||
if series is None or series.empty:
|
||||
return "-"
|
||||
try:
|
||||
return str(series.dropna().astype(str).iloc[0])
|
||||
except Exception:
|
||||
return "-"
|
||||
def latest_status_emoji(kind: str) -> str:
|
||||
if sub.empty or 'processing_type' not in sub.columns:
|
||||
return ""
|
||||
s = sub[sub['processing_type'].astype(str).str.upper() == kind]
|
||||
if s.empty:
|
||||
return ""
|
||||
status_series = s.get('triumph_status') if 'triumph_status' in s.columns else None
|
||||
if status_series is None or status_series.empty:
|
||||
return ""
|
||||
try:
|
||||
val = str(status_series.iloc[0]).strip().lower()
|
||||
except Exception:
|
||||
val = ""
|
||||
if any(x in val for x in ["success", "ok", "completed", "done"]):
|
||||
return " ✅"
|
||||
if any(x in val for x in ["fail", "error", "invalid", "dead"]):
|
||||
return " ❌"
|
||||
if any(x in val for x in ["pending", "queue", "waiting", "processing"]):
|
||||
return " ⚠️"
|
||||
return ""
|
||||
j_total, _ = pick_total('JOURNAL')
|
||||
b_total, _ = pick_total('BANKING_JOURNAL')
|
||||
i_total, _ = pick_total('INVOICE')
|
||||
r_total, _ = pick_total('RECEIPT')
|
||||
j_eid = latest_event('JOURNAL'); j_stat = latest_status_emoji('JOURNAL')
|
||||
b_eid = latest_event('BANKING_JOURNAL'); b_stat = latest_status_emoji('BANKING_JOURNAL')
|
||||
i_eid = latest_event('INVOICE'); i_stat = latest_status_emoji('INVOICE')
|
||||
r_eid = latest_event('RECEIPT'); r_stat = latest_status_emoji('RECEIPT')
|
||||
def render_cell(exists: bool, total: str, eid: str, stat: str, ptype: str = "") -> str:
|
||||
if not exists:
|
||||
return "<span style=\"color:#9AA4B2;\">Nill</span>"
|
||||
|
||||
# For INVOICE and RECEIPT, show individual line items if multiple exist
|
||||
if ptype.upper() in ['INVOICE', 'RECEIPT'] and sub is not None and not sub.empty:
|
||||
type_sub = sub[sub['processing_type'].astype(str).str.upper() == ptype.upper()]
|
||||
if len(type_sub) > 1: # Multiple transactions
|
||||
individual_lines = []
|
||||
for _, r in type_sub.sort_values('id', ascending=False).iterrows():
|
||||
amt = r.get('total_amount')
|
||||
evt = r.get('triumph_event', '-')
|
||||
status_val = str(r.get('triumph_status', '')).strip().lower()
|
||||
status_emoji = ""
|
||||
if any(x in status_val for x in ["success", "ok", "completed", "done"]):
|
||||
status_emoji = " ✅"
|
||||
elif any(x in status_val for x in ["fail", "error", "invalid", "dead"]):
|
||||
status_emoji = " ❌"
|
||||
elif any(x in status_val for x in ["pending", "queue", "waiting", "processing"]):
|
||||
status_emoji = " ⚠️"
|
||||
try:
|
||||
amt_str = f"{float(amt):,.2f}"
|
||||
except Exception:
|
||||
amt_str = str(amt) if amt is not None else '-'
|
||||
individual_lines.append(f"<div style=\"font-size:11px;margin:1px 0;\">{amt_str} ({evt}){status_emoji}</div>")
|
||||
|
||||
return f"<strong>{total}</strong><br/><span style=\"color:#64748b;font-size:10px;\">Total ({len(type_sub)} items)</span><br/>{''.join(individual_lines)}"
|
||||
|
||||
return f"<strong>{total}</strong><br/><span style=\"color:#64748b\">({eid})</span> {stat}"
|
||||
|
||||
return {
|
||||
"name": name,
|
||||
"journal": render_cell(has_rows('JOURNAL'), j_total, j_eid, j_stat),
|
||||
"banking": render_cell(has_rows('BANKING_JOURNAL'), b_total, b_eid, b_stat),
|
||||
"invoice": render_cell(has_rows('INVOICE'), i_total, i_eid, i_stat, 'INVOICE'),
|
||||
"receipt": render_cell(has_rows('RECEIPT'), r_total, r_eid, r_stat, 'RECEIPT'),
|
||||
}
|
||||
|
||||
rows = [summarize_for(s) for s in STORES]
|
||||
# Build single HTML table
|
||||
header = (
|
||||
"<tr>"
|
||||
"<th style=\"text-align:left;padding:10px;color:#E2E8F0;\">Store Name</th>"
|
||||
"<th style=\"text-align:left;padding:10px;color:#E2E8F0;\">Journal</th>"
|
||||
"<th style=\"text-align:left;padding:10px;color:#E2E8F0;\">Banking Journal</th>"
|
||||
"<th style=\"text-align:left;padding:10px;color:#E2E8F0;\">Account Sales</th>"
|
||||
"<th style=\"text-align:left;padding:10px;color:#E2E8F0;\">Account Payments</th>"
|
||||
"</tr>"
|
||||
)
|
||||
body = []
|
||||
for r in rows:
|
||||
body.append(
|
||||
"<tr>"
|
||||
f"<td style=\"padding:10px;border-top:1px solid #1F2937;color:#F8FAFC;\">{r['name']}</td>"
|
||||
f"<td style=\"padding:10px;border-top:1px solid #1F2937;color:#F8FAFC;\">{r['journal']}</td>"
|
||||
f"<td style=\"padding:10px;border-top:1px solid #1F2937;color:#F8FAFC;\">{r['banking']}</td>"
|
||||
f"<td style=\"padding:10px;border-top:1px solid #1F2937;color:#F8FAFC;\">{r['invoice']}</td>"
|
||||
f"<td style=\"padding:10px;border-top:1px solid #1F2937;color:#F8FAFC;\">{r['receipt']}</td>"
|
||||
"</tr>"
|
||||
)
|
||||
store_summary_table_html = (
|
||||
"<div style=\"background:#111827;border-radius:12px;padding:12px;\">"
|
||||
"<div style=\"font-weight:700;color:#F8FAFC;margin-bottom:6px;\">Transaction Summary by Store</div>"
|
||||
"<table style=\"width:100%;border-collapse:collapse;font-size:12px;\">"
|
||||
+ header + "".join(body) + "</table></div>"
|
||||
)
|
||||
|
||||
html = f"""
|
||||
<div style="font-family: ui-sans-serif, system-ui, -apple-system, Segoe UI, Roboto, Helvetica, Arial; color:#0f172a; font-size:13px; line-height:1.5;">
|
||||
<p style="margin:0 0 8px 0">Hello <strong>Tucker Fresh</strong>,</p>
|
||||
<p style="margin:0 0 12px 0">Here's your daily digest of posted transactions:</p>
|
||||
{store_summary_table_html}
|
||||
<p style="margin:12px 0 6px 0">Thank you for staying updated with us.</p>
|
||||
<p style="margin:0">Best regards,<br/><strong>Workolik Team</strong></p>
|
||||
</div>
|
||||
"""
|
||||
return html
|
||||
|
||||
def send_email(self, recipients: List[str], subject: str, html: str) -> Tuple[bool, str]:
    """Send an HTML email to *recipients* (plus env-configured BCCs).

    Args:
        recipients: Visible "To" addresses.
        subject: Subject line.
        html: HTML body.

    Returns:
        (ok, message) — ``(True, "sent")`` on success, otherwise
        ``(False, <error text>)``. Never raises.
    """
    s = self.settings
    if not all([s.smtp_host, s.smtp_port, s.smtp_user, s.smtp_password, s.smtp_from_email]):
        return False, "SMTP settings are incomplete."

    # Optional BCC via env (comma-separated), default empty.
    # BCC addresses go in the SMTP envelope only, never in headers.
    bcc_env = os.getenv("BCC_RECIPIENTS", "").strip()
    bcc_recipients = [e.strip() for e in bcc_env.split(',') if e.strip()] if bcc_env else []
    all_recipients = recipients + bcc_recipients

    msg = MIMEMultipart("alternative")
    msg["From"] = f"{s.smtp_from_name} <{s.smtp_from_email}>"
    msg["To"] = ", ".join(recipients)
    msg["Subject"] = subject
    msg.attach(MIMEText(html, "html"))

    try:
        # Context manager guarantees the connection is closed (QUIT) even
        # when starttls/login/sendmail raises — the original called
        # server.quit() only on the success path and leaked the socket
        # on any failure after connect.
        with smtplib.SMTP(s.smtp_host, s.smtp_port, timeout=30) as server:
            if s.smtp_use_tls:
                server.starttls()
            server.login(s.smtp_user, s.smtp_password)
            server.sendmail(s.smtp_from_email, all_recipients, msg.as_string())
        return True, "sent"
    except Exception as e:
        # Deliberate broad catch: callers expect a (False, reason) tuple,
        # not an exception, for any SMTP/network failure.
        return False, str(e)
|
||||
|
||||
def log_email(self, recipients: List[str], subject: str, date_for: str, status: str, error: str | None = None) -> None:
    """Persist one email-send attempt (success or failure) to the email log table."""
    with SessionLocal() as db:
        db.add(
            EmailLog(
                recipients=", ".join(recipients),
                subject=subject,
                status=status,
                error=error,
                date_for=date_for,
            )
        )
        db.commit()
|
||||
|
||||
def has_sent_for_date(self, date_for: str) -> bool:
    """Return True if a successful send log exists for the given date."""
    with SessionLocal() as db:
        latest_sent = (
            db.query(EmailLog)
            .filter(EmailLog.date_for == date_for, EmailLog.status == "sent")
            .order_by(EmailLog.sent_at.desc())
            .first()
        )
    return latest_sent is not None
|
||||
|
||||
def recent_logs(self, limit: int = 50) -> list[dict]:
    """Return up to *limit* most-recent email log rows as plain dicts.

    Thin wrapper over the module-level cached query (results may be up
    to ~60s stale; see _get_recent_logs_cached).
    """
    return _get_recent_logs_cached(limit)
|
||||
|
||||
|
||||
@st.cache_data(ttl=60)  # cache results for one minute
def _get_recent_logs_cached(limit: int = 50) -> list[dict]:
    """Fetch the most recent email logs, newest first, as plain dicts.

    Module-level (not a method) so st.cache_data can hash the arguments.
    """
    fields = ("id", "sent_at", "recipients", "subject", "status", "error", "date_for")
    with SessionLocal() as db:
        records = (
            db.query(EmailLog)
            .order_by(EmailLog.sent_at.desc())
            .limit(limit)
            .all()
        )
        # Materialize to dicts inside the session so no lazy attribute
        # access happens after it closes.
        return [{name: getattr(rec, name) for name in fields} for rec in records]
|
||||
|
||||
52
app_core/services/mappings_service.py
Normal file
52
app_core/services/mappings_service.py
Normal file
@@ -0,0 +1,52 @@
|
||||
from typing import List, Optional
|
||||
from sqlalchemy.orm import Session
|
||||
from app_core.db.database import SessionLocal
|
||||
from app_core.db.models import TriumphDebtorMapping
|
||||
from datetime import datetime
|
||||
|
||||
class MappingsService:
    """CRUD helpers for TriumphDebtorMapping rows.

    Each call opens its own short-lived session, so returned ORM objects
    are detached from any session by the time the caller sees them.
    """

    def __init__(self):
        pass

    def get_all_mappings(self) -> List[TriumphDebtorMapping]:
        """Return every mapping, ordered by primary key ascending."""
        with SessionLocal() as db:
            query = db.query(TriumphDebtorMapping).order_by(TriumphDebtorMapping.id.asc())
            return query.all()

    def get_mapping_by_id(self, mapping_id: int) -> Optional[TriumphDebtorMapping]:
        """Return the mapping with the given id, or None if absent."""
        with SessionLocal() as db:
            return (
                db.query(TriumphDebtorMapping)
                .filter(TriumphDebtorMapping.id == mapping_id)
                .first()
            )

    def create_mapping(self, code: str, name: str, dbmacc: str, outlet: str) -> TriumphDebtorMapping:
        """Insert a new mapping and return it with its generated id."""
        new_row = TriumphDebtorMapping(code=code, name=name, dbmacc=dbmacc, outlet=outlet)
        with SessionLocal() as db:
            db.add(new_row)
            db.commit()
            db.refresh(new_row)  # populate server-generated fields (id, defaults)
            return new_row

    def update_mapping(self, mapping_id: int, code: str, name: str, dbmacc: str, outlet: str) -> bool:
        """Overwrite all editable fields of a mapping; return True if it existed."""
        with SessionLocal() as db:
            row = (
                db.query(TriumphDebtorMapping)
                .filter(TriumphDebtorMapping.id == mapping_id)
                .first()
            )
            if row is None:
                return False
            row.code = code
            row.name = name
            row.dbmacc = dbmacc
            row.outlet = outlet
            row.updated_at = datetime.now()
            db.commit()
            return True

    def delete_mapping(self, mapping_id: int) -> bool:
        """Delete a mapping by id; return True if a row was removed."""
        with SessionLocal() as db:
            row = (
                db.query(TriumphDebtorMapping)
                .filter(TriumphDebtorMapping.id == mapping_id)
                .first()
            )
            if row is None:
                return False
            db.delete(row)
            db.commit()
            return True
|
||||
89
app_core/services/scheduler_service.py
Normal file
89
app_core/services/scheduler_service.py
Normal file
@@ -0,0 +1,89 @@
|
||||
import logging
|
||||
import os
|
||||
from datetime import datetime
|
||||
from zoneinfo import ZoneInfo
|
||||
from apscheduler.schedulers.background import BackgroundScheduler
|
||||
from apscheduler.triggers.cron import CronTrigger
|
||||
from apscheduler.executors.pool import ThreadPoolExecutor
|
||||
from apscheduler.jobstores.memory import MemoryJobStore
|
||||
|
||||
from app_core.services.daily_report import main as run_daily_report
|
||||
|
||||
# Configure logging
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class SchedulerService:
    """Owns a BackgroundScheduler that triggers the daily email report.

    The report job fires once per day at 20:00 IST (Asia/Kolkata).
    """

    def __init__(self):
        self.scheduler = None  # created lazily in start_scheduler()
        self.ist = ZoneInfo("Asia/Kolkata")

    def start_scheduler(self):
        """Start the background scheduler for daily email reports (idempotent)."""
        if self.scheduler and self.scheduler.running:
            logger.info("Scheduler is already running")
            return

        # Configure job stores and executors
        jobstores = {
            'default': MemoryJobStore()
        }
        executors = {
            'default': ThreadPoolExecutor(20)
        }
        job_defaults = {
            'coalesce': False,    # run every missed fire individually
            'max_instances': 1    # never overlap two report runs
        }

        self.scheduler = BackgroundScheduler(
            jobstores=jobstores,
            executors=executors,
            job_defaults=job_defaults,
            timezone=self.ist
        )

        # Schedule daily email at 8:00 PM IST (20:00)
        self.scheduler.add_job(
            func=self._send_daily_report,
            trigger=CronTrigger(hour=20, minute=0, timezone=self.ist),
            id='daily_email_report',
            name='Daily Email Report',
            replace_existing=True
        )

        self.scheduler.start()
        logger.info("Daily email scheduler started - will send reports at 8:00 PM IST")

    def stop_scheduler(self):
        """Stop the background scheduler if it is running."""
        if self.scheduler and self.scheduler.running:
            self.scheduler.shutdown()
            logger.info("Daily email scheduler stopped")

    def _send_daily_report(self):
        """Job body: run the daily report and log the outcome (never raises)."""
        try:
            # Lazy %-style args: only formatted if the record is emitted.
            logger.info("Starting daily report at %s", datetime.now(self.ist))
            result = run_daily_report()
            if result == 0:
                logger.info("Daily report sent successfully")
            else:
                logger.warning("Daily report failed with exit code: %s", result)
        except Exception as e:
            # logger.exception records the full traceback; the original
            # logger.error(f"... {str(e)}") discarded it.
            logger.exception("Error sending daily report: %s", e)

    def get_next_run_time(self):
        """Return the next scheduled run time, or None if not scheduled/running."""
        if not self.scheduler or not self.scheduler.running:
            return None

        job = self.scheduler.get_job('daily_email_report')
        if job:
            return job.next_run_time
        return None

    def is_running(self):
        """Return True when a scheduler exists and is currently running."""
        return self.scheduler is not None and self.scheduler.running
|
||||
BIN
app_core/ui/__pycache__/auth_ui.cpython-312.pyc
Normal file
BIN
app_core/ui/__pycache__/auth_ui.cpython-312.pyc
Normal file
Binary file not shown.
BIN
app_core/ui/__pycache__/layout.cpython-312.pyc
Normal file
BIN
app_core/ui/__pycache__/layout.cpython-312.pyc
Normal file
Binary file not shown.
37
app_core/ui/auth_ui.py
Normal file
37
app_core/ui/auth_ui.py
Normal file
@@ -0,0 +1,37 @@
|
||||
import re
|
||||
import streamlit as st
|
||||
from app_core.services.auth_service import AuthService
|
||||
|
||||
|
||||
def _is_valid_email(value: str) -> bool:
|
||||
if not value:
|
||||
return False
|
||||
pattern = r"^[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,}$"
|
||||
return re.match(pattern, value.strip()) is not None
|
||||
|
||||
|
||||
def render_auth_card(auth_service: AuthService) -> None:
    """Render the centered login card and drive the sign-in flow.

    On a successful login the user object is stored in
    ``st.session_state.auth_user`` and the script is rerun so the app
    renders its authenticated view.
    """
    # Outer columns act as gutters; the middle one holds the card.
    left, center, right = st.columns([1, 1.2, 1])
    with center:

        st.markdown('<div class="auth-title">Welcome !!</div>', unsafe_allow_html=True)
        st.markdown('<div class="muted" style="margin-bottom:16px;">Sign in to continue</div>', unsafe_allow_html=True)

        with st.form("login_form", clear_on_submit=False):
            email = st.text_input("Email", placeholder="you@example.com", key="login_email")
            password = st.text_input("Password", type="password", placeholder="••••••••", key="login_password")
            submitted = st.form_submit_button("Sign in →", use_container_width=True)
            if submitted:
                # Cheap local validation before calling the auth service.
                if not _is_valid_email(email):
                    st.error("Enter a valid email address.")
                elif not password:
                    st.error("Password is required.")
                else:
                    ok, user, msg = auth_service.login(email, password)
                    if ok and user:
                        st.session_state.auth_user = user
                        st.success(msg)
                        st.rerun()  # rerun immediately as the logged-in view
                    else:
                        st.error(msg)
        # NOTE(review): this closes a div that is never opened in this
        # function — presumably a leftover wrapper closer; confirm and remove.
        st.markdown('</div>', unsafe_allow_html=True)
|
||||
314
app_core/ui/layout.py
Normal file
314
app_core/ui/layout.py
Normal file
@@ -0,0 +1,314 @@
|
||||
import streamlit as st
|
||||
from app_core.config.settings import STORES, STORE_CODE_TO_TENANT_ID
|
||||
|
||||
def apply_global_style(background_url: str | None = None) -> None:
    """Inject the app-wide CSS theme and, optionally, a 50%-opacity page background.

    Args:
        background_url: http(s) URL or local file path (``@`` prefix tolerated;
            bare ``bg.jpg`` / ``workolik.png`` are resolved under ``assets/``).
            Local files are inlined as base64 data URLs. Failures are swallowed
            so a bad image never breaks rendering.
    """
    css = """
    <style>
    /* Fix: --brand was empty (invalid CSS), which silently broke every
       var(--brand) gradient. #1662F3 matches the rgba(22,98,243,...) accents
       used throughout this stylesheet. */
    :root { --brand: #1662F3; --brandDark: #0F4FD6; --text: #0F172A; --muted: #64748b; --border: rgba(15, 23, 42, 0.08); --inputBorder: rgba(22,98,243,0.35); }
    .stApp { background: transparent !important; position: relative !important; min-height: 100vh; font-size: 17px; }
    [data-testid="stAppViewContainer"] { background: transparent !important; }
    [data-testid="stAppViewContainer"] > header, [data-testid="stHeader"], .stApp header { background: transparent !important; box-shadow: none !important; }
    .stAppToolbar { background: transparent !important; box-shadow: none !important; border-bottom: none !important; }


    /* Header sections */
    .tfw-header { background: #FFFFFF; border-bottom: 1px solid var(--border); }
    .tfw-header-white { background: #FFFFFF; padding: 16px 0; }
    .tfw-header-grey { background: #F8FAFC; padding: 12px 0; border-top: 1px solid rgba(15, 23, 42, 0.00); }
    .tfw-header-content { max-width: 1200px; margin: 0 auto; padding: 0 20px; }

    /* Topbar (same height, narrower sides).
       Fix: a missing ";" between backdrop-filter and border-bottom made the
       parser drop both declarations. */
    .tfw-topbar { position: fixed; top: 0; left: 0; right: 0; height: 48px; background: rgba(248,250,252,0.6); backdrop-filter: saturate(180%); border-bottom: none; z-index: 999; }
    .tfw-topbar .tfw-inner { height: 48px; display: flex; align-items: center; gap: 8px; padding: 0 8px; max-width: 1100px; margin: 0 auto; }
    .tfw-logo { width: 18px; height: 18px; border-radius: 6px; background: linear-gradient(135deg, var(--brand), var(--brandDark)); display: inline-block; }
    .tfw-title { font-weight: 800; color: var(--text); letter-spacing: -0.02em; font-size: 1.0rem; }

    /* Sidebar enhancements */
    [data-testid="stSidebar"] .sidebar-content { display: flex; flex-direction: column; height: 100%; font-size: 1rem; }
    [data-testid="stSidebar"] .sidebar-section { padding: 8px 6px; }
    [data-testid="stSidebar"] .sidebar-spacer { flex: 1 1 auto; }
    [data-testid="stSidebar"] .sidebar-logout { padding: 10px 6px; border-top: 1px solid var(--border); }
    [data-testid="stSidebar"] .sidebar-logout button { width: 100%; border-radius: 10px; }

    /* Sidebar logo */
    .sidebar-logo { display: flex; align-items: center; gap: 12px; padding: 16px 20px; border-bottom: 1px solid var(--border); margin-bottom: 8px; }
    .sidebar-logo-icon { width: 32px; height: 32px; border-radius: 8px; overflow: hidden; }
    .sidebar-logo-icon img { width: 100%; height: 100%; object-fit: contain; }
    .sidebar-logo-text { font-weight: 800; color: var(--text); font-size: 1.1rem; letter-spacing: -0.02em; }

    /* Auth card (extra-slim, centered) */
    .auth-card { position: relative; padding: 24px; background: rgba(255,255,255,0.85); backdrop-filter: blur(8px); border: 1px solid rgba(22,98,240,0.22); border-radius: 16px; box-shadow: 0 18px 40px rgba(2,6,23,0.10); transition: box-shadow .2s ease, transform .2s ease; max-width: 520px; width: 100%; margin: 0 auto; }
    .auth-card.auth-slim { max-width: 420px; }
    .auth-card.auth-xs { max-width: 360px; }
    .auth-card::before { content: ""; position: absolute; top: 0; left: 0; right: 0; height: 6px; background: linear-gradient(90deg, #22C55E, #16A34A, #0ea5e9); border-top-left-radius: 16px; border-top-right-radius: 16px; }
    .auth-card:hover { box-shadow: 0 26px 56px rgba(2,6,23,0.16); transform: translateY(-2px); }

    /* Success ribbon shown on login */
    .login-success { background: linear-gradient(90deg, #22C55E, #16A34A); color: #fff; border-radius: 12px; padding: 10px 14px; box-shadow: 0 10px 24px rgba(34,197,94,0.35); display: flex; align-items: center; gap: 8px; font-weight: 700; }
    .login-success .emoji { filter: drop-shadow(0 4px 8px rgba(0,0,0,0.2)); }

    .auth-title { margin: 6px 0 8px 0; font-size: 1.8rem; font-weight: 800; color: var(--text); letter-spacing: -0.02em; text-align: center; }
    .muted { color: var(--muted); font-size: 1.0rem; text-align: center; }

    /* Inputs: light blue border (global) */
    div[data-testid="stTextInput"] input,
    div[data-testid="stPassword"] input,
    textarea {
        border-radius: 10px !important;
        border: 1px solid var(--inputBorder) !important;
        box-shadow: inset 0 1px 2px rgba(2,6,23,0.04) !important;
        background: #FFFFFF !important;
    }
    /* Prevent outer wrapper hover/focus rings (avoid double boxes) */
    div[data-baseweb="input"]:hover, div[data-baseweb="input"]:focus-within,
    div[data-baseweb="textarea"]:hover, div[data-baseweb="textarea"]:focus-within,
    div[data-testid="stTextInput"] > div:hover, div[data-testid="stTextInput"] > div:focus-within,
    div[data-testid="stTextInput"] > div > div:hover, div[data-testid="stTextInput"] > div > div:focus-within,
    div[data-testid="stPassword"] > div:hover, div[data-testid="stPassword"] > div:focus-within,
    div[data-testid="stPassword"] > div > div:hover, div[data-testid="stPassword"] > div > div:focus-within {
        outline: none !important; box-shadow: none !important; border-color: transparent !important;
    }
    /* Subtle inner hover/focus on the actual field only */
    div[data-testid="stTextInput"] input:hover,
    div[data-testid="stPassword"] input:hover,
    textarea:hover { border-color: var(--inputBorder) !important; box-shadow: inset 0 0 0 1px rgba(22,98,243,0.25) !important; }
    div[data-testid="stTextInput"] input:focus,
    div[data-testid="stPassword"] input:focus,
    textarea:focus { outline: none !important; border-color: var(--inputBorder) !important; box-shadow: inset 0 0 0 1px rgba(22,98,243,0.45) !important; }

    /* Constrain Streamlit form width regardless of dynamic class names */
    form[class*="stForm"], div[class*="stForm"] { max-width: 760px !important; margin-left: auto !important; margin-right: auto !important; padding-left: 8px !important; padding-right: 8px !important; }

    /* Password field styling - expand to match email box and position eye icon */
    div[data-testid="stPassword"] { width: 100% !important; }
    div[data-testid="stPassword"] input { width: 100% !important; padding-right: 60px !important; }
    div[data-testid="stPassword"] button { position: absolute !important; right: 8px !important; top: 50% !important; transform: translateY(-50%) !important; background: none !important; border: none !important; padding: 4px !important; margin: 0 !important; }
    div[data-testid="stPassword"] button:hover { background: rgba(0,0,0,0.05) !important; border-radius: 4px !important; }

    /* Buttons: global size + hover/transition */
    .stButton > button, [data-testid="stDownloadButton"] button { height: 40px; font-size: 0.95rem; border-radius: 10px; transition: transform .15s ease, box-shadow .15s ease; }
    .stButton > button:hover, [data-testid="stDownloadButton"] button:hover { transform: translateY(-1px); box-shadow: 0 10px 18px rgba(22,98,243,0.25); }

    .auth-card .stCheckbox { font-size: 1.0rem; }

    /* Auth buttons inherit global size but keep gradient */
    .auth-card .stButton > button { background: linear-gradient(135deg, #22C55E, #16A34A); color: #fff; font-weight: 800; letter-spacing: .2px; border: none; box-shadow: 0 10px 18px rgba(34,197,94,0.28); }
    .auth-card .stButton > button:hover { filter: brightness(0.98); }

    /* Match info alert with content card look */
    div[role="alert"] { background: #F6FAFF !important; border: 1px solid rgba(22,98,243,0.18) !important; color: var(--text) !important; }

    /* DataFrame font sizes */
    div[data-testid="stDataFrame"] table { font-size: 0.98rem; }
    div[data-testid="stDataFrame"] th { font-size: 1.0rem; }
    </style>
    """
    st.markdown(css, unsafe_allow_html=True)

    # Optional login/background image with 50% transparency
    if background_url:
        # Support @prefix and local files by embedding as base64 when needed
        try:
            import os
            import base64
            url = background_url.lstrip('@').strip()
            if url.startswith('http://') or url.startswith('https://'):
                data_url = url
            else:
                # Treat as local file path; map shorthand names to assets/
                if url in {"bg.jpg", "workolik.png"}:
                    url = os.path.join("assets", url)
                if os.path.exists(url):
                    ext = os.path.splitext(url)[1].lower()
                    mime = 'image/jpeg' if ext in ['.jpg', '.jpeg'] else 'image/png' if ext == '.png' else 'image/webp' if ext == '.webp' else 'image/*'
                    with open(url, 'rb') as f:
                        b64 = base64.b64encode(f.read()).decode()
                    data_url = f'data:{mime};base64,{b64}'
                else:
                    data_url = url  # fallback; let browser try
            st.markdown(
                f"""
                <style>
                .stApp::before {{
                    content: "";
                    position: fixed;
                    inset: 0;
                    z-index: 0;
                    background-image: url('{data_url}');
                    background-size: cover;
                    background-position: center;
                    background-repeat: no-repeat;
                    opacity: 0.5; /* 50% transparent */
                    filter: saturate(110%);
                }}
                </style>
                """,
                unsafe_allow_html=True,
            )
        except Exception:
            # Best-effort: a bad path or unreadable file must never break
            # page rendering.
            pass
|
||||
|
||||
|
||||
def render_header(brand_name: str = "Workolik") -> None:
    """Render the white page-header band (currently an empty styled div).

    NOTE(review): brand_name is accepted for interface parity but is not
    rendered anywhere in the emitted HTML — confirm whether that is intended.
    """
    header_html = """
        <div class="tfw-header">

        </div>
        """
    st.markdown(header_html, unsafe_allow_html=True)
|
||||
|
||||
|
||||
def render_topbar(brand_name: str = "") -> None:
    """Render the fixed, translucent top bar: logo mark plus *brand_name* text.

    The ``\\n`` escapes below are real newlines inside the f-string; the
    ``\\"`` escapes are plain quotes (redundant inside triple quotes but
    harmless).
    """
    st.markdown(
        f"""
        <div class=\"tfw-topbar\">\n <div class=\"tfw-inner\">\n <span class=\"tfw-logo\"></span>\n <span class=\"tfw-title\">{brand_name}</span>\n </div>\n</div>
        """,
        unsafe_allow_html=True,
    )
|
||||
|
||||
|
||||
def render_sidebar_logo(brand_name: str = "Workolik") -> None:
    """Render the sidebar brand block: logo image plus *brand_name*.

    The logo is read from assets/workolik.png and inlined as base64 so it
    survives Streamlit's static-file handling. If the file is missing or
    unreadable, a gradient "TW" monogram is rendered instead.
    """
    # st is already imported at module level; only the helpers are local.
    import base64
    import os

    try:
        # Read the image file and encode it as base64
        logo_path = os.path.join("assets", "workolik.png")
        if os.path.exists(logo_path):
            with open(logo_path, "rb") as img_file:
                img_data = base64.b64encode(img_file.read()).decode()

            st.markdown(
                f"""
                <div class="sidebar-logo">
                    <div class="sidebar-logo-icon">
                        <img src="data:image/png;base64,{img_data}" alt="Workolik Logo" style="width: 100%; height: 100%; object-fit: contain;" />
                    </div>
                    <div class="sidebar-logo-text">{brand_name}</div>
                </div>
                """,
                unsafe_allow_html=True,
            )
        else:
            raise FileNotFoundError("Logo file not found")
    except Exception:
        # Fallback to text logo if image fails to load (the bound exception
        # was never used, so it is no longer captured).
        st.markdown(
            f"""
            <div class="sidebar-logo">
                <div class="sidebar-logo-icon" style="background: linear-gradient(135deg, var(--brand), var(--brandDark)); display: flex; align-items: center; justify-content: center; color: white; font-weight: 700; font-size: 14px;">TW</div>
                <div class="sidebar-logo-text">{brand_name}</div>
            </div>
            """,
            unsafe_allow_html=True,
        )
|
||||
|
||||
|
||||
def render_store_selector() -> tuple[int | None, str | None]:
    """Render a compact, classy store selector box.

    Returns (tenant_id, label). Also persists selection in session_state
    under "tenant_id" and "store_label", and resets "division_code".
    """
    # Banner card introducing the selector.
    st.markdown(
        """
        <div style="
            margin: 8px 0 12px 0; padding: 16px 18px;
            border: 1px solid var(--border);
            background: linear-gradient(135deg,#ffffff, #f8fbff);
            border-radius: 16px;
            box-shadow: 0 10px 24px rgba(2,6,23,0.06);
        ">
          <div style="display:flex; align-items:center; justify-content:space-between; gap:12px;">
            <div style="display:flex; align-items:center; gap:10px; color: var(--text);">
              <span style="display:inline-flex;align-items:center;justify-content:center;width:24px;height:24px;border-radius:999px;background:linear-gradient(135deg,#22C55E,#16A34A);box-shadow:0 4px 10px rgba(34,197,94,0.25);">🛍️</span>
              <div style="font-weight: 700; letter-spacing:-0.01em;">Choose the store you want to view</div>
              <span style="opacity:0.9">✨</span>
            </div>
            <div style="font-size:12px;color:#64748b;">👉 Click a card to select</div>
          </div>
        </div>
        """,
        unsafe_allow_html=True,
    )

    # Sub caption + clear selection
    current_label = st.session_state.get("store_label")
    left_cap, right_clear = st.columns([6, 1])
    with left_cap:
        st.caption("Please choose a store before surfing !")
        if current_label:
            st.caption(f"Selected: {current_label}")
    with right_clear:
        if st.button("Clear", key="clear_store_sel"):
            st.session_state["tenant_id"] = None
            st.session_state["store_label"] = None
            # Compatibility shim: older Streamlit exposes experimental_rerun.
            st.experimental_rerun() if hasattr(st, "experimental_rerun") else st.rerun()

    # We no longer use query params; selection happens in-session only
    chosen_from_query: str | None = None

    # Grid of store boxes (soft gradient cards with per-store colors and emojis)
    emoji_map = {"PC": "🍷", "IP": "🍻", "ML": "🥂", "CL4": "🍸", "NL": "🥃", "CL6": "🍾", "RC": "🍹", "PL": "🍺"}
    color_rgb = {"PC": (37,99,235), "IP": (22,163,74), "ML": (245,158,11), "CL4": (220,38,38), "NL": (124,58,237), "CL6": (234,179,8), "RC": (6, 182, 212), "PL": (236, 72, 153)}
    preselect_label = st.session_state.get("store_label")
    chosen_label = None
    # No search box; show all stores
    filtered_stores = STORES
    # Always render 3 columns per row (e.g., 3 + 3 for 6 stores)
    rows = [filtered_stores[i:i+3] for i in range(0, len(filtered_stores), 3)]
    for row in rows:
        cols = st.columns(3)
        for i, store in enumerate(row):
            with cols[i]:
                icon = emoji_map.get(store["code"], "🏬")
                r, g, b = color_rgb.get(store["code"], (14,165,233))
                is_selected = (preselect_label == store["label"]) or (chosen_from_query == store["label"]) # highlight current
                border_alpha = 0.48 if is_selected else 0.28
                shadow = "0 18px 42px rgba(2,6,23,0.16)" if is_selected else "0 12px 28px rgba(2,6,23,0.10)"
                border_width = "2px" if is_selected else "1px"
                check = " ✅" if is_selected else ""
                # Render a card-like button that sets selection without changing URL
                clicked = st.button(
                    f"{icon} {store['label']}{check}",
                    key=f"store_card_{store['code']}",
                    use_container_width=True,
                    type="secondary",
                )
                # Lightweight card styling via inline CSS targeting this button.
                # NOTE(review): this selects button#store_card_<code>, but
                # Streamlit widget keys are not emitted as DOM ids — verify
                # this CSS actually matches anything in the rendered page.
                st.markdown(
                    f"""
                    <style>
                    div[data-testid='stButton'] button#store_card_{store['code']} {{
                        background: linear-gradient(135deg, rgba({r},{g},{b},0.12), rgba({r},{g},{b},0.20));
                        border: {border_width} solid rgba({r},{g},{b},{border_alpha});
                        border-radius: 18px; padding: 18px; box-shadow: {shadow};
                        color: #0F172A; font-weight: 800; text-align: left;
                    }}
                    div[data-testid='stButton'] button#store_card_{store['code']}:hover {{
                        transform: translateY(-2px); box-shadow: 0 22px 48px rgba(2,6,23,0.18);
                    }}
                    </style>
                    """,
                    unsafe_allow_html=True,
                )
                if clicked:
                    st.session_state["tenant_id"] = store["tenant_id"]
                    st.session_state["store_label"] = store["label"]
                    chosen_label = store["label"]
                    st.rerun()

    # Resolve tenant_id
    effective_label = chosen_label or preselect_label
    selected = next((s for s in STORES if s["label"] == effective_label), None)
    tenant_id = selected["tenant_id"] if selected else None

    # Persist
    st.session_state["tenant_id"] = tenant_id
    st.session_state["store_label"] = selected["label"] if selected else None
    st.session_state["division_code"] = None

    return tenant_id, (selected["label"] if selected else None)
|
||||
BIN
ascii_report.txt
Normal file
BIN
ascii_report.txt
Normal file
Binary file not shown.
BIN
assets/bg.jpg
Normal file
BIN
assets/bg.jpg
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 535 KiB |
BIN
assets/workolik.png
Normal file
BIN
assets/workolik.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 3.9 KiB |
BIN
db_cols.txt
Normal file
BIN
db_cols.txt
Normal file
Binary file not shown.
38
docker-compose.yml
Normal file
38
docker-compose.yml
Normal file
@@ -0,0 +1,38 @@
|
||||
version: "3.9"
|
||||
|
||||
services:
|
||||
workolik:
|
||||
build: .
|
||||
container_name: workolik
|
||||
env_file: .env
|
||||
environment:
|
||||
- TZ=Asia/Kolkata
|
||||
- STREAMLIT_LOG_LEVEL=info
|
||||
restart: unless-stopped
|
||||
labels:
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.routers.workolik.rule=Host(`tuckerfresh.workolik.com`)"
|
||||
- "traefik.http.routers.workolik.entrypoints=websecure"
|
||||
- "traefik.http.routers.workolik.tls=true"
|
||||
- "traefik.http.routers.workolik.tls.certresolver=letsencrypt"
|
||||
- "traefik.http.services.workolik.loadbalancer.server.port=8501"
|
||||
networks:
|
||||
- frontend
|
||||
command: ["/bin/sh", "-c", "python scripts/validate_setup.py && echo 'Starting Streamlit...' && streamlit run app.py --server.port=8501 --server.address=0.0.0.0 --server.headless=true"]
|
||||
|
||||
workolik-scheduler:
|
||||
build: .
|
||||
container_name: workolik-scheduler
|
||||
env_file: .env
|
||||
environment:
|
||||
- TZ=Asia/Kolkata
|
||||
restart: unless-stopped
|
||||
networks:
|
||||
- frontend
|
||||
command: ["/bin/sh", "-c", "python scripts/validate_setup.py && python scripts/scheduler_standalone.py"]
|
||||
|
||||
networks:
|
||||
frontend:
|
||||
external: true
|
||||
|
||||
|
||||
61
docs/sample_email.txt
Normal file
61
docs/sample_email.txt
Normal file
@@ -0,0 +1,61 @@
|
||||
Subject: Daily Digest - YYYY-MM-DD
|
||||
From: Workolik Team <no-reply@example.com>
|
||||
To: alice@example.com, bob@example.com
|
||||
|
||||
Hello Tucker Fresh,
|
||||
|
||||
Here’s your daily digest of posted transactions.
|
||||
|
||||
Per-store summary (stacked lines: total — event id status):
|
||||
|
||||
Porters Liquor Claremont - PC
|
||||
- Journal: 3,881.24 — 2904783 ✅
|
||||
- Banking Journal: 0.00 — —
|
||||
- Account Sales: 2,116.55 — 188210 ✅
|
||||
- Account Payments: 980.00 — 188050 ✅
|
||||
|
||||
Porters Iluka - IP
|
||||
- Journal: 540.00 — 2905100 ✅
|
||||
- Banking Journal: 0.00 — —
|
||||
- Account Sales: 320.00 — 188305 ✅
|
||||
- Account Payments: 0.00 — —
|
||||
|
||||
Cellarbrations at Morris Place - ML
|
||||
- Journal: 1,592.10 — 2904783 ✅
|
||||
- Banking Journal: 50.00 — 99001 ⚠️
|
||||
- Account Sales: 560.00 — 188210 ✅
|
||||
- Account Payments: 0.00 — —
|
||||
|
||||
Cellarbrations at Lynwood - CL (Store 4)
|
||||
- Journal: 225.00 — 2906000 ✅
|
||||
- Banking Journal: 0.00 — —
|
||||
- Account Sales: Nill
|
||||
- Account Payments: Nill
|
||||
|
||||
Cellarbrations at Nicholson Road - NL
|
||||
- Journal: 410.75 — 2907000 ✅
|
||||
- Banking Journal: 0.00 — —
|
||||
- Account Sales: 120.00 — 188500 ✅
|
||||
- Account Payments: 0.00 — —
|
||||
|
||||
Cellarbrations at Lynwood - CL (Store 6)
|
||||
- Journal: 300.00 — 2908000 ✅
|
||||
- Banking Journal: 0.00 — —
|
||||
- Account Sales: Nill
|
||||
- Account Payments: Nill
|
||||
|
||||
|
||||
Thank you for staying updated with us.
|
||||
|
||||
Best regards,
|
||||
Workolik Team
|
||||
|
||||
---
|
||||
How this mail is triggered (summary):
|
||||
1) scheduler_standalone.py runs a daily job at 20:00 IST (Asia/Kolkata).
|
||||
2) It calls app_core/services/daily_report.py:main().
|
||||
3) That loads the full-day data, builds HTML via MailerService.build_email_html()
|
||||
including the per-store totals and Event IDs matrix, then sends via SMTP.
|
||||
4) Recipients from REPORT_RECIPIENTS; duplicate sends are avoided by DB check
|
||||
against email_logs.date_for.
|
||||
|
||||
8
inspect_db.py
Normal file
8
inspect_db.py
Normal file
@@ -0,0 +1,8 @@
|
||||
"""One-off diagnostic: print the column names of the tenantpostings table."""
from app_core.db.database import engine
from sqlalchemy import text
import pandas as pd

with engine.connect() as conn:
    # A LIMIT 1 fetch is enough — we only need the column metadata.
    sample = pd.read_sql(text('SELECT * FROM "tenantpostings" LIMIT 1'), conn)
    print("Columns in tenantpostings:")
    print(", ".join(list(sample.columns)))
|
||||
BIN
pages/__pycache__/mailer.cpython-312.pyc
Normal file
BIN
pages/__pycache__/mailer.cpython-312.pyc
Normal file
Binary file not shown.
BIN
pages/__pycache__/mappings.cpython-312.pyc
Normal file
BIN
pages/__pycache__/mappings.cpython-312.pyc
Normal file
Binary file not shown.
BIN
pages/__pycache__/see_logs.cpython-312.pyc
Normal file
BIN
pages/__pycache__/see_logs.cpython-312.pyc
Normal file
Binary file not shown.
BIN
pages/__pycache__/see_payload.cpython-312.pyc
Normal file
BIN
pages/__pycache__/see_payload.cpython-312.pyc
Normal file
Binary file not shown.
118
pages/mailer.py
Normal file
118
pages/mailer.py
Normal file
@@ -0,0 +1,118 @@
|
||||
import streamlit as st
|
||||
import pandas as pd
|
||||
from datetime import date, datetime
|
||||
from zoneinfo import ZoneInfo
|
||||
from app_core.services.mailer_service import MailerService
|
||||
|
||||
|
||||
def render_page():
    """Render the Mailer page: scheduler status, aggregate send metrics,
    a manual send/resend control, and the email log table.

    Requires an authenticated user in ``st.session_state['auth_user']``.
    """
    if st.session_state.get("auth_user") is None:
        st.warning("Please login to continue.")
        st.stop()

    st.markdown("## Mailer")
    st.caption("Automated daily email sending logs and status.")

    service = MailerService()
    ist = ZoneInfo("Asia/Kolkata")

    # Fetch the recent log window ONCE and derive every metric from it.
    # The previous revision issued the same recent_logs(limit=1000) query
    # three separate times for the widgets below.
    all_logs = service.recent_logs(limit=1000)
    today_logs = [log for log in all_logs if log.get('date_for') == str(date.today())]

    st.markdown("### Scheduler Status")

    # Check if scheduler container is running by checking if daily report was sent today
    daily_report_sent_today = any('Daily Report' in str(log.get('subject', '')) for log in today_logs)

    if daily_report_sent_today:
        st.success("Scheduler is running - Daily report already sent today")
    else:
        # Check if it's past 8 PM IST today
        now_ist = datetime.now(ist)
        eight_pm_today = now_ist.replace(hour=20, minute=0, second=0, microsecond=0)

        if now_ist >= eight_pm_today:
            st.warning("Scheduler is running - Waiting for next scheduled run (8:00 PM IST daily)")
        else:
            next_run_ist = eight_pm_today
            st.success(f"Scheduler is running - Next report will be sent at {next_run_ist.strftime('%B %d, %Y at %I:%M %p IST')}")

    st.markdown("---")

    # Show system status
    st.markdown("### System Status")
    col1, col2, col3 = st.columns(3)

    with col1:
        st.metric("Total Emails Sent", len([log for log in all_logs if log.get('status') == 'sent']))

    with col2:
        failed_count = len([log for log in all_logs if log.get('status') == 'failed'])
        st.metric("Failed Sends", failed_count, delta=f"-{failed_count}" if failed_count > 0 else None)

    with col3:
        st.metric("Today's Sends", len(today_logs))

    st.markdown("---")

    # Manual trigger section
    st.markdown("### Manual Controls")
    mcol1, mcol2, mcol3 = st.columns([2, 2, 3])

    with mcol1:
        target_date = st.date_input("Report Date", value=date.today())

    with mcol2:
        st.write("")  # Spacer
        force_resend = st.checkbox("Force Resend", value=True, help="Send the report even if it was already sent for this date.")

    with mcol3:
        st.write("")  # Spacer
        if st.button("Send Report Now", type="primary", use_container_width=True):
            with st.spinner(f"Sending report for {target_date}..."):
                try:
                    # Imported lazily so this page loads even if the report
                    # module pulls in heavy dependencies.
                    from app_core.services.daily_report import main as run_daily_report
                    # Pass the selected date and force flag
                    result = run_daily_report(for_date=str(target_date), force=force_resend)
                    if result == 0:
                        st.success(f"Report for {target_date} sent successfully!")
                        st.rerun()
                    else:
                        st.error(f"Failed to send report (exit code: {result})")
                except Exception as e:
                    st.error(f"Error: {str(e)}")

    st.caption("Select a date to manually trigger or re-trigger the daily report email. This is useful for reconciliations.")

    st.markdown("---")

    # Show email logs table (separate, smaller query: only the latest 100)
    st.markdown("### Email Logs")
    logs = service.recent_logs(limit=100)
    if not logs:
        st.info("No email logs yet. Automated emails will appear here once sent.")
    else:
        df_logs = pd.DataFrame(logs)
        col_map = {
            "id": "ID",
            "sent_at": "Sent At",
            "recipients": "Recipients",
            "subject": "Subject",
            "status": "Status",
            "error": "Error",
            "date_for": "Report Date",
        }
        df_logs = df_logs[["id", "sent_at", "date_for", "recipients", "subject", "status", "error"]]
        df_logs = df_logs.rename(columns=col_map)

        # Add status styling
        def style_status(val):
            if val == 'sent':
                return 'background-color: #D1FAE5; color: #065F46; font-weight: 600;'
            elif val == 'failed':
                return 'background-color: #FEE2E2; color: #991B1B; font-weight: 600;'
            return ''

        styled_logs = df_logs.style.map(style_status, subset=['Status'])
        st.dataframe(styled_logs, use_container_width=True, height=400)
|
||||
|
||||
# trigger reload
|
||||
189
pages/mappings.py
Normal file
189
pages/mappings.py
Normal file
@@ -0,0 +1,189 @@
|
||||
import streamlit as st
|
||||
import pandas as pd
|
||||
from app_core.services.mappings_service import MappingsService
|
||||
from app_core.config.settings import STORES
|
||||
|
||||
|
||||
def render_page():
    """Render the Triumph debtor-mappings admin page.

    Tab 1 is a filterable, inline-editable grid of existing mappings
    (edits/deletions persist via MappingsService); Tab 2 is a form to
    create a new mapping tied to a store.
    """
    if st.session_state.get("auth_user") is None:
        st.warning("Please login to continue.")
        st.stop()

    st.markdown("""
    <style>
    .stApp { font-size: 1.05rem; }
    [data-testid="stDataEditor"] { font-size: 1.05rem !important; }
    h2 { font-weight: 700 !important; letter-spacing: -0.02em !important; }
    h3 { font-weight: 600 !important; color: #6366f1 !important; margin-top: 1.2rem !important; }
    .store-pill {
        display: inline-block;
        padding: 4px 14px;
        border-radius: 20px;
        font-size: 0.85rem;
        font-weight: 600;
        margin: 3px 4px;
        background: linear-gradient(135deg, #6366f1, #8b5cf6);
        color: white;
    }
    </style>
    """, unsafe_allow_html=True)

    st.markdown("## 📋 Triumph Debtor Mappings")
    st.caption("Manage POS account sale mappings to Triumph debtor codes — filtered by store.")

    service = MappingsService()
    all_mappings = service.get_all_mappings()

    # Store labels from config — used only for the "Add New" dropdown
    store_labels = [s["label"] for s in STORES]

    # Placeholder entry for the store dropdown. BUG FIX: the submit handler
    # previously compared against the literal "— Select a Store —", which
    # never matched this option, so a mapping could be created with the
    # placeholder text as its outlet.
    placeholder_option = "Select a Store"

    tab1, tab2 = st.tabs(["🔍 View & Search", "➕ Add New Mapping"])

    # ── TAB 1: View & Edit ────────────────────────────────────────────────────
    with tab1:
        st.markdown("### 🔍 Current Mappings")

        if not all_mappings:
            st.info("No mappings found. Use the '➕ Add New Mapping' tab to create one.")
        else:
            # Build dataframe from raw DB values
            data = [
                {
                    "ID": m.id,
                    "POS Code": m.code or "",
                    "Account Name": m.name or "",
                    "Triumph Code": m.dbmacc or "",
                    "Outlet": (m.outlet or "").strip(),
                    "Created At": m.created_at.strftime("%Y-%m-%d %H:%M") if m.created_at else "—",
                    "Updated At": m.updated_at.strftime("%Y-%m-%d %H:%M") if m.updated_at else "—",
                }
                for m in all_mappings
            ]
            df_full = pd.DataFrame(data)

            # Distinct outlet names actually in DB
            distinct_outlets = sorted([
                o for o in df_full["Outlet"].dropna().unique().tolist() if o.strip()
            ])

            f1, f2 = st.columns([1, 2])
            with f1:
                selected_store = st.selectbox(
                    "🏪 Filter by Store",
                    options=["All Stores"] + distinct_outlets,
                    index=0,
                )
            with f2:
                search_query = st.text_input(
                    "🔎 Search",
                    placeholder="POS Code, Account Name, or Triumph Code…",
                )

            df = df_full.copy()

            if selected_store != "All Stores":
                df = df[df["Outlet"] == selected_store]

            if search_query:
                q = search_query
                df = df[
                    df["POS Code"].str.contains(q, case=False, na=False) |
                    df["Account Name"].str.contains(q, case=False, na=False) |
                    df["Triumph Code"].str.contains(q, case=False, na=False)
                ]

            store_label = selected_store if selected_store != "All Stores" else "all stores"
            st.caption(f"Showing **{len(df)}** mapping(s) for **{store_label}**.")

            st.markdown("#### 📝 Edit Mappings")
            st.caption("Double-click any editable cell to modify. Changes are saved when you press Enter.")

            st.data_editor(
                df,
                hide_index=True,
                use_container_width=True,
                num_rows="dynamic",
                disabled=["ID", "Created At", "Updated At"],
                column_config={
                    "ID": st.column_config.NumberColumn(format="%d", width="small"),
                    "POS Code": st.column_config.TextColumn(max_chars=50, width="medium"),
                    "Account Name": st.column_config.TextColumn(max_chars=255, width="large"),
                    "Triumph Code": st.column_config.TextColumn(max_chars=50, width="medium"),
                    "Outlet": st.column_config.TextColumn(max_chars=255, width="large"),
                    "Created At": st.column_config.TextColumn(width="medium"),
                    "Updated At": st.column_config.TextColumn(width="medium"),
                },
                key="mapping_editor_v2",
            )

            if st.session_state.get("mapping_editor_v2"):
                edited_rows = st.session_state.mapping_editor_v2.get("edited_rows", {})
                deleted_rows = st.session_state.mapping_editor_v2.get("deleted_rows", [])

                if edited_rows or deleted_rows:
                    changes_made = False

                    # edited_rows maps the row's positional index in the
                    # (filtered) frame to a partial {column: new_value} patch.
                    for idx, patch in edited_rows.items():
                        mapping_id = df.iloc[idx]["ID"]
                        row = df.iloc[idx]
                        new_code = patch.get("POS Code", row["POS Code"])
                        new_name = patch.get("Account Name", row["Account Name"])
                        new_triumph = patch.get("Triumph Code", row["Triumph Code"])
                        new_outlet = patch.get("Outlet", row["Outlet"])
                        if service.update_mapping(mapping_id, new_code, new_name, new_triumph, new_outlet):
                            changes_made = True

                    for idx in deleted_rows:
                        if service.delete_mapping(df.iloc[idx]["ID"]):
                            changes_made = True

                    if changes_made:
                        st.toast("✅ Mappings updated and synced!", icon="🚀")
                        st.rerun()

    # ── TAB 2: Add New ────────────────────────────────────────────────────────
    with tab2:
        st.markdown("### ➕ Create New Mapping")
        st.caption("All fields are mandatory.")

        with st.form("new_mapping_form", clear_on_submit=True):
            c1, c2 = st.columns(2)
            with c1:
                new_code = st.text_input("POS Code", placeholder="e.g. 0273",
                                         help="Unique identifier from your POS system.")
                new_name = st.text_input("Account Sale Name", placeholder="e.g. Suriya",
                                         help="The name as it appears on account invoices.")
            with c2:
                new_triumph = st.text_input("Triumph Debtor Code (DBMACC#)", placeholder="e.g. SURI0273",
                                            help="The debtor code in Triumph ERP.")
                new_outlet = st.selectbox(
                    "Store / Outlet",
                    options=[placeholder_option] + store_labels,
                    index=0,
                    help="Select the store this mapping belongs to.",
                )

            st.markdown("<br>", unsafe_allow_html=True)
            if st.form_submit_button("Create Mapping", type="primary", use_container_width=True):
                # Compare against the SAME sentinel shown in the dropdown.
                if not all([new_code.strip(), new_name.strip(), new_triumph.strip()]) or new_outlet == placeholder_option:
                    st.error("⚠️ All fields are required — including selecting a store.")
                else:
                    service.create_mapping(new_code.strip(), new_name.strip(), new_triumph.strip(), new_outlet)
                    st.success(f"✅ Mapping for **{new_name}** created under **{new_outlet}**!")
                    st.balloons()
                    st.rerun()

        st.markdown("---")
        with st.expander("📖 Field definitions"):
            st.write("""
            - **POS Code** — Unique identifier from your POS system.
            - **Account Name** — Name used on account sales invoices.
            - **Triumph Code (DBMACC#)** — Corresponding debtor code in Triumph ERP.
            - **Store / Outlet** — Store this mapping is assigned to.

            *Any change here is immediately picked up by the background event processor.*
            """)
|
||||
|
||||
|
||||
# Allow running this page module directly (e.g. `streamlit run pages/mappings.py`).
if __name__ == "__main__":
    render_page()
|
||||
380
pages/see_logs.py
Normal file
380
pages/see_logs.py
Normal file
@@ -0,0 +1,380 @@
|
||||
import streamlit as st
|
||||
import pandas as pd
|
||||
import plotly.express as px
|
||||
import plotly.graph_objects as go
|
||||
from datetime import datetime, date, timedelta
|
||||
from app_core.db.database import engine
|
||||
from sqlalchemy import text
|
||||
from app_core.ui.layout import render_store_selector
|
||||
|
||||
|
||||
@st.cache_data(ttl=300)  # Cache for 5 minutes
def _load_available_dates(tenant_id: int, days_back: int = 60) -> pd.DataFrame:
    """Return one row per day that has postings for *tenant_id*.

    Columns: ``d`` (datetime64 day) and ``c`` (row count), newest first,
    limited to the last *days_back* days.

    BUG FIX: *days_back* was previously ignored — the SQL hard-coded a
    60-day interval. It is now passed as a bound parameter
    (``CURRENT_DATE - <int>`` is date arithmetic in PostgreSQL).
    """
    with engine.connect() as conn:
        dates_df = pd.read_sql(
            'SELECT "created_at"::date AS d, COUNT(*) AS c\n'
            'FROM "tenantpostings"\n'
            'WHERE "created_at" >= (CURRENT_DATE - %(days)s) AND "tenant_id" = %(t)s\n'
            'GROUP BY d\n'
            'ORDER BY d DESC',
            conn,
            params={"t": tenant_id, "days": days_back},
        )
    # Normalize d to datetime64 so callers can use the .dt accessor.
    if not pd.api.types.is_datetime64_any_dtype(dates_df['d']):
        dates_df['d'] = pd.to_datetime(dates_df['d'], errors='coerce')
    return dates_df
|
||||
|
||||
|
||||
@st.cache_data(ttl=300)  # Cache for 5 minutes
def _load_daily_data(tenant_id: int, target_date: date) -> pd.DataFrame:
    """Load one day's postings for a tenant, de-duplicated by event.

    Rows carrying a non-blank ``triumph_event`` are collapsed to one row per
    (processing_type, triumph_event) pair — keeping the highest id — so
    re-posted events do not double the day's totals. Rows without an event
    id are kept as-is. Result is cached for 5 minutes per argument pair.
    """
    day_sql = (
        'SELECT * FROM "tenantpostings" '
        'WHERE "created_at"::date = %(d)s AND "tenant_id" = %(t)s '
        'ORDER BY "id" DESC '
        'LIMIT 10000'
    )
    with engine.connect() as conn:
        df = pd.read_sql(day_sql, conn, params={"d": target_date, "t": tenant_id})

    # De-duplicate by triumph_event to avoid logical doubling
    if not df.empty and 'triumph_event' in df.columns:
        # Blank/NaN event ids cannot be matched, so those rows are exempt.
        has_event = df['triumph_event'].fillna('').astype(str).str.strip() != ''
        # Sorting id descending + keep='first' retains the newest posting per event.
        df_with_events = df[has_event].sort_values(['processing_type', 'triumph_event', 'id'], ascending=[True, True, False]).drop_duplicates(subset=['processing_type', 'triumph_event'], keep='first')
        df_no_events = df[~has_event]
        df = pd.concat([df_with_events, df_no_events]).sort_values('id', ascending=False)

    return df
|
||||
|
||||
|
||||
@st.cache_data(ttl=300)  # Cache for 5 minutes
def _load_trend_data(tenant_id: int, days_back: int = 30) -> pd.DataFrame:
    """Return per-day JOURNAL totals for the last *days_back* days.

    Columns: ``d`` (datetime64 day) and ``total`` (sum of total_amount),
    ascending by day.

    BUG FIX: *days_back* was previously ignored — the SQL hard-coded a
    30-day interval. It is now passed as a bound parameter
    (``CURRENT_DATE - <int>`` is date arithmetic in PostgreSQL).
    """
    with engine.connect() as conn:
        totals_agg = pd.read_sql(
            'SELECT "created_at"::date AS d, SUM("total_amount") AS total\n'
            'FROM "tenantpostings"\n'
            'WHERE "created_at" >= (CURRENT_DATE - %(days)s) AND "tenant_id" = %(t)s\n'
            " AND UPPER(COALESCE(\"processing_type\", '')) = 'JOURNAL'\n"
            'GROUP BY d\n'
            'ORDER BY d ASC',
            conn,
            params={"t": tenant_id, "days": days_back},
        )
    # Normalize d to datetime64 so callers can use the .dt accessor.
    if not pd.api.types.is_datetime64_any_dtype(totals_agg['d']):
        totals_agg['d'] = pd.to_datetime(totals_agg['d'], errors='coerce')
    return totals_agg
|
||||
|
||||
|
||||
def _normalize_name(name: str) -> str:
|
||||
return "".join(ch for ch in name.lower() if ch.isalnum())
|
||||
|
||||
|
||||
def _build_display_map(df: pd.DataFrame) -> dict[str, str]:
|
||||
overrides = {
|
||||
"triumph_status": "Status",
|
||||
"triumph_event": "Event",
|
||||
"outlet_name": "Outlet Name",
|
||||
"tenant_id": "Tenant ID",
|
||||
"processing_type": "Processing Type",
|
||||
"total_amount": "Total Amount",
|
||||
"created_at": "Date",
|
||||
"updated_at": "Updated At",
|
||||
"id": "SNo",
|
||||
}
|
||||
display_map: dict[str, str] = {}
|
||||
used: set[str] = set()
|
||||
for col in df.columns:
|
||||
key = col.lower()
|
||||
if key in overrides:
|
||||
display_name = overrides[key]
|
||||
else:
|
||||
# Convert snake_case to Title Case
|
||||
display_name = col.replace("_", " ").title()
|
||||
|
||||
# Ensure unique display names
|
||||
final_name = display_name
|
||||
counter = 1
|
||||
while final_name in used:
|
||||
final_name = f"{display_name} ({counter})"
|
||||
counter += 1
|
||||
|
||||
display_map[col] = final_name
|
||||
used.add(final_name)
|
||||
return display_map
|
||||
|
||||
|
||||
def _pick_existing_columns(df: pd.DataFrame, names: list[str]) -> list[str]:
|
||||
"""Pick columns that exist in the DataFrame from a list of names."""
|
||||
found = []
|
||||
for name in names:
|
||||
if name in df.columns:
|
||||
found.append(name)
|
||||
return found
|
||||
|
||||
|
||||
def _format_date_columns(df: pd.DataFrame) -> pd.DataFrame:
|
||||
"""Format date columns to show only date part"""
|
||||
df_formatted = df.copy()
|
||||
for col in df_formatted.columns:
|
||||
if 'created_at' in col.lower() or 'date' in col.lower():
|
||||
if pd.api.types.is_datetime64_any_dtype(df_formatted[col]):
|
||||
df_formatted[col] = df_formatted[col].dt.date
|
||||
else:
|
||||
# Try to convert to datetime first
|
||||
try:
|
||||
df_formatted[col] = pd.to_datetime(df_formatted[col]).dt.date
|
||||
except:
|
||||
pass
|
||||
return df_formatted
|
||||
|
||||
|
||||
def _journal_total(frame: pd.DataFrame) -> float:
|
||||
"""Sum total_amount for JOURNAL rows only."""
|
||||
if frame is None or frame.empty or 'total_amount' not in frame.columns:
|
||||
return 0.0
|
||||
|
||||
# We assume 'frame' is already de-duplicated by triumph_event at load time
|
||||
if 'processing_type' in frame.columns:
|
||||
mask = frame['processing_type'].astype(str).str.upper() == 'JOURNAL'
|
||||
frame = frame[mask]
|
||||
|
||||
return float(frame['total_amount'].sum()) if not frame.empty else 0.0
|
||||
|
||||
|
||||
def _stat_card(title: str, value: str, color: str, icon: str) -> str:
    """Return an HTML snippet for a hoverable KPI card.

    *color* is a hex string (e.g. "#059669"); it is reused with two-digit
    alpha suffixes (15/20/30) for the background, border, and shadow tints.
    Rendered by the caller via ``st.markdown(..., unsafe_allow_html=True)``.
    """
    return f"""
    <div style="
        background: {color}15;
        border: 1px solid {color}30;
        border-radius: 12px;
        padding: 16px;
        text-align: center;
        box-shadow: 0 4px 12px {color}20;
        transition: transform 0.2s ease, box-shadow 0.2s ease;
        cursor: pointer;
    " onmouseover="this.style.transform='translateY(-2px)'; this.style.boxShadow='0 8px 20px {color}30';"
    onmouseout="this.style.transform='translateY(0px)'; this.style.boxShadow='0 4px 12px {color}20';">
        <div style="font-size: 24px; margin-bottom: 8px;">{icon}</div>
        <div style="font-size: 24px; font-weight: 700; color: {color}; margin-bottom: 4px;">{value}</div>
        <div style="font-size: 14px; color: #64748b; font-weight: 600;">{title}</div>
    </div>
    """
|
||||
|
||||
|
||||
def render_page():
    """Render the per-store daily analytics dashboard.

    Flow: require login → pick store → pick a report date (falling back to
    the most recent day that has data) → summary cards, a 14-day JOURNAL
    sales trend chart, a searchable detail table, and a CSV download.
    """
    if st.session_state.get("auth_user") is None:
        st.warning("Please login to continue.")
        st.stop()

    # Store selector (must be chosen before loading analytics)
    tenant_id, store_label = render_store_selector()
    if not tenant_id:
        st.info("Please choose a store to view analytics.")
        return

    st.markdown("## 📊 Dashboard")
    # Date picker for selecting any date
    picker_col1, _ = st.columns([1, 3])
    with picker_col1:
        selected_date = st.date_input("Report date", value=date.today(), max_value=date.today())
    st.markdown("---")

    # Database access with caching
    try:
        # Find most recent available dates with data (last 60 days)
        dates_df = _load_available_dates(tenant_id)

        if dates_df.empty:
            st.warning("No data available in the last 60 days.")
            return

        # Prefer the user-selected date if present; else pick the most recent date
        available_dates = list(dates_df['d'].dt.date)
        if selected_date in available_dates:
            date_shown = selected_date
        else:
            date_shown = available_dates[0]

        # Load the day's rows via the cached loader.
        # NOTE(review): the previous revision also preloaded the two prior
        # days (df_compare / df_compare2) and built a frames_by_date dict,
        # but nothing below ever read them — two wasted DB queries per
        # render. Removed (along with an unused `today` variable).
        df = _load_daily_data(tenant_id, date_shown)

        if date_shown == selected_date:
            st.success(f"📅 Showing data for {date_shown.strftime('%B %d, %Y')} ({len(df):,} records)")
        else:
            st.info(f"📅 Showing most recent data: {date_shown.strftime('%B %d, %Y')} ({len(df):,} records)")

    except Exception as e:
        st.error(f"Database connection failed: {str(e)}")
        return

    # Calculate key metrics (Total Amount uses JOURNAL only)
    total_amount = _journal_total(df)
    total_transactions = len(df)
    success_count = len(df[df['triumph_status'] == 'success']) if 'triumph_status' in df.columns else 0
    failed_count = len(df[df['triumph_status'] == 'failed']) if 'triumph_status' in df.columns else 0
    pending_count = len(df[df['triumph_status'] == 'pending']) if 'triumph_status' in df.columns else 0

    # Status summary cards
    st.markdown("### 📈 Today's Overview")
    col1, col2, col3, col4, col5 = st.columns(5)

    with col1:
        st.markdown(_stat_card("Total Amount", f"${total_amount:,.2f}", "#059669", "💰"), unsafe_allow_html=True)
    with col2:
        st.markdown(_stat_card("Transactions", f"{total_transactions:,}", "#2563EB", "📊"), unsafe_allow_html=True)
    with col3:
        st.markdown(_stat_card("Success", f"{success_count:,}", "#059669", "✅"), unsafe_allow_html=True)
    with col4:
        st.markdown(_stat_card("Failed", f"{failed_count:,}", "#DC2626", "❌"), unsafe_allow_html=True)
    with col5:
        st.markdown(_stat_card("Pending", f"{pending_count:,}", "#D97706", "⏳"), unsafe_allow_html=True)

    st.markdown("---")

    # Stock-like trend line: last 14 days JOURNAL totals, with last 3 days highlighted
    st.markdown("### 📈 Sales Trend")
    totals_agg = _load_trend_data(tenant_id)
    last14 = totals_agg.tail(14).copy() if not totals_agg.empty else pd.DataFrame(columns=['d', 'total'])
    if not last14.empty:
        x_labels = last14['d'].dt.strftime('%b %d')
        fig_line = go.Figure()
        fig_line.add_trace(
            go.Scatter(
                x=x_labels,
                y=last14['total'],
                mode='lines+markers',
                name='Sales',
                line=dict(color="#2563EB", width=2.6),
                marker=dict(size=4, color="#2563EB"),
                line_shape='spline',
                hovertemplate="%{x}<br>$%{y:,.2f}<extra></extra>",
            )
        )
        # Highlight last 3 points
        last3 = last14.tail(3).reset_index(drop=True)
        colors = ["#94A3B8", "#DC2626", "#16A34A"]  # old->gray, prev->red, latest->green
        labels = ["Prev-2", "Prev", "Latest"]
        for i in range(len(last3)):
            fig_line.add_trace(
                go.Scatter(
                    x=[last3['d'].dt.strftime('%b %d').iloc[i]],
                    y=[last3['total'].iloc[i]],
                    mode='markers',
                    name=labels[i],
                    marker=dict(color=colors[i], size=9, symbol='circle'),
                    hovertemplate=f"{labels[i]}: %{{x}}<br>$%{{y:,.2f}}<extra></extra>",
                )
            )
        # Profit/Loss vs previous day (guard against division by zero)
        if len(last3) >= 2 and last3['total'].iloc[1] != 0:
            shown_total = float(last3['total'].iloc[2]) if len(last3) == 3 else float(last3['total'].iloc[-1])
            prev_total = float(last3['total'].iloc[-2])
            delta = (shown_total - prev_total) / prev_total * 100.0
            arrow = '▲' if delta >= 0 else '▼'
            color = '#16A34A' if delta >= 0 else '#DC2626'
            fig_line.add_annotation(
                x=1, y=1.1, xref='paper', yref='paper', showarrow=False,
                text=f"{arrow} {delta:.1f}% vs {last3['d'].dt.strftime('%b %d').iloc[-2]}",
                font=dict(color=color, size=14), align='right'
            )
        fig_line.update_layout(
            height=320,
            showlegend=True,
            yaxis_title="Total Amount ($)",
            xaxis_title=None,
            margin=dict(t=30, b=30, l=30, r=20),
            plot_bgcolor='white',
            hovermode='x unified'
        )
        fig_line.update_yaxes(showgrid=True, gridcolor='#E5E7EB', zeroline=False)
        fig_line.update_xaxes(showgrid=False, zeroline=False)
        st.plotly_chart(fig_line, use_container_width=True)

    st.markdown("---")

    # Data table section
    st.markdown("### 📋 Detailed Data")

    # Minimal columns for default view
    minimal_names = [
        "id",
        "created_at",
        "outlet_name",
        "processing_type",
        "total_amount",
        "triumph_status",
        "triumph_event",
    ]
    minimal_cols = _pick_existing_columns(df, minimal_names)

    # Controls row: search only
    q = st.text_input("Search", placeholder="Type to filter rows across all columns")

    # Filter data based on search
    if q:
        mask = df.astype(str).apply(lambda x: x.str.contains(q, case=False, na=False)).any(axis=1)
        df_filtered = df[mask]
    else:
        df_filtered = df.copy()

    # Always use minimal columns
    display_cols = minimal_cols if minimal_cols else list(df_filtered.columns[:8])

    # Build display names
    display_map = _build_display_map(df_filtered)

    # Format the display dataframe
    df_display = df_filtered[display_cols].copy()
    df_display.columns = [display_map.get(col, col) for col in display_cols]
    # Format date columns
    df_display = _format_date_columns(df_display)

    # Format numeric columns
    for col in df_display.columns:
        if 'amount' in col.lower() and df_display[col].dtype in ['float64', 'int64']:
            df_display[col] = df_display[col].apply(lambda x: f"${x:,.2f}" if pd.notna(x) else "")

    # Always apply status styling
    if 'Status' in df_display.columns:
        def style_status(val):
            if val == 'success':
                return 'background-color: #D1FAE5; color: #065F46; font-weight: 600;'
            elif val == 'failed':
                return 'background-color: #FEE2E2; color: #991B1B; font-weight: 600;'
            elif val == 'pending':
                return 'background-color: #FEF3C7; color: #92400E; font-weight: 600;'
            return ''

        styled_df = df_display.style.map(style_status, subset=['Status'])
        st.dataframe(styled_df, use_container_width=True, height=400)
    else:
        st.dataframe(df_display, use_container_width=True, height=400)

    # Download button
    if st.button("📥 Download Today's Data as CSV", type="primary"):
        csv = df_filtered.to_csv(index=False)
        st.download_button(
            label="Download CSV",
            data=csv,
            file_name=f"workolik_data_{date_shown.strftime('%Y%m%d')}.csv",
            mime="text/csv"
        )
|
||||
285
pages/see_payload.py
Normal file
285
pages/see_payload.py
Normal file
@@ -0,0 +1,285 @@
|
||||
import streamlit as st
|
||||
import pandas as pd
|
||||
from sqlalchemy import text
|
||||
from app_core.db.database import engine
|
||||
from app_core.ui.layout import render_store_selector
|
||||
|
||||
|
||||
@st.cache_data(ttl=300)  # Cache for 5 minutes
def _load_tenant_data(tenant_id: int, limit: int = 10000) -> pd.DataFrame:
    """Load the newest *limit* rows of ``tenantpostings`` for *tenant_id*.

    Uses bound parameters (:t / :limit) — never string interpolation — and
    caches the result for 5 minutes per (tenant_id, limit) pair.
    """
    with engine.connect() as conn:
        df = pd.read_sql(
            text('SELECT * FROM "tenantpostings" WHERE "tenant_id" = :t ORDER BY "id" DESC LIMIT :limit'),
            conn,
            params={"t": tenant_id, "limit": limit},
        )
    return df
|
||||
|
||||
|
||||
def _detect_status_column(df: pd.DataFrame) -> str | None:
|
||||
candidates = ["status", "state", "result", "triumph_status"]
|
||||
lower_map = {c.lower(): c for c in df.columns}
|
||||
for key in candidates:
|
||||
if key in lower_map:
|
||||
return lower_map[key]
|
||||
for c in df.columns:
|
||||
if "status" in c.lower():
|
||||
return c
|
||||
return None
|
||||
|
||||
|
||||
def _normalize_name(name: str) -> str:
|
||||
return "".join(ch for ch in name.lower() if ch.isalnum())
|
||||
|
||||
|
||||
def _build_display_map(df: pd.DataFrame) -> dict[str, str]:
|
||||
overrides = {
|
||||
"triumph_status": "Status",
|
||||
"triumph_event": "Event",
|
||||
"outlet_name": "Outlet Name",
|
||||
"tenant_id": "Tenant ID",
|
||||
"processing_type": "Processing Type",
|
||||
"total_amount": "Total Amount",
|
||||
"created_at": "Date",
|
||||
"updated_at": "Updated At",
|
||||
"id": "SNo",
|
||||
}
|
||||
display_map: dict[str, str] = {}
|
||||
used: set[str] = set()
|
||||
for col in df.columns:
|
||||
key = col.lower()
|
||||
if key in overrides:
|
||||
label = overrides[key]
|
||||
else:
|
||||
label = col.replace("_", " ").title()
|
||||
base = label
|
||||
suffix = 2
|
||||
while label in used:
|
||||
label = f"{base} {suffix}"
|
||||
suffix += 1
|
||||
used.add(label)
|
||||
display_map[col] = label
|
||||
return display_map
|
||||
|
||||
|
||||
def _format_status_with_emoji(styler: "pd.io.formats.style.Styler", df: pd.DataFrame, status_col: str | None) -> "pd.io.formats.style.Styler":
|
||||
if status_col is None or status_col not in df.columns:
|
||||
return styler
|
||||
|
||||
def fmt(val):
|
||||
v = str(val)
|
||||
v_lower = v.lower()
|
||||
if any(k in v_lower for k in ["success", "ok", "completed", "done", "active"]):
|
||||
return f"✅ {v}"
|
||||
if any(k in v_lower for k in ["fail", "error", "dead", "invalid"]):
|
||||
return f"❌ {v}"
|
||||
if any(k in v_lower for k in ["pending", "queue", "waiting", "processing"]):
|
||||
return f"⏳ {v}"
|
||||
return v
|
||||
|
||||
return styler.format({status_col: fmt})
|
||||
|
||||
|
||||
def _badge_status_cells(styler: "pd.io.formats.style.Styler", df: pd.DataFrame, status_col: str | None) -> "pd.io.formats.style.Styler":
|
||||
if status_col is None or status_col not in df.columns:
|
||||
return styler
|
||||
|
||||
def badge(val):
|
||||
v = str(val).lower()
|
||||
bg = "#E2E8F0"; color = "#0F172A"
|
||||
if any(k in v for k in ["success", "ok", "completed", "done", "active"]):
|
||||
bg = "#E6F7EE"; color = "#166534"
|
||||
elif any(k in v for k in ["fail", "error", "dead", "invalid"]):
|
||||
bg = "#FDECEC"; color = "#991B1B"
|
||||
elif any(k in v for k in ["pending", "queue", "waiting", "processing"]):
|
||||
bg = "#FEF5E6"; color = "#92400E"
|
||||
return f"background-color: {bg}; color:{color}; border-radius: 999px; padding: 4px 8px;"
|
||||
|
||||
return styler.map(badge, subset=pd.IndexSlice[:, [status_col]])
|
||||
|
||||
|
||||
def _zebra_style(df: pd.DataFrame) -> "pd.io.formats.style.Styler":
    """Build a Styler with zebra row striping, sticky headers, and no index.

    The index is reset first so striping is computed from positional row
    numbers rather than whatever index the caller passed in.
    """
    df2 = df.reset_index(drop=True)

    def zebra(row: pd.Series):
        # Tint even-numbered (0-based) rows; odd rows keep the default background.
        return ["background-color: rgba(2,6,23,0.03);" if (row.name % 2 == 0) else ""] * len(row)

    styler = df2.style.apply(zebra, axis=1)
    styler = styler.set_table_styles([
        {"selector": "th", "props": "position: sticky; top: 0; background: #F0F6FF; color:#0F172A; font-weight:700;"},
        {"selector": "tbody td", "props": "border-top: 1px solid rgba(15,23,42,0.06);"},
        {"selector": "table", "props": "border-collapse: separate; border-spacing: 0;"},
    ])
    styler = styler.hide(axis="index")
    return styler
|
||||
|
||||
|
||||
def _format_two_decimals_for_amounts(styler: "pd.io.formats.style.Styler", df: pd.DataFrame, ) -> "pd.io.formats.style.Styler":
    """Format numeric amount-like columns of *df* with two decimal places.

    Columns are matched by their normalized (lowercase alphanumeric) name
    against a small candidate set; non-numeric columns are skipped.
    """
    candidates_norm = {"totalamount", "total_amount", "amount", "totalamounts", "totalamounttotals"}
    targets = [
        c for c in df.columns
        if _normalize_name(c) in candidates_norm and pd.api.types.is_numeric_dtype(df[c])
    ]
    if targets:
        return styler.format(formatter="{:.2f}", subset=pd.IndexSlice[:, targets])
    return styler
|
||||
|
||||
|
||||
def _format_date_columns(df: pd.DataFrame) -> pd.DataFrame:
|
||||
"""Format date columns to show only date part"""
|
||||
df_formatted = df.copy()
|
||||
for col in df_formatted.columns:
|
||||
if 'created_at' in col.lower() or 'date' in col.lower():
|
||||
if pd.api.types.is_datetime64_any_dtype(df_formatted[col]):
|
||||
df_formatted[col] = df_formatted[col].dt.date
|
||||
else:
|
||||
# Try to convert to datetime first
|
||||
try:
|
||||
df_formatted[col] = pd.to_datetime(df_formatted[col]).dt.date
|
||||
except:
|
||||
pass
|
||||
return df_formatted
|
||||
|
||||
|
||||
def _pick_existing_columns(df: pd.DataFrame, names: list[str]) -> list[str]:
|
||||
lower_map = {c.lower(): c for c in df.columns}
|
||||
picked = []
|
||||
for n in names:
|
||||
if n.lower() in lower_map:
|
||||
picked.append(lower_map[n.lower()])
|
||||
return picked
|
||||
|
||||
|
||||
def _stat_card(title: str, value: int | str, color: str, emoji: str) -> str:
    """Return an HTML snippet for a small KPI card.

    Args:
        title: caption rendered above the value (muted, small).
        value: headline number or text, rendered bold in *color*.
        color: CSS color applied to the value text.
        emoji: leading icon character.

    The ``stat-card`` class picks up the hover transition defined in the
    page-level CSS injected by render_page().
    """
    return f"""
    <div class=\"stat-card\" style=\"display:flex;align-items:center;gap:12px;padding:14px 16px;border-radius:14px;background:#fff;border:1px solid rgba(15,23,42,0.06);box-shadow:0 10px 24px rgba(2,6,23,0.08);transition:transform .15s ease, box-shadow .15s ease;\">
        <div style=\"font-size:20px;\">{emoji}</div>
        <div>
            <div style=\"font-size:12px;color:#64748b;\">{title}</div>
            <div style=\"font-size:20px;font-weight:800;color:{color};\">{value}</div>
        </div>
    </div>
    """
|
||||
|
||||
|
||||
def render_page():
    """Render the DataHub page.

    Flow: auth gate -> store selector -> status stat cards -> searchable,
    paginated table of the tenant's rows -> CSV download of the filtered set.
    """
    if st.session_state.get("auth_user") is None:
        st.warning("Please login to continue.")
        st.stop()

    # Store selector (required before loading the data view).
    tenant_id, _ = render_store_selector()
    if not tenant_id:
        st.info("Please choose a store to view data.")
        return

    st.markdown(
        """
        <style>
        .stat-card:hover{transform:translateY(-2px);box-shadow:0 16px 36px rgba(2,6,23,0.12)}
        .stat-row{margin-bottom:14px;}
        .block-after-stats{margin-top:10px;}
        </style>
        """,
        unsafe_allow_html=True,
    )

    st.title("DataHub")
    st.caption("Inspect data from Warehouse.")

    st.info("Connected to database ✅.")

    df = _load_tenant_data(tenant_id)

    status_col_global = _detect_status_column(df)
    if status_col_global:
        _render_status_cards(df, status_col_global)

    minimal_names = [
        "id",
        "created_at",
        "outlet_name",
        "processing_type",
        "total_amount",
        "triumph_status",
        "triumph_event",
    ]
    minimal_cols = _pick_existing_columns(df, minimal_names)

    # Controls row: search only.
    q = st.text_input("Search", placeholder="Type to filter rows across all columns")

    # Apply global case-insensitive search across all columns.
    filtered = df
    if q:
        q_lower = q.lower()
        filtered = filtered[filtered.apply(lambda r: r.astype(str).str.lower().str.contains(q_lower).any(), axis=1)]

    # Always restrict to the minimal column set when those columns exist.
    if minimal_cols:
        filtered = filtered[minimal_cols]

    # Pagination (controls are rendered below the table; small footprint).
    total_rows = len(filtered)
    default_page_size = 25
    total_pages = max(1, (total_rows + default_page_size - 1) // default_page_size)
    page_num_state_key = "payload_page_num"
    if page_num_state_key not in st.session_state:
        st.session_state[page_num_state_key] = 1
    # Fix: clamp a stale page number back into range — after a narrowing
    # search, the stored page could exceed total_pages, producing an empty
    # page and a number_input value above its max.
    st.session_state[page_num_state_key] = min(st.session_state[page_num_state_key], total_pages)
    start = (st.session_state[page_num_state_key] - 1) * default_page_size
    page_df = filtered.iloc[start:start + default_page_size]

    # Build display names and styling.
    display_map = _build_display_map(page_df)
    display_df = _format_date_columns(page_df.rename(columns=display_map))
    status_col_display = display_map.get(_detect_status_column(page_df))

    styled = _zebra_style(display_df)
    # Fix: amount formatting was applied twice; once is sufficient.
    styled = _format_two_decimals_for_amounts(styled, display_df)
    # Always apply status badges when a status column is visible.
    if status_col_display:
        styled = _format_status_with_emoji(styled, display_df, status_col_display)
        styled = _badge_status_cells(styled, display_df, status_col_display)
    # NOTE: _zebra_style already applies the sticky-header table styles and
    # hides the index, so they are not re-applied here.

    st.dataframe(styled, use_container_width=True, height=520)

    # Bottom pagination controls + CSV export of the full filtered set.
    p1, p2, p3 = st.columns([1, 2, 1])
    with p1:
        st.caption(f"Showing {len(page_df)} of {total_rows} rows")
    with p2:
        st.caption("Page")
        st.session_state[page_num_state_key] = st.number_input(
            " ", min_value=1, max_value=total_pages, value=st.session_state[page_num_state_key], step=1, label_visibility="collapsed")
    with p3:
        download_df = filtered.rename(columns=_build_display_map(filtered))
        st.download_button(
            "Download filtered CSV",
            data=download_df.to_csv(index=False).encode("utf-8"),
            file_name="tenantpostings_filtered.csv",
            use_container_width=True,
        )


def _render_status_cards(df, status_col: str) -> None:
    """Render the Success/Failed/Pending stat cards for *status_col*."""
    s = df[status_col].astype(str).str.lower()
    # Fix: pandas Series has no `str_contains` method; the previous
    # hasattr(s, 'str_contains') fallback was permanently-dead code.
    ok = s.str.contains("success|ok|completed|done|active").sum()
    bad = s.str.contains("fail|error|dead|invalid").sum()
    pend = s.str.contains("pending|queue|waiting|processing").sum()
    total = len(df)
    st.markdown('<div class="stat-row">', unsafe_allow_html=True)
    c1, c2, c3, c4 = st.columns([1, 1, 1, 2])
    with c1: st.markdown(_stat_card("Success", ok, "#166534", "✅"), unsafe_allow_html=True)
    with c2: st.markdown(_stat_card("Failed", bad, "#991B1B", "❌"), unsafe_allow_html=True)
    with c3: st.markdown(_stat_card("Pending", pend, "#92400E", "⏳"), unsafe_allow_html=True)
    with c4: st.caption(f"Total rows: {total}")
    st.markdown('</div>', unsafe_allow_html=True)
|
||||
11
requirements.txt
Normal file
11
requirements.txt
Normal file
@@ -0,0 +1,11 @@
|
||||
streamlit>=1.36.0
|
||||
sqlalchemy>=2.0.0
|
||||
psycopg2-binary>=2.9.9
|
||||
bcrypt>=4.1.2
|
||||
python-dotenv>=1.0.1
|
||||
pydantic>=2.8.2
|
||||
cryptography>=42.0.8
|
||||
pandas>=2.2.2
|
||||
plotly>=5.17.0
|
||||
APScheduler>=3.10.4
|
||||
tzlocal>=5.3.1
|
||||
97
scripts/scheduler_standalone.py
Normal file
97
scripts/scheduler_standalone.py
Normal file
@@ -0,0 +1,97 @@
|
||||
#!/usr/bin/env python3
"""
Standalone scheduler service for Workolik daily email reports.
This runs independently of the Streamlit application to avoid multiple instances.
"""
import os
import sys
import logging
from datetime import datetime
from zoneinfo import ZoneInfo
from apscheduler.schedulers.blocking import BlockingScheduler
from apscheduler.triggers.cron import CronTrigger

# Add the project root to Python path (scripts/ -> project root)
project_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
if project_root not in sys.path:
    sys.path.insert(0, project_root)

# Imported after the sys.path tweak so app_core resolves when run from scripts/.
from app_core.services.daily_report import main as run_daily_report

# Configure logging
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)
||||
|
||||
def send_daily_report():
    """Send the daily report with database-based deduplication using existing email_logs table.

    Order matters here: the dedupe query runs first, and any DB error makes
    the function return WITHOUT sending (fail-closed, to avoid duplicate
    sends when the log table cannot be consulted). Only after the check
    passes is the report pipeline invoked via run_daily_report().
    """
    try:
        # Check if we already sent today's report
        today = datetime.now(ZoneInfo('Asia/Kolkata')).date()
        today_str = today.strftime('%Y-%m-%d')

        # Local imports keep DB setup out of module import time.
        from app_core.db.database import SessionLocal
        from sqlalchemy import text

        db = SessionLocal()
        try:
            # Check if daily report was already sent today using existing email_logs table
            # Match the exact subject we generate in daily_report.py ("Daily Digest - YYYY-MM-DD")
            result = db.execute(
                text("SELECT id FROM email_logs WHERE date_for = :date_for AND subject = :subject LIMIT 1"),
                {"date_for": today_str, "subject": f"Daily Digest - {today_str}"}
            ).fetchone()

            if result:
                logger.info(f"Daily report already sent today ({today}), skipping...")
                return

        except Exception as e:
            # Fail closed: if we can't verify, don't risk a duplicate send.
            logger.error(f"Database error checking existing reports: {e}")
            return
        finally:
            db.close()

        logger.info(f"Starting daily report at {datetime.now(ZoneInfo('Asia/Kolkata'))}")
        # run_daily_report() returns a shell-style exit code (0 == success).
        result = run_daily_report()
        if result == 0:
            logger.info("Daily report sent successfully")
        else:
            logger.warning(f"Daily report failed with exit code: {result}")

    except Exception as e:
        logger.error(f"Error sending daily report: {str(e)}")
|
||||
|
||||
def main():
    """Start the blocking scheduler that emails the daily report at 8:00 PM IST."""
    logger.info("Starting Workolik Daily Email Scheduler")

    ist = ZoneInfo('Asia/Kolkata')
    scheduler = BlockingScheduler(timezone=ist)

    # Single cron job: fire every day at 20:00 IST; replace any stale job
    # with the same id from a previous run.
    scheduler.add_job(
        func=send_daily_report,
        trigger=CronTrigger(hour=20, minute=0, timezone=ist),
        id='daily_email_report',
        name='Daily Email Report',
        replace_existing=True,
    )

    logger.info("Daily email scheduler started - will send reports at 8:00 PM IST")

    try:
        # BlockingScheduler.start() blocks the process until shutdown.
        scheduler.start()
    except KeyboardInterrupt:
        logger.info("Scheduler stopped by user")
        scheduler.shutdown()
    except Exception as e:
        logger.error(f"Scheduler error: {e}")
        scheduler.shutdown()


if __name__ == "__main__":
    main()
|
||||
68
scripts/send_past_reports.py
Normal file
68
scripts/send_past_reports.py
Normal file
@@ -0,0 +1,68 @@
|
||||
#!/usr/bin/env python3
"""One-off helper: (re)send daily digest emails for specific past dates."""
import os
import sys
from datetime import date
from zoneinfo import ZoneInfo  # NOTE(review): appears unused in this script

# Add the project root to Python path
project_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
if project_root not in sys.path:
    sys.path.insert(0, project_root)

from app_core.config.settings import AppSettings
from app_core.services.mailer_service import MailerService
|
||||
def send_report_for_date(service, settings, report_date):
    """Build and send the daily digest for *report_date*, then log the attempt.

    Skips silently when there is no data for the date or when no recipients
    are configured. Every actual send attempt (success or failure) is
    recorded via service.log_email().
    """
    print(f"--- Processing date: {report_date} ---")

    # Deliberately no "already sent" guard: this script exists to (re)send
    # past reports on demand.

    df = service.fetch_daily_rows(report_date)
    if df.empty:
        print(f"No data for {report_date}. Skipping.")
        return

    # First row supplies the header context; the whole frame feeds the body.
    first_row = df.iloc[0].to_dict()
    html = service.build_email_html(first_row, df)

    recipients_env = settings.report_recipients or os.getenv("REPORT_RECIPIENTS")
    if not recipients_env:
        print("Error: REPORT_RECIPIENTS env var is empty.")
        return

    recipients = [addr.strip() for addr in recipients_env.split(',') if addr.strip()]
    subject = f"Daily Digest - {report_date}"

    print(f"Sending email to: {recipients}")
    ok, msg = service.send_email(recipients, subject=subject, html=html)

    service.log_email(
        recipients=recipients,
        subject=subject,
        date_for=str(report_date),
        status="sent" if ok else "failed",
        error=None if ok else msg,
    )

    if ok:
        print(f"Successfully sent report for {report_date}")
    else:
        print(f"Failed to send report for {report_date}: {msg}")
|
||||
|
||||
def main():
    """Send digests for the hard-coded list of past dates, in order."""
    settings = AppSettings()
    mailer = MailerService(settings)

    for target in (date(2026, 3, 21), date(2026, 3, 22)):
        send_report_for_date(mailer, settings, target)


if __name__ == "__main__":
    main()
|
||||
91
scripts/send_specific_report.py
Normal file
91
scripts/send_specific_report.py
Normal file
@@ -0,0 +1,91 @@
|
||||
#!/usr/bin/env python3
"""
Send a one-off Daily Digest email for a specific date using the app's template.

Default date: 14.10.2025 (dd.mm.yyyy)
Default recipient: suriyakumar.vijayanayagam@gmail.com

Usage examples:
  python scripts/send_specific_report.py
  python scripts/send_specific_report.py --date 14.10.2025 --to you@example.com
"""

import argparse
import os
import sys
from datetime import datetime
from zoneinfo import ZoneInfo  # NOTE(review): appears unused in this script

# Ensure project root on PYTHONPATH when running from scripts/
PROJECT_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
if PROJECT_ROOT not in sys.path:
    sys.path.insert(0, PROJECT_ROOT)

from app_core.config.settings import AppSettings
from app_core.services.mailer_service import MailerService
|
||||
|
||||
|
||||
def parse_args() -> argparse.Namespace:
    """Parse CLI options: --date (dd.mm.yyyy) and --to (comma-separated emails)."""
    parser = argparse.ArgumentParser(description="Send a daily digest for a specific date")
    parser.add_argument(
        "--date",
        default="14.10.2025",
        help="Target date in dd.mm.yyyy format (e.g., 14.10.2025)",
    )
    parser.add_argument(
        "--to",
        default="suriyakumar.vijayanayagam@gmail.com",
        help="Recipient email (comma-separated for multiple)",
    )
    return parser.parse_args()
|
||||
|
||||
|
||||
def parse_ddmmyyyy(value: str) -> datetime.date:
    """Parse a dd.mm.yyyy string into a date; exit with a clear message on bad input."""
    try:
        parsed = datetime.strptime(value, "%d.%m.%Y")
    except ValueError as ex:
        # SystemExit keeps CLI error reporting terse (no traceback).
        raise SystemExit(f"Invalid date '{value}'. Use dd.mm.yyyy (e.g., 14.10.2025)") from ex
    return parsed.date()
|
||||
|
||||
|
||||
def main() -> int:
    """Send the digest for the requested date; return a shell-style exit code.

    Exit codes: 0 = sent, 1 = no data for the date, 2 = send failed.
    """
    args = parse_args()
    target_date = parse_ddmmyyyy(args.date)
    recipients = [addr.strip() for addr in args.to.split(",") if addr.strip()]

    banner = "=" * 60
    print(banner)
    print(f"SENDING Daily Digest for {target_date} to: {', '.join(recipients)}")
    print(banner)

    settings = AppSettings()
    service = MailerService(settings)

    # Fetch rows for the date
    df = service.fetch_daily_rows(target_date)
    if df.empty:
        print(f"❌ No rows found for {target_date}. Nothing to send.")
        return 1

    # First row supplies header context; the full frame feeds the per-store summary.
    row = df.iloc[0].to_dict()
    html = service.build_email_html(row, df)

    subject = f"Daily Digest - {target_date}"
    print(f"Subject: {subject}")

    ok, msg = service.send_email(recipients, subject, html)
    if ok:
        print("Email sent successfully")
        service.log_email(recipients, subject, str(target_date), "sent", None)
        print("Logged in database")
        return 0

    print(f"Email failed: {msg}")
    service.log_email(recipients, subject, str(target_date), "failed", msg)
    print("Failure logged in database")
    return 2


if __name__ == "__main__":
    raise SystemExit(main())
|
||||
|
||||
|
||||
85
scripts/test_mail.py
Normal file
85
scripts/test_mail.py
Normal file
@@ -0,0 +1,85 @@
|
||||
#!/usr/bin/env python3
"""
Single test file for mail service - does everything in one place

Interactive: builds today's digest, previews the email_logs row, then asks
for confirmation before actually sending to the hard-coded test address.
"""
from datetime import datetime, date
from zoneinfo import ZoneInfo

print("=" * 60)
print("📧 MAIL SERVICE TEST")
print("=" * 60)

try:
    from app_core.services.mailer_service import MailerService
    from app_core.config.settings import AppSettings

    # Initialize services
    settings = AppSettings()
    service = MailerService(settings)

    # Get most recent date with data
    chosen_date = service.select_report_date()
    if not chosen_date:
        print("❌ No data available")
        exit(1)

    print(f"✅ Using date: {chosen_date}")

    # Fetch data
    df = service.fetch_daily_rows(chosen_date)
    print(f"✅ Found {len(df)} records")

    # Build email (first row = header context, full frame = body)
    row = df.iloc[0].to_dict()
    html = service.build_email_html(row, df)
    print(f"✅ Email HTML generated ({len(html)} characters)")

    # Show what would be logged
    ist = ZoneInfo("Asia/Kolkata")
    now_ist = datetime.now(ist)

    print(f"\n📝 Data that would be inserted in email_logs:")
    print(f" sent_at: {now_ist}")
    print(f" recipients: loyalydigital@gmail.com")
    print(f" subject: Daily Digest - {chosen_date}")
    print(f" status: sent")
    print(f" date_for: {chosen_date}")
    print(f" error: null")

    # Ask user before sending anything real
    print(f"\n🚀 Send email to loyalydigital@gmail.com? (y/n):")
    send_confirm = input(" Send? ").strip().lower()

    if send_confirm == 'y':
        print(f"\n📤 Sending email...")

        recipients = ["loyalydigital@gmail.com"]
        subject = f"Daily Digest - {chosen_date}"

        ok, msg = service.send_email(recipients, subject, html)

        if ok:
            print(f"✅ Email sent successfully!")
            service.log_email(recipients, subject, str(chosen_date), "sent", None)
            print(f"✅ Logged in database")
        else:
            print(f"❌ Email failed: {msg}")
            service.log_email(recipients, subject, str(chosen_date), "failed", msg)
            print(f"✅ Failed attempt logged")
    else:
        print(f"\n⏭️ Email not sent (test mode)")

    # Show recent logs
    print(f"\n📋 Recent email logs:")
    logs = service.recent_logs(limit=5)
    for log in logs:
        print(f" {log['status']} - {log['subject']} at {log['sent_at']}")

except Exception as e:
    # Broad catch is acceptable here: this is a manual diagnostic script.
    print(f"❌ Error: {e}")
    import traceback
    traceback.print_exc()

print(f"\n" + "=" * 60)
print("🏁 Done!")
print("=" * 60)
||||
104
scripts/validate_setup.py
Normal file
104
scripts/validate_setup.py
Normal file
@@ -0,0 +1,104 @@
|
||||
"""Pre-flight validation: required env vars, DB connectivity, SMTP login."""
import os
import sys
from datetime import datetime

# Ensure project root is on PYTHONPATH when running from scripts/
project_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
if project_root not in sys.path:
    sys.path.insert(0, project_root)

from app_core.config.settings import AppSettings
from app_core.db.database import engine
from sqlalchemy import text
||||
|
||||
|
||||
def check_env(settings: AppSettings) -> list[str]:
    """Return the names of required settings that are unset or empty.

    DATABASE_URL and REPORT_RECIPIENTS fall back to raw environment
    variables when the settings object does not provide them.
    """
    required = {
        "DATABASE_URL": settings.database_url or os.getenv("DATABASE_URL"),
        "SMTP_HOST": settings.smtp_host,
        "SMTP_PORT": settings.smtp_port,
        "SMTP_USER": settings.smtp_user,
        "SMTP_PASSWORD": settings.smtp_password,
        "SMTP_FROM_EMAIL": settings.smtp_from_email,
        "REPORT_RECIPIENTS": settings.report_recipients or os.getenv("REPORT_RECIPIENTS"),
    }
    # dict preserves insertion order, so the report order is stable.
    return [key for key, value in required.items() if not value]
|
||||
|
||||
|
||||
def check_db_connection() -> tuple[bool, str | None]:
    """Probe the database with SELECT 1; return (ok, error_message)."""
    try:
        # SQLAlchemy 2.0: raw SQL must be wrapped in text().
        with engine.connect() as conn:
            conn.execute(text("SELECT 1"))
    except Exception as exc:
        return False, str(exc)
    return True, None
|
||||
|
||||
|
||||
def check_smtp_login(s: AppSettings) -> tuple[bool, str | None]:
    """Connect and authenticate to SMTP without sending mail; return (ok, error)."""
    import smtplib

    try:
        server = smtplib.SMTP(s.smtp_host, s.smtp_port, timeout=20)
        if s.smtp_use_tls:
            server.starttls()
        if s.smtp_user and s.smtp_password:
            server.login(s.smtp_user, s.smtp_password)
        server.noop()  # harmless probe; nothing is sent
        server.quit()
    except Exception as exc:
        return False, str(exc)
    return True, None
|
||||
|
||||
|
||||
def main() -> int:
    """Run env/DB/SMTP checks, print a report, and return 0 only when all pass."""
    print("=== Workolik Production Validation ===")
    settings = AppSettings()

    # 1) Environment variables
    missing = check_env(settings)
    if missing:
        print("❌ Missing required env vars:", ", ".join(sorted(set(missing))))
    else:
        print("✅ Required env vars present")

    # Optional BCC
    bcc = os.getenv("BCC_RECIPIENTS", "").strip()
    if bcc:
        print(f"✅ BCC_RECIPIENTS set: {bcc}")
    else:
        print("ℹ️ BCC_RECIPIENTS not set (no BCC will be added)")

    # 2) Database connectivity
    ok_db, err_db = check_db_connection()
    if ok_db:
        print("✅ Database connectivity OK")
    else:
        print(f"❌ Database connectivity FAILED: {err_db}")

    # 3) SMTP connectivity (no email will be sent)
    ok_smtp, err_smtp = check_smtp_login(settings)
    if ok_smtp:
        print("✅ SMTP login OK (no email sent)")
    else:
        print(f"❌ SMTP login FAILED: {err_smtp}")

    # 4) Scheduler subject check (ensure dedupe matches)
    today_str = datetime.now().date().strftime('%Y-%m-%d')
    expected_subject = f"Daily Digest - {today_str}"
    print(f"✅ Scheduler dedupe subject pattern: {expected_subject}")

    failures = (1 if missing else 0) + (0 if ok_db else 1) + (0 if ok_smtp else 1)
    print("=== Validation Complete ===")
    return 0 if failures == 0 else 1


if __name__ == "__main__":
    sys.exit(main())
|
||||
|
||||
|
||||
48
scripts/verify_scheduler.py
Normal file
48
scripts/verify_scheduler.py
Normal file
@@ -0,0 +1,48 @@
|
||||
#!/usr/bin/env python3
"""Manual smoke test: print IST times, exercise SchedulerService start/stop, and run the daily report once."""
from datetime import datetime
from zoneinfo import ZoneInfo

print("🔍 VERIFYING SCHEDULER SETUP")
print("=" * 50)

# Check current time
ist = ZoneInfo('Asia/Kolkata')
now = datetime.now(ist)
print(f"Current IST time: {now.strftime('%Y-%m-%d %H:%M:%S %Z')}")

# Check 8 PM today (the scheduled send time)
eight_pm = now.replace(hour=20, minute=0, second=0, microsecond=0)
print(f"8:00 PM today: {eight_pm.strftime('%Y-%m-%d %H:%M:%S %Z')}")

# Test scheduler start/stop and next-run computation
try:
    from app_core.services.scheduler_service import SchedulerService
    s = SchedulerService()
    s.start_scheduler()
    print(f"✅ Scheduler started: {s.is_running()}")

    next_run = s.get_next_run_time()
    if next_run:
        next_run_ist = next_run.astimezone(ist)
        print(f"✅ Next run: {next_run_ist.strftime('%Y-%m-%d %H:%M:%S %Z')}")
    else:
        print("❌ No next run time found")

    s.stop_scheduler()
    print("✅ Scheduler stopped")

except Exception as e:
    print(f"❌ Scheduler error: {e}")

# Test daily report
# NOTE(review): this invokes the real daily-report entry point, which
# presumably sends email when data exists — confirm before running in prod.
try:
    from app_core.services.daily_report import main
    print("\n🧪 Testing daily report...")
    result = main()
    print(f"✅ Daily report result: {result}")
except Exception as e:
    print(f"❌ Daily report error: {e}")

print("\n" + "=" * 50)
print("✅ VERIFICATION COMPLETE")
print("=" * 50)
||||
99
send_test.py
Normal file
99
send_test.py
Normal file
@@ -0,0 +1,99 @@
|
||||
# -*- coding: utf-8 -*-
"""
Standalone test email - subscription expired style, in our black template.
Sends to the hard-coded RECIPIENTS list below, plus BCC. No DB. No real data.
DELETE THIS FILE after confirming.

Run: python send_test.py
"""

import os
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from dotenv import load_dotenv

# Pull SMTP credentials from .env without overriding already-set env vars.
load_dotenv(dotenv_path=".env", override=False)

SMTP_HOST = os.getenv("SMTP_HOST")
SMTP_PORT = int(os.getenv("SMTP_PORT", "587"))
SMTP_USER = os.getenv("SMTP_USER")
SMTP_PASSWORD = os.getenv("SMTP_PASSWORD")
SMTP_FROM = os.getenv("SMTP_FROM_EMAIL")
SMTP_NAME = os.getenv("SMTP_FROM_NAME", "Workolik Team")
# Visible ("To") recipients and blind-copy recipients for the test message.
RECIPIENTS = ["Darshan@caman.au","darshan@caman.com.au","workolik360@gmail.com","ColinA@caman.au","ColinA@caman.com.au","tabs@tuckerfresh.com.au","jay@tuckerfresh.com.au","sanjay@tuckerfresh.com.au","veer@tuckerfresh.com.au"]
BCC_RECIPIENTS= ["fazulilahi@gmail.com"]
SUBJECT = "SUBSCRIPTION EXPIRED - Daily Digest - 2026-04-03"

# Store names shown in the summary table (static placeholder data).
STORES = [
    "Porters Liquor Claremont - PC",
    "Porters Iluka - IP",
    "Cellarbrations at Morris Place - ML",
    "Cellarbrations at Lynwood - CL",
    "Cellarbrations at Nicholson Road - NL",
    "Cellarbrations at Treeby - CL",
    "The Bottle-O Rossmoyne - RC",
    "Porters Liquor Piara Waters - PL",
]

# Reusable cell fragments: a red cross and a muted "Nill" placeholder.
X_CELL = "<span style=\"font-size:14px;\">❌</span>"
NILL = "<span style=\"color:#9AA4B2;\">Nill</span>"

# One table row per store: name + three crossed-out columns + Nill payments.
store_rows = ""
for name in STORES:
    store_rows += (
        "<tr>"
        f"<td style=\"padding:10px;border-top:1px solid #1F2937;color:#F8FAFC;\">{name}</td>"
        f"<td style=\"padding:10px;border-top:1px solid #1F2937;color:#F8FAFC;\">{X_CELL}</td>"
        f"<td style=\"padding:10px;border-top:1px solid #1F2937;color:#F8FAFC;\">{X_CELL}</td>"
        f"<td style=\"padding:10px;border-top:1px solid #1F2937;color:#F8FAFC;\">{X_CELL}</td>"
        f"<td style=\"padding:10px;border-top:1px solid #1F2937;color:#F8FAFC;\">{NILL}</td>"
        "</tr>"
    )

# Dark-themed summary table wrapping the per-store rows.
table_html = (
    "<div style=\"background:#111827;border-radius:12px;padding:12px;\">"
    "<div style=\"font-weight:700;color:#F8FAFC;margin-bottom:6px;\">Transaction Summary by Store</div>"
    "<table style=\"width:100%;border-collapse:collapse;font-size:12px;\">"
    "<tr>"
    "<th style=\"text-align:left;padding:10px;color:#E2E8F0;\">Store Name</th>"
    "<th style=\"text-align:left;padding:10px;color:#E2E8F0;\">Journal</th>"
    "<th style=\"text-align:left;padding:10px;color:#E2E8F0;\">Banking Journal</th>"
    "<th style=\"text-align:left;padding:10px;color:#E2E8F0;\">Account Sales</th>"
    "<th style=\"text-align:left;padding:10px;color:#E2E8F0;\">Account Payments</th>"
    "</tr>"
    + store_rows
    + "</table>"
    "</div>"
)

# Full message body in the standard digest template.
html = f"""
<div style="font-family: ui-sans-serif, system-ui, -apple-system, Segoe UI, Roboto, Helvetica, Arial; color:#0f172a; font-size:13px; line-height:1.5;">
  <p style="margin:0 0 8px 0">Hello <strong>Tucker Fresh</strong>,</p>
  <p style="margin:0 0 8px 0">It looks like your subscription has expired. Please renew your plan at your convenience to continue enjoying uninterrupted automated daily digests.</p>
  <p style="margin:0 0 12px 0">If you have any questions or need assistance, feel free to reply to this email - we’re happy to help!</p>
  {table_html}
  <p style="margin:12px 0 4px 0; font-size:12px; color:#64748b;"><strong style="color:#0f172a;">Status Note:</strong> * No Event IDs generated.</p>
  <p style="margin:0 0 12px 0; font-size:12px; color:#64748b;"><strong style="color:#0f172a;">Data Sync:</strong> Suspended.</p>
  <p style="margin:12px 0 6px 0">Thank you for staying updated with us. Please contact support to reactivate your account.</p>
  <p style="margin:0">Best regards,<br/><strong>Workolik Team</strong></p>
</div>
"""

msg = MIMEMultipart("alternative")
msg["From"] = f"{SMTP_NAME} <{SMTP_FROM}>"
msg["To"] = ", ".join(RECIPIENTS)
msg["Subject"] = SUBJECT
msg.attach(MIMEText(html, "html"))

# BCC addresses go only into the SMTP envelope, never into the headers.
all_recipients = RECIPIENTS + BCC_RECIPIENTS

try:
    server = smtplib.SMTP(SMTP_HOST, SMTP_PORT, timeout=30)
    server.starttls()
    server.login(SMTP_USER, SMTP_PASSWORD)
    server.sendmail(SMTP_FROM, all_recipients, msg.as_string())
    server.quit()
    print(f"Sent to {len(all_recipients)} recipients (including BCC)")
except Exception as e:
    print(f"Failed: {e}")
|
||||
Reference in New Issue
Block a user