Initial commit
This commit is contained in:
380
pages/see_logs.py
Normal file
380
pages/see_logs.py
Normal file
@@ -0,0 +1,380 @@
|
||||
import streamlit as st
|
||||
import pandas as pd
|
||||
import plotly.express as px
|
||||
import plotly.graph_objects as go
|
||||
from datetime import datetime, date, timedelta
|
||||
from app_core.db.database import engine
|
||||
from sqlalchemy import text
|
||||
from app_core.ui.layout import render_store_selector
|
||||
|
||||
|
||||
@st.cache_data(ttl=300)  # Cache for 5 minutes
def _load_available_dates(tenant_id: int, days_back: int = 60) -> pd.DataFrame:
    """Return one row per day that has postings for the tenant.

    Args:
        tenant_id: Tenant whose postings are counted.
        days_back: Size of the lookback window in days. Previously this
            parameter was ignored and the SQL hard-coded a 60-day window;
            it is now bound into the query (default preserves old behavior).

    Returns:
        DataFrame with columns ``d`` (datetime64 day) and ``c`` (row count),
        newest day first.
    """
    with engine.connect() as conn:
        dates_df = pd.read_sql(
            'SELECT "created_at"::date AS d, COUNT(*) AS c\n'
            'FROM "tenantpostings"\n'
            'WHERE "created_at" >= (CURRENT_DATE - (%(days)s * INTERVAL \'1 day\'))'
            ' AND "tenant_id" = %(t)s\n'
            'GROUP BY d\n'
            'ORDER BY d DESC',
            conn,
            params={"t": tenant_id, "days": days_back},
        )
    # Callers use the .dt accessor on 'd', so coerce to datetime64 when the
    # driver hands back plain date objects.
    if not pd.api.types.is_datetime64_any_dtype(dates_df['d']):
        dates_df['d'] = pd.to_datetime(dates_df['d'], errors='coerce')
    return dates_df
|
||||
|
||||
|
||||
@st.cache_data(ttl=300)  # Cache for 5 minutes
def _load_daily_data(tenant_id: int, target_date: date):
    """Fetch one day's postings for a tenant, de-duplicated by event.

    Rows carrying a non-blank ``triumph_event`` are collapsed to the row
    with the highest ``id`` per (processing_type, triumph_event) pair so
    logically duplicated events are not double counted; event-less rows
    are kept untouched. The result is sorted by ``id`` descending.
    """
    query = (
        'SELECT * FROM "tenantpostings" '
        'WHERE "created_at"::date = %(d)s AND "tenant_id" = %(t)s '
        'ORDER BY "id" DESC '
        'LIMIT 10000'
    )
    with engine.connect() as conn:
        rows = pd.read_sql(query, conn, params={"d": target_date, "t": tenant_id})

    # Nothing to de-duplicate without data or the event column.
    if rows.empty or 'triumph_event' not in rows.columns:
        return rows

    # Split off rows whose event is blank/NaN — they carry no dedup key.
    event_present = rows['triumph_event'].fillna('').astype(str).str.strip() != ''
    deduped = (
        rows[event_present]
        .sort_values(
            ['processing_type', 'triumph_event', 'id'],
            ascending=[True, True, False],
        )
        .drop_duplicates(subset=['processing_type', 'triumph_event'], keep='first')
    )
    leftovers = rows[~event_present]
    return pd.concat([deduped, leftovers]).sort_values('id', ascending=False)
|
||||
|
||||
|
||||
@st.cache_data(ttl=300)  # Cache for 5 minutes
def _load_trend_data(tenant_id: int, days_back: int = 30) -> pd.DataFrame:
    """Return daily JOURNAL totals for the trend chart.

    Args:
        tenant_id: Tenant whose postings are aggregated.
        days_back: Size of the lookback window in days. Previously this
            parameter was ignored and the SQL hard-coded a 30-day window;
            it is now bound into the query (default preserves old behavior).

    Returns:
        DataFrame with columns ``d`` (datetime64 day, ascending) and
        ``total`` (SUM of ``total_amount`` over JOURNAL rows only).
    """
    with engine.connect() as conn:
        totals_agg = pd.read_sql(
            'SELECT "created_at"::date AS d, SUM("total_amount") AS total\n'
            'FROM "tenantpostings"\n'
            'WHERE "created_at" >= (CURRENT_DATE - (%(days)s * INTERVAL \'1 day\'))'
            ' AND "tenant_id" = %(t)s\n'
            " AND UPPER(COALESCE(\"processing_type\", '')) = 'JOURNAL'\n"
            'GROUP BY d\n'
            'ORDER BY d ASC',
            conn,
            params={"t": tenant_id, "days": days_back},
        )
    # Callers use the .dt accessor on 'd', so coerce to datetime64 when the
    # driver hands back plain date objects.
    if not pd.api.types.is_datetime64_any_dtype(totals_agg['d']):
        totals_agg['d'] = pd.to_datetime(totals_agg['d'], errors='coerce')
    return totals_agg
|
||||
|
||||
|
||||
def _normalize_name(name: str) -> str:
|
||||
return "".join(ch for ch in name.lower() if ch.isalnum())
|
||||
|
||||
|
||||
def _build_display_map(df: pd.DataFrame) -> dict[str, str]:
|
||||
overrides = {
|
||||
"triumph_status": "Status",
|
||||
"triumph_event": "Event",
|
||||
"outlet_name": "Outlet Name",
|
||||
"tenant_id": "Tenant ID",
|
||||
"processing_type": "Processing Type",
|
||||
"total_amount": "Total Amount",
|
||||
"created_at": "Date",
|
||||
"updated_at": "Updated At",
|
||||
"id": "SNo",
|
||||
}
|
||||
display_map: dict[str, str] = {}
|
||||
used: set[str] = set()
|
||||
for col in df.columns:
|
||||
key = col.lower()
|
||||
if key in overrides:
|
||||
display_name = overrides[key]
|
||||
else:
|
||||
# Convert snake_case to Title Case
|
||||
display_name = col.replace("_", " ").title()
|
||||
|
||||
# Ensure unique display names
|
||||
final_name = display_name
|
||||
counter = 1
|
||||
while final_name in used:
|
||||
final_name = f"{display_name} ({counter})"
|
||||
counter += 1
|
||||
|
||||
display_map[col] = final_name
|
||||
used.add(final_name)
|
||||
return display_map
|
||||
|
||||
|
||||
def _pick_existing_columns(df: pd.DataFrame, names: list[str]) -> list[str]:
|
||||
"""Pick columns that exist in the DataFrame from a list of names."""
|
||||
found = []
|
||||
for name in names:
|
||||
if name in df.columns:
|
||||
found.append(name)
|
||||
return found
|
||||
|
||||
|
||||
def _format_date_columns(df: pd.DataFrame) -> pd.DataFrame:
|
||||
"""Format date columns to show only date part"""
|
||||
df_formatted = df.copy()
|
||||
for col in df_formatted.columns:
|
||||
if 'created_at' in col.lower() or 'date' in col.lower():
|
||||
if pd.api.types.is_datetime64_any_dtype(df_formatted[col]):
|
||||
df_formatted[col] = df_formatted[col].dt.date
|
||||
else:
|
||||
# Try to convert to datetime first
|
||||
try:
|
||||
df_formatted[col] = pd.to_datetime(df_formatted[col]).dt.date
|
||||
except:
|
||||
pass
|
||||
return df_formatted
|
||||
|
||||
|
||||
def _journal_total(frame: pd.DataFrame) -> float:
|
||||
"""Sum total_amount for JOURNAL rows only."""
|
||||
if frame is None or frame.empty or 'total_amount' not in frame.columns:
|
||||
return 0.0
|
||||
|
||||
# We assume 'frame' is already de-duplicated by triumph_event at load time
|
||||
if 'processing_type' in frame.columns:
|
||||
mask = frame['processing_type'].astype(str).str.upper() == 'JOURNAL'
|
||||
frame = frame[mask]
|
||||
|
||||
return float(frame['total_amount'].sum()) if not frame.empty else 0.0
|
||||
|
||||
|
||||
def _stat_card(title: str, value: str, color: str, icon: str) -> str:
    """Return an inline-styled HTML snippet for a small KPI metric card.

    Args:
        title: Caption rendered under the value.
        value: Pre-formatted metric text (e.g. "$1,234.00").
        color: Hex color such as "#059669"; two hex digits ("15", "30",
            "20") are appended to derive translucent variants of the hue.
        icon: Emoji rendered at the top of the card.

    Returns:
        HTML string intended for ``st.markdown(..., unsafe_allow_html=True)``.
        Inputs are interpolated unescaped, so callers must pass trusted
        strings only.
    """
    # Hover effects are plain inline JS handlers (lift + stronger shadow).
    return f"""
    <div style="
        background: {color}15;
        border: 1px solid {color}30;
        border-radius: 12px;
        padding: 16px;
        text-align: center;
        box-shadow: 0 4px 12px {color}20;
        transition: transform 0.2s ease, box-shadow 0.2s ease;
        cursor: pointer;
    " onmouseover="this.style.transform='translateY(-2px)'; this.style.boxShadow='0 8px 20px {color}30';"
    onmouseout="this.style.transform='translateY(0px)'; this.style.boxShadow='0 4px 12px {color}20';">
        <div style="font-size: 24px; margin-bottom: 8px;">{icon}</div>
        <div style="font-size: 24px; font-weight: 700; color: {color}; margin-bottom: 4px;">{value}</div>
        <div style="font-size: 14px; color: #64748b; font-weight: 600;">{title}</div>
    </div>
    """
|
||||
|
||||
|
||||
def render_page():
    """Render the daily analytics dashboard for the logged-in user's store.

    Flow: auth gate -> store selection -> report-date selection -> cached DB
    loads -> KPI cards -> 14-day JOURNAL sales trend chart -> searchable
    detail table -> CSV download. All output is rendered via Streamlit;
    nothing is returned.
    """
    # Auth gate: halt the whole script run when no user is in session state.
    if st.session_state.get("auth_user") is None:
        st.warning("Please login to continue.")
        st.stop()

    # Store selector (must be chosen before loading analytics)
    tenant_id, store_label = render_store_selector()
    if not tenant_id:
        st.info("Please choose a store to view analytics.")
        return

    st.markdown("## 📊 Dashboard")
    # Date picker for selecting any date (capped at today)
    picker_col1, _ = st.columns([1, 3])
    with picker_col1:
        selected_date = st.date_input("Report date", value=date.today(), max_value=date.today())
    st.markdown("---")

    # Target date preference is today, but we will fall back to most recent date with data
    today = date.today()  # NOTE(review): assigned but never used below — confirm before removing

    # Database connection with caching
    try:
        # Find most recent available dates with data (last 60 days)
        dates_df = _load_available_dates(tenant_id)

        if dates_df.empty:
            st.warning("No data available in the last 60 days.")
            return

        # Prefer the user-selected date if present; else pick the most recent date
        available_dates = list(dates_df['d'].dt.date)
        if selected_date in available_dates:
            date_shown = selected_date
        else:
            # dates_df is ordered DESC, so index 0 is the newest day with data.
            date_shown = available_dates[0]

        # Comparison dates: the most recent prior dates (up to two)
        prior_dates = [d for d in available_dates if d < date_shown]
        compare_date = prior_dates[0] if prior_dates else None
        compare_date2 = prior_dates[1] if len(prior_dates) > 1 else None

        # Load frames using cached functions
        df = _load_daily_data(tenant_id, date_shown)
        df_compare = _load_daily_data(tenant_id, compare_date) if compare_date else pd.DataFrame()
        df_compare2 = _load_daily_data(tenant_id, compare_date2) if compare_date2 else pd.DataFrame()

        if date_shown == selected_date:
            st.success(f"📅 Showing data for {date_shown.strftime('%B %d, %Y')} ({len(df):,} records)")
        else:
            st.info(f"📅 Showing most recent data: {date_shown.strftime('%B %d, %Y')} ({len(df):,} records)")

    except Exception as e:
        # Broad catch so any DB/cache failure degrades to an error banner
        # instead of a stack trace in the UI.
        st.error(f"Database connection failed: {str(e)}")
        return

    # Calculate key metrics (Total Amount uses JOURNAL only)
    total_amount = _journal_total(df)
    total_transactions = len(df)
    success_count = len(df[df['triumph_status'] == 'success']) if 'triumph_status' in df.columns else 0
    failed_count = len(df[df['triumph_status'] == 'failed']) if 'triumph_status' in df.columns else 0
    pending_count = len(df[df['triumph_status'] == 'pending']) if 'triumph_status' in df.columns else 0

    # Status summary cards
    st.markdown("### 📈 Today's Overview")
    col1, col2, col3, col4, col5 = st.columns(5)

    with col1:
        st.markdown(_stat_card("Total Amount", f"${total_amount:,.2f}", "#059669", "💰"), unsafe_allow_html=True)
    with col2:
        st.markdown(_stat_card("Transactions", f"{total_transactions:,}", "#2563EB", "📊"), unsafe_allow_html=True)
    with col3:
        st.markdown(_stat_card("Success", f"{success_count:,}", "#059669", "✅"), unsafe_allow_html=True)
    with col4:
        st.markdown(_stat_card("Failed", f"{failed_count:,}", "#DC2626", "❌"), unsafe_allow_html=True)
    with col5:
        st.markdown(_stat_card("Pending", f"{pending_count:,}", "#D97706", "⏳"), unsafe_allow_html=True)

    st.markdown("---")

    # Build simple frames map for totals lookup
    frames_by_date = {date_shown: df}
    # NOTE(review): the `in locals()` guards are always true at this point
    # (the names are bound unconditionally in the try block above); the
    # truthiness checks on compare_date/compare_date2 already suffice.
    if 'df_compare' in locals() and compare_date:
        frames_by_date[compare_date] = df_compare
    if 'df_compare2' in locals() and 'compare_date2' in locals() and compare_date2:
        frames_by_date[compare_date2] = df_compare2

    # Stock-like trend line: last 14 days JOURNAL totals, with last 3 days highlighted
    st.markdown("### 📈 Sales Trend")
    totals_agg = _load_trend_data(tenant_id)
    last14 = totals_agg.tail(14).copy() if not totals_agg.empty else pd.DataFrame(columns=['d','total'])
    if not last14.empty:
        x_labels = last14['d'].dt.strftime('%b %d')
        fig_line = go.Figure()
        fig_line.add_trace(
            go.Scatter(
                x=x_labels,
                y=last14['total'],
                mode='lines+markers',
                name='Sales',
                line=dict(color="#2563EB", width=2.6),
                marker=dict(size=4, color="#2563EB"),
                line_shape='spline',
                hovertemplate="%{x}<br>$%{y:,.2f}<extra></extra>",
            )
        )
        # Highlight last 3 points
        last3 = last14.tail(3).reset_index(drop=True)
        colors = ["#94A3B8", "#DC2626", "#16A34A"]  # old->gray, prev->red, latest->green
        labels = ["Prev-2", "Prev", "Latest"]
        for i in range(len(last3)):
            fig_line.add_trace(
                go.Scatter(
                    x=[last3['d'].dt.strftime('%b %d').iloc[i]],
                    y=[last3['total'].iloc[i]],
                    mode='markers',
                    name=labels[i],
                    marker=dict(color=colors[i], size=9, symbol='circle'),
                    hovertemplate=f"{labels[i]}: %{{x}}<br>$%{{y:,.2f}}<extra></extra>",
                )
            )
        # Profit/Loss vs previous day
        # NOTE(review): the guard checks iloc[1] for zero but the delta divides
        # by iloc[-2]; those are the same element only when len(last3) == 3.
        # With exactly 2 points a zero previous-day total could still divide
        # by zero — confirm whether that case can occur.
        if len(last3) >= 2 and last3['total'].iloc[1] != 0:
            shown_total = float(last3['total'].iloc[2]) if len(last3) == 3 else float(last3['total'].iloc[-1])
            prev_total = float(last3['total'].iloc[-2])
            delta = (shown_total - prev_total) / prev_total * 100.0
            arrow = '▲' if delta >= 0 else '▼'
            color = '#16A34A' if delta >= 0 else '#DC2626'
            fig_line.add_annotation(
                x=1, y=1.1, xref='paper', yref='paper', showarrow=False,
                text=f"{arrow} {delta:.1f}% vs {last3['d'].dt.strftime('%b %d').iloc[-2]}",
                font=dict(color=color, size=14), align='right'
            )
        fig_line.update_layout(
            height=320,
            showlegend=True,
            yaxis_title="Total Amount ($)",
            xaxis_title=None,
            margin=dict(t=30, b=30, l=30, r=20),
            plot_bgcolor='white',
            hovermode='x unified'
        )
        fig_line.update_yaxes(showgrid=True, gridcolor='#E5E7EB', zeroline=False)
        fig_line.update_xaxes(showgrid=False, zeroline=False)
        st.plotly_chart(fig_line, use_container_width=True)

    st.markdown("---")

    # Data table section
    st.markdown("### 📋 Detailed Data")

    # Minimal columns for default view
    minimal_names = [
        "id",
        "created_at",
        "outlet_name",
        "processing_type",
        "total_amount",
        "triumph_status",
        "triumph_event",
    ]
    minimal_cols = _pick_existing_columns(df, minimal_names)

    # Controls row: search only
    q = st.text_input("Search", placeholder="Type to filter rows across all columns")

    # Filter data based on search
    if q:
        # NOTE(review): str.contains treats q as a regex by default, so
        # special characters typed in the search box may raise — confirm
        # whether literal matching (regex=False) was intended.
        mask = df.astype(str).apply(lambda x: x.str.contains(q, case=False, na=False)).any(axis=1)
        df_filtered = df[mask]
    else:
        df_filtered = df.copy()

    # Always use minimal columns
    display_cols = minimal_cols if minimal_cols else list(df_filtered.columns[:8])

    # Build display names
    display_map = _build_display_map(df_filtered)

    # Format the display dataframe
    df_display = df_filtered[display_cols].copy()
    df_display.columns = [display_map.get(col, col) for col in display_cols]
    # Format date columns
    df_display = _format_date_columns(df_display)

    # Format numeric columns (e.g. "Total Amount" -> "$1,234.00")
    for col in df_display.columns:
        if 'amount' in col.lower() and df_display[col].dtype in ['float64', 'int64']:
            df_display[col] = df_display[col].apply(lambda x: f"${x:,.2f}" if pd.notna(x) else "")

    # Always apply status styling
    if 'Status' in df_display.columns:
        def style_status(val):
            # Per-cell CSS for the Status column (green/red/amber badges).
            if val == 'success':
                return 'background-color: #D1FAE5; color: #065F46; font-weight: 600;'
            elif val == 'failed':
                return 'background-color: #FEE2E2; color: #991B1B; font-weight: 600;'
            elif val == 'pending':
                return 'background-color: #FEF3C7; color: #92400E; font-weight: 600;'
            return ''

        styled_df = df_display.style.map(style_status, subset=['Status'])
        st.dataframe(styled_df, use_container_width=True, height=400)
    else:
        st.dataframe(df_display, use_container_width=True, height=400)

    # Download button
    # NOTE(review): nesting st.download_button inside st.button makes the
    # download control vanish on the next rerun (known Streamlit pitfall);
    # confirm whether a bare st.download_button was intended.
    if st.button("📥 Download Today's Data as CSV", type="primary"):
        csv = df_filtered.to_csv(index=False)
        st.download_button(
            label="Download CSV",
            data=csv,
            file_name=f"workolik_data_{date_shown.strftime('%Y%m%d')}.csv",
            mime="text/csv"
        )
|
||||
Reference in New Issue
Block a user