crm.twinpol.com/modules/EcmInvoiceOuts/ai/analysisAI.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
analysisAI.py — pobiera dane z MySQL, liczy wyłącznie WSKAZANE preagregaty,
renderuje HTML i (opcjonalnie) dodaje analizę AI — tylko jeśli ją zaznaczysz.
Parametry CLI (z formularza PHP):
--date-from YYYY-MM-DD
--date-to YYYY-MM-DD (zamieniane wewnętrznie na +1 dzień, bo SQL ma warunek '< date_to')
--metric NAZWA (można podać wiele razy: --metric a --metric b ...)
--metrics CSV (opcjonalnie alternatywnie: --metrics a,b,c)
--ai true|false (czy uruchomić analizę AI — tylko gdy są preagregaty z danymi)
Preagregaty:
- kpis (aliasy: basic, basic_totals) — podstawowe KPI: sprzedaż, ilość, dokumenty, ASP
- daily_sales, product_summary, customer_summary, product_daily,
top10_products_by_sales, top10_customers_by_sales (z preaggregates.py)
"""
import os, sys, json, math, time, warnings, argparse, traceback, html
from datetime import date, timedelta, datetime
# (1) Silence urllib3 warnings (LibreSSL / old OpenSSL)
try:
from urllib3.exceptions import NotOpenSSLWarning
warnings.filterwarnings("ignore", category=NotOpenSSLWarning)
except Exception:
pass
# (2) Third-party imports
import requests
import mysql.connector
import pandas as pd
LOOKER_URL = "https://lookerstudio.google.com/u/0/reporting/107d4ccc-e7eb-4c38-8dce-00700b44f60e/page/ba1YF"
# ========== AI KEY CONFIGURATION ==========
# Never commit a live key; read it from the environment instead.
API_KEY = os.environ.get("OPENAI_API_KEY", "")
API_KEY_HARDCODE = API_KEY
# === Pre-aggregate imports ===
from preaggregates import serialize_for_ai
import preaggregates as pre # pre.AGGREGATORS, pre.to_df
# ========== UTILITIES ==========
def html_fatal(msg, title="Błąd"):
sys.stdout.write(
'<div style="font-family:system-ui,-apple-system,Segoe UI,Roboto,Arial,sans-serif;'
'max-width:900px;margin:24px auto;padding:16px 20px;border:1px solid #fecaca;'
'border-radius:12px;background:#fff5f5;color:#991b1b;">'
f'<h3 style="margin:0 0 8px;font-size:18px;">{html.escape(title)}</h3>'
f'<pre style="white-space:pre-wrap;margin:0;">{html.escape(msg)}</pre>'
'</div>'
)
sys.exit(1)
def connect_html_or_die(cfg, label="MySQL"):
try:
return mysql.connector.connect(**cfg)
except mysql.connector.Error as e:
host = cfg.get("host"); port = cfg.get("port"); user = cfg.get("user")
base = (f"[{label}] Błąd połączenia ({host}:{port} jako '{user}').\n"
f"errno={getattr(e,'errno',None)} sqlstate={getattr(e,'sqlstate',None)}\n"
f"msg={getattr(e,'msg',str(e))}")
if os.environ.get("DEBUG"):
base += "\n\n" + traceback.format_exc()
html_fatal(base, title="Błąd połączenia MySQL")
def getenv(k, d=None):
return os.environ.get(k, d)
def last_full_month_bounds():
today_first = date.today().replace(day=1)
to_dt = today_first
prev_last = today_first - timedelta(days=1)
from_dt = prev_last.replace(day=1)
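    # e.g. called on 2025-09-29 this returns ('2025-08-01', '2025-09-01'):
    # the previous full month as a half-open [from, to) range.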
return from_dt.isoformat(), to_dt.isoformat()
def add_one_day(iso_date):
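    # e.g. add_one_day('2025-08-31') -> '2025-09-01'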
try:
return (datetime.strptime(iso_date, "%Y-%m-%d") + timedelta(days=1)).strftime("%Y-%m-%d")
except Exception:
        return iso_date  # on parse failure, fall back to the input unchanged
def safe_num(v, ndigits=None):
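    # e.g. safe_num('3.14159', 2) -> 3.14; safe_num(float('nan')) -> None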
try:
f = float(v)
if not math.isfinite(f):
return None
return round(f, ndigits) if ndigits is not None else f
except Exception:
return None
def safe_date(v):
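    # e.g. safe_date('2025-01-05 10:30:00') -> '2025-01-05'; datetime values are cut to their date part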
if v is None:
return None
try:
if hasattr(v, "date"):
return str(v.date())
s = str(v)
if len(s) >= 10 and s[4] == '-' and s[7] == '-':
return s[:10]
return s
except Exception:
return None
def fmt_money(v):
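    # e.g. fmt_money(1234567.5) -> '1 234 567,50' (Polish-style thousands and decimal separators)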
try:
return "{:,.2f}".format(float(v)).replace(",", " ").replace(".", ",")
except Exception:
return str(v)
def compact_table(table, limit=30):
out = []
if not table:
return out
lim = int(limit)
for i, row in enumerate(table):
if i >= lim: break
new = {}
for k, v in row.items():
if isinstance(v, float):
new[k] = round(v, 6) if math.isfinite(v) else None
else:
new[k] = v
out.append(new)
return out
def call_openai_chat(api_key, model, system_prompt, user_payload_json,
temperature=0.3, connect_timeout=10, read_timeout=90, max_retries=3):
url = "https://api.openai.com/v1/chat/completions"
headers = {"Authorization": "Bearer " + api_key, "Content-Type": "application/json"}
body = {
"model": model,
"messages": [
{"role": "system", "content": system_prompt},
{"role": "user", "content": "Dane (JSON):\n\n" + user_payload_json},
],
"temperature": temperature,
}
    last_err = None
    for attempt in range(1, int(max_retries) + 1):
        try:
            r = requests.post(url, headers=headers, json=body, timeout=(connect_timeout, read_timeout))
            if 200 <= r.status_code < 300:
                data = r.json()
                return data.get("choices", [{}])[0].get("message", {}).get("content", "")
            last_err = RuntimeError("OpenAI HTTP {}: {}".format(r.status_code, r.text))
        except requests.exceptions.RequestException as e:
            last_err = e
        if attempt < int(max_retries):
            time.sleep(min(2 ** attempt, 10))  # capped exponential backoff; no pointless sleep after the final attempt
    raise RuntimeError("OpenAI request failed: {}".format(last_err))
def html_table(records, title=None, max_rows=20):
if not records:
return '<div class="empty">Brak danych</div>'
cols = list(records[0].keys())
body_rows = records[:max_rows]
thead = "".join("<th>{}</th>".format(c) for c in cols)
trs = []
for r in body_rows:
tds = []
for c in cols:
val = r.get(c, "")
            if isinstance(val, (int, float)):
                # money-like columns get thousands/decimal formatting; other numerics print raw
                is_money = any(x in c.lower() for x in ("sales", "total", "netto"))
                tds.append('<td class="num">{}</td>'.format(fmt_money(val) if is_money else val))
            else:
                tds.append('<td>{}</td>'.format(val))
trs.append("<tr>{}</tr>".format("".join(tds)))
cap = '<div class="tbl-title">{}</div>'.format(title) if title else ""
return (
cap +
'<div class="tbl-wrap"><table class="tbl">'
'<thead><tr>{}</tr></thead><tbody>{}</tbody></table></div>'.format(thead, "".join(trs))
)
def render_report_html(period_label, kpis, parts, ai_section, model_alias):
css = (
"font-family:system-ui,-apple-system,Segoe UI,Roboto,Arial,sans-serif;"
"max-width:1200px;margin:24px auto;padding:16px 20px;border:1px solid #e5e7eb;"
"border-radius:12px;background:#fff;color:#111827"
)
kpi_item = (
'<div class="kpi"><div class="kpi-label">{label}</div>'
'<div class="kpi-value">{value}</div></div>'
)
kpi_html = "".join(kpi_item.format(label=lbl, value=val) for (lbl, val) in kpis)
sections_html = "".join(parts)
if ai_section and not ai_section.lstrip().startswith("<div"):
ai_section = '<div class="ai-section">{}</div>'.format(ai_section)
return f"""
<div style="{css}">
<h2 style="margin:0 0 12px;font-size:22px;">Raport sprzedaży — {period_label}</h2>
<div style="display:grid;grid-template-columns:repeat(4,minmax(0,1fr));gap:12px;margin:12px 0 20px;">
{kpi_html}
</div>
{sections_html if sections_html.strip() else '<div class="empty">Nie wybrano żadnych preagregatów — brak sekcji do wyświetlenia.</div>'}
<div style="margin-top:20px;border-top:1px solid #e5e7eb;padding-top:16px;">
<h3 style="margin:0 0 8px;font-size:18px;">Analiza i rekomendacje{(' (AI · ' + model_alias + ')') if model_alias else ''}</h3>
{ai_section if ai_section else '<div style="color:#6b7280">Analiza AI wyłączona lub brak danych.</div>'}
</div>
<!-- FOOTER with a link to Looker Studio -->
<div style="margin-top:20px;border-top:1px dashed #e5e7eb;padding-top:12px;display:flex;justify-content:flex-end;">
<a href="{LOOKER_URL}" target="_blank" rel="noopener"
style="text-decoration:none;padding:8px 12px;border:1px solid #d1d5db;border-radius:8px;
background:#f9fafb;color:#111827;font-weight:600;">
→ Otwórz pełny raport w Looker Studio
</a>
</div>
</div>
<style>
.kpi {{background:#f8fafc;border:1px solid #e5e7eb;border-radius:10px;padding:12px;}}
.kpi-label {{font-size:12px;color:#6b7280;margin-bottom:4px;}}
.kpi-value {{font-size:18px;font-weight:700;}}
.tbl-title {{font-weight:600;margin:16px 0 8px;font-size:15px;}}
.tbl-wrap {{overflow-x:auto;border:1px solid #e5e7eb;border-radius:8px;}}
.tbl {{border-collapse:collapse;width:100%;font-size:14px;}}
.tbl thead th {{text-align:left;background:#f3f4f6;padding:8px;border-bottom:1px solid #e5e7eb;white-space:nowrap;}}
.tbl tbody td {{padding:8px;border-bottom:1px solid #f3f4f6;vertical-align:top;}}
.tbl td.num {{text-align:right;white-space:nowrap;}}
.empty {{color:#6b7280;font-style:italic;margin:8px 0;}}
.ai-section {{background:#f8fafc;border:1px solid #e5e7eb;border-radius:10px;padding:12px;}}
</style>
"""
# ========== REPORTING UPSERTS (same as your existing setup) ==========
def _ensure_rank_and_share(items, key_sales="sales"):
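    # Mutates items in place. Illustrative example:
    #   [{"product_code": "A", "sales": 60}, {"product_code": "B", "sales": 40}]
    #   -> rank_in_period 1 and 2, mix_share_sales 0.6 and 0.4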
if not items: return
total_sales = sum((x.get(key_sales) or 0) for x in items)
sorted_items = sorted(
items,
key=lambda x: ((x.get(key_sales) or 0), str(x.get("product_code") or x.get("customer_name") or "")),
reverse=True
)
rank_map, rank = {}, 1
for x in sorted_items:
key = x.get("product_code") or x.get("customer_name") or ""
if key not in rank_map:
rank_map[key] = rank
rank += 1
for x in items:
key = x.get("product_code") or x.get("customer_name") or ""
if not x.get("rank_in_period"):
x["rank_in_period"] = rank_map.get(key, 0)
if "mix_share_sales" not in x:
x["mix_share_sales"] = ((x.get(key_sales) or 0) / total_sales) if total_sales else 0.0
def upsert_daily_sales(cur, daily):
if not daily: return 0
sql = """
INSERT INTO reporting_daily_sales
(period_date, qty, sales, docs, asp, sales_rolling7, sales_dod_pct)
VALUES (%s,%s,%s,%s,%s,%s,%s)
ON DUPLICATE KEY UPDATE
qty=VALUES(qty), sales=VALUES(sales), docs=VALUES(docs),
asp=VALUES(asp), sales_rolling7=VALUES(sales_rolling7), sales_dod_pct=VALUES(sales_dod_pct),
generated_at=CURRENT_TIMESTAMP
"""
rows = []
for r in daily:
period_date = safe_date(r.get("register_date") or r.get("period_date") or r.get("date"))
rows.append((
period_date,
safe_num(r.get("qty")),
safe_num(r.get("sales")),
safe_num(r.get("docs")),
safe_num(r.get("asp"), 6),
safe_num(r.get("sales_rolling7"), 6),
safe_num(r.get("sales_pct_change_dod") or r.get("sales_dod_pct"), 6),
))
cur.executemany(sql, rows)
return len(rows)
def upsert_product_summary(cur, prod, period_from, period_to):
if not prod: return 0
_ensure_rank_and_share(prod, key_sales="sales")
sql = """
INSERT INTO reporting_product_summary
(period_start, period_end, product_code, product_name, qty, sales, docs,
asp_weighted, mix_share_sales, rank_in_period)
VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)
ON DUPLICATE KEY UPDATE
qty=VALUES(qty), sales=VALUES(sales), docs=VALUES(docs),
asp_weighted=VALUES(asp_weighted), mix_share_sales=VALUES(mix_share_sales),
rank_in_period=VALUES(rank_in_period), generated_at=CURRENT_TIMESTAMP
"""
rows = []
for r in prod:
rows.append((
period_from, period_to,
r.get("product_code"), r.get("product_name"),
safe_num(r.get("qty")),
safe_num(r.get("sales")),
safe_num(r.get("docs")),
safe_num(r.get("asp_weighted"), 6),
safe_num(r.get("mix_share_sales"), 6),
int(r.get("rank_in_period") or 0),
))
cur.executemany(sql, rows)
return len(rows)
def upsert_customer_summary(cur, cust, period_from, period_to):
if not cust: return 0
_ensure_rank_and_share(cust, key_sales="sales")
sql = """
INSERT INTO reporting_customer_summary
(period_start, period_end, customer_name, qty, sales, docs,
asp_weighted, mix_share_sales, rank_in_period)
VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s)
ON DUPLICATE KEY UPDATE
qty=VALUES(qty), sales=VALUES(sales), docs=VALUES(docs),
asp_weighted=VALUES(asp_weighted), mix_share_sales=VALUES(mix_share_sales),
rank_in_period=VALUES(rank_in_period), generated_at=CURRENT_TIMESTAMP
"""
rows = []
for r in cust:
rows.append((
period_from, period_to,
r.get("customer_name"),
safe_num(r.get("qty")),
safe_num(r.get("sales")),
safe_num(r.get("docs")),
safe_num(r.get("asp_weighted"), 6),
safe_num(r.get("mix_share_sales"), 6),
int(r.get("rank_in_period") or 0),
))
cur.executemany(sql, rows)
return len(rows)
def upsert_product_daily(cur, prod_daily):
if not prod_daily: return 0
sql = """
INSERT INTO reporting_product_daily
(period_date, product_code, product_name, qty, sales, asp)
VALUES (%s,%s,%s,%s,%s,%s)
ON DUPLICATE KEY UPDATE
qty=VALUES(qty), sales=VALUES(sales), asp=VALUES(asp),
generated_at=CURRENT_TIMESTAMP
"""
rows = []
for r in prod_daily:
period_date = safe_date(r.get("register_date") or r.get("period_date") or r.get("date"))
qty = safe_num(r.get("qty"))
sales = safe_num(r.get("sales"))
asp = safe_num((sales / qty) if (qty and sales is not None and qty != 0) else r.get("asp"), 6)
rows.append((
period_date,
r.get("product_code"),
r.get("product_name"),
qty, sales, asp
))
cur.executemany(sql, rows)
return len(rows)
# ========== ARGPARSE & SELECTION LOGIC ==========
def parse_cli_args():
p = argparse.ArgumentParser()
p.add_argument('--date-from', dest='date_from', required=False, help='YYYY-MM-DD')
p.add_argument('--date-to', dest='date_to', required=False, help='YYYY-MM-DD (inclusive, we add +1 day internally)')
    # accept both forms: repeatable --metric and (optionally) --metrics CSV
    p.add_argument('--metric', dest='metric', action='append', default=[], help='Pre-aggregate name; may be given multiple times')
    p.add_argument('--metrics', dest='metrics', action='append', default=[], help='CSV: a,b,c (may be given multiple times)')
p.add_argument('--ai', dest='ai', choices=['true','false'], default='false')
return p.parse_args()
def collect_metric_names(args):
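    # e.g. --metric basic --metrics daily_sales,kpis -> ['kpis', 'daily_sales']
    # (the 'basic' alias maps to 'kpis', duplicates are dropped, order is preserved)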
names = []
    # from --metric (repeatable)
if args.metric:
names.extend([s.strip() for s in args.metric if s and s.strip()])
    # from --metrics (may occur several times; each entry may be CSV)
for entry in (args.metrics or []):
if not entry:
continue
for part in str(entry).replace(';', ',').replace(' ', ',').split(','):
part = part.strip()
if part:
names.append(part)
    # aliases for kpis
alias_map = {'basic': 'kpis', 'basic_totals': 'kpis'}
names = [alias_map.get(n, n) for n in names]
    # de-duplicate while preserving order
seen = set()
uniq = []
for n in names:
if n not in seen:
seen.add(n)
uniq.append(n)
return uniq
def compute_selected_preaggs(rows, names):
"""
Liczy TYLKO wskazane preagregaty. ZAWSZE zwraca DataFrame'y (nigdy listy).
Obsługuje pseudo-agregat 'kpis' (podstawowe KPI).
"""
results = {}
if not names:
return results
df = pre.to_df(rows)
    # kpis — pseudo-aggregate
def compute_kpis_df(dfx):
if dfx is None or dfx.empty:
return pd.DataFrame([{
"total_sales": 0.0,
"total_qty": 0.0,
"total_docs": 0,
"asp": None,
}])
total_sales = float(dfx["total_netto"].sum())
total_qty = float(dfx["quantity"].sum())
total_docs = int(dfx["document_no"].nunique())
asp = (total_sales / total_qty) if total_qty else None
return pd.DataFrame([{
"total_sales": total_sales,
"total_qty": total_qty,
"total_docs": total_docs,
"asp": asp,
}])
for name in names:
if name == 'kpis':
results[name] = compute_kpis_df(df)
continue
fn = pre.AGGREGATORS.get(name)
if not fn:
            results[name] = pd.DataFrame()  # unknown aggregate -> empty result
continue
try:
out = fn(df)
if out is None:
results[name] = pd.DataFrame()
elif hasattr(out, "copy"):
results[name] = out.copy()
else:
results[name] = pd.DataFrame(out)
except Exception:
            # e.g. top10_* on empty data -> return an empty result
results[name] = pd.DataFrame()
return results
def sanitize_serialized(serialized_dict):
"""
Jeśli jakikolwiek agregat zwrócił błąd (np. _error), zamieniamy na pustą listę.
"""
clean = {}
for k, records in (serialized_dict or {}).items():
if not records:
clean[k] = []
continue
if isinstance(records, list) and isinstance(records[0], dict) and ('_error' in records[0]):
clean[k] = []
else:
clean[k] = records
return clean
def has_any_rows(serialized_dict):
for records in (serialized_dict or {}).values():
        if records:  # non-empty list
return True
return False
# ========== MAIN ==========
def main():
# --- CLI ---
args = parse_cli_args()
with_ai = (args.ai == 'true')
metric_names = collect_metric_names(args)
    # --- Dates: prefer CLI; 'date_to' is inclusive (we add +1 day for the SQL '<') ---
if args.date_from and args.date_to:
period_from, period_to = args.date_from, add_one_day(args.date_to)
shown_label = "{} .. {}".format(args.date_from, args.date_to)
else:
env_from, env_to = getenv("PERIOD_FROM"), getenv("PERIOD_TO")
if env_from and env_to:
period_from, period_to = env_from, env_to
            # label for readability: date_to minus 1 day
try:
to_label = (datetime.strptime(period_to, "%Y-%m-%d") - timedelta(days=1)).strftime("%Y-%m-%d")
except Exception:
to_label = period_to
shown_label = "{} .. {}".format(period_from, to_label)
else:
period_from, period_to = last_full_month_bounds()
            # label: the previous full month
try:
to_label = (datetime.strptime(period_to, "%Y-%m-%d") - timedelta(days=1)).strftime("%Y-%m-%d")
except Exception:
to_label = period_to
shown_label = "{} .. {}".format(period_from, to_label)
# --- DB ---
cfg = {
"host": getenv("MYSQL_HOST", "twinpol-mysql56"),
"user": getenv("MYSQL_USER", "root"),
"password": getenv("MYSQL_PASSWORD", "rootpassword"),
"database": getenv("MYSQL_DATABASE", "preDb_0dcc87940d3655fa574b253df04ca1c3"),
"port": int(getenv("MYSQL_PORT", "3306")),
}
invoice_type = getenv("INVOICE_TYPE", "normal")
    # --- SQL -> rows (date-bounded; invoice type from ENV) ---
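    # The WHERE clause below uses a half-open range [period_from, period_to);
    # the inclusive --date-to from the CLI was already shifted by +1 day above.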
try:
cnx = mysql.connector.connect(**cfg)
cur = cnx.cursor()
if invoice_type:
cur.execute(
"""
SELECT i.document_no,
i.parent_name,
DATE(i.register_date) AS register_date,
ii.code,
ii.name,
ii.quantity,
ii.total_netto
FROM ecminvoiceoutitems AS ii
JOIN ecminvoiceouts AS i ON i.id = ii.ecminvoiceout_id
WHERE i.register_date >= %s
AND i.register_date < %s
AND i.type = %s
""",
(period_from, period_to, invoice_type),
)
else:
cur.execute(
"""
SELECT i.document_no,
i.parent_name,
DATE(i.register_date) AS register_date,
ii.code,
ii.name,
ii.quantity,
ii.total_netto
FROM ecminvoiceoutitems AS ii
JOIN ecminvoiceouts AS i ON i.id = ii.ecminvoiceout_id
WHERE i.register_date >= %s
AND i.register_date < %s
""",
(period_from, period_to),
)
rows = cur.fetchall()
cur.close()
cnx.close()
except Exception as e:
html_fatal(str(e), title="Błąd połączenia/zapytania MySQL")
    # --- COMPUTE ONLY THE SELECTED PRE-AGGREGATES (including the pseudo 'kpis') ---
results = {}
serialized = {}
if metric_names:
results = compute_selected_preaggs(rows, metric_names)
serialized = serialize_for_ai(results)
        serialized = sanitize_serialized(serialized)  # strip any _error payloads -> treat as empty
else:
serialized = {}
    # --- WRITE to reporting (only what was actually computed) ---
try:
if serialized:
rep_cfg = {
"host": "host.docker.internal",
"port": 3307,
"user": "remote",
"password": os.environ.get("REPORTING_PASSWORD", "areiufh*&^yhdua"),
"database": "ai",
}
if os.environ.get("REPORTING_SSL_CA"):
rep_cfg["ssl_ca"] = os.environ["REPORTING_SSL_CA"]
if os.environ.get("REPORTING_SSL_CERT"):
rep_cfg["ssl_cert"] = os.environ["REPORTING_SSL_CERT"]
if os.environ.get("REPORTING_SSL_KEY"):
rep_cfg["ssl_key"] = os.environ["REPORTING_SSL_KEY"]
cnx2 = connect_html_or_die(rep_cfg, label="ReportingDB")
cur2 = cnx2.cursor()
if "daily_sales" in serialized:
upsert_daily_sales(cur2, serialized.get("daily_sales") or [])
if "product_summary" in serialized:
upsert_product_summary(cur2, serialized.get("product_summary") or [], period_from, period_to)
if "customer_summary" in serialized:
upsert_customer_summary(cur2, serialized.get("customer_summary") or [], period_from, period_to)
if "product_daily" in serialized:
upsert_product_daily(cur2, serialized.get("product_daily") or [])
cnx2.commit()
cur2.close(); cnx2.close()
except Exception as e:
sys.stderr.write(f"[reporting] ERROR: {e}\n")
    # --- KPIs: if 'kpis' was selected -> take from results; otherwise try daily_sales; else zeros ---
kpis = []
if "kpis" in results and isinstance(results["kpis"], pd.DataFrame) and not results["kpis"].empty:
r = results["kpis"].iloc[0]
total_sales = r.get("total_sales") or 0
total_qty = r.get("total_qty") or 0
total_docs = r.get("total_docs") or 0
asp = r.get("asp")
else:
daily = serialized.get("daily_sales") or []
total_sales = sum((x.get("sales") or 0) for x in daily) if daily else 0
total_qty = sum((x.get("qty") or 0) for x in daily) if daily else 0
total_docs = sum((x.get("docs") or 0) for x in daily) if daily else 0
asp = (total_sales / total_qty) if total_qty else None
kpis = [
("Sprzedaż (PLN)", fmt_money(total_sales)),
("Ilość (szt.)", "{:,.0f}".format(total_qty).replace(",", " ")),
("Dokumenty", "{:,.0f}".format(total_docs).replace(",", " ")),
("ASP (PLN/szt.)", fmt_money(asp) if asp is not None else ""),
]
    # --- HTML sections: render only what was computed ---
parts = []
if "top10_products_by_sales" in serialized:
parts.append(html_table(serialized.get("top10_products_by_sales") or [], title="Top 10 produktów (po sprzedaży)", max_rows=10))
if "top10_customers_by_sales" in serialized:
parts.append(html_table(serialized.get("top10_customers_by_sales") or [], title="Top 10 klientów (po sprzedaży)", max_rows=10))
if "daily_sales" in serialized:
parts.append(html_table(serialized.get("daily_sales") or [], title="Sprzedaż dzienna (skrót)", max_rows=30))
if "product_summary" in serialized:
parts.append(html_table(serialized.get("product_summary") or [], title="Podsumowanie produktów (skrót)", max_rows=30))
if "customer_summary" in serialized:
parts.append(html_table(serialized.get("customer_summary") or [], title="Podsumowanie klientów (skrót)", max_rows=30))
if "product_daily" in serialized:
parts.append(html_table(serialized.get("product_daily") or [], title="Produkt × Dzień (próbka)", max_rows=30))
    # --- AI only when: --ai true AND at least one selected aggregate has at least one record ---
api_key = API_KEY_HARDCODE or getenv("OPENAI_API_KEY", "")
model = getenv("OPENAI_MODEL", "gpt-4.1")
MODEL_ALIAS = {
"gpt-4.1": "GPT-4.1",
"gpt-4.1-mini": "GPT-4.1-mini",
"gpt-4o": "GPT-4o",
"gpt-4o-mini": "GPT-4o-mini",
}
model_alias = MODEL_ALIAS.get(model, model)
ai_section = ""
if with_ai and has_any_rows(serialized):
        ai_json = "{}"  # predefine so the quota-fallback path below cannot hit a NameError
        try:
ai_data = {"kpis_hint": {"period_label": shown_label}}
for name, records in serialized.items():
ai_data[name] = compact_table(records, 100)
ai_json = json.dumps(ai_data, ensure_ascii=False, separators=(",", ":"), default=str)
ai_section = call_openai_chat(
api_key=(api_key or ""),
model=model,
system_prompt=("Jesteś analitykiem sprzedaży. Zwróć TYLKO jedną sekcję HTML (bez <html>/<head>/<body>). "
"Streszcz kluczowe trendy i daj 36 zaleceń. Po polsku."),
user_payload_json=ai_json,
temperature=0.3,
connect_timeout=10,
read_timeout=90,
max_retries=3,
)
except Exception as e:
err = str(e)
if "insufficient_quota" in err or "You exceeded your current quota" in err:
try:
ai_section = call_openai_chat(
api_key=(api_key or ""),
model="gpt-4.1-mini",
system_prompt=("Jesteś analitykiem sprzedaży. Zwróć TYLKO jedną sekcję HTML (bez <html>/<head>/<body>). "
"Streszcz kluczowe trendy i daj 36 zaleceń. Po polsku."),
user_payload_json=ai_json,
temperature=0.3,
connect_timeout=10,
read_timeout=90,
max_retries=2,
)
model_alias = "GPT-4.1-mini"
except Exception as ee:
ai_section = (
'<div style="color:#991b1b;background:#fff5f5;border:1px solid #fecaca;'
'padding:10px;border-radius:8px;">Brak dostępnego limitu API. {}</div>'.format(str(ee))
)
else:
ai_section = (
'<div style="color:#991b1b;background:#fff5f5;border:1px solid #fecaca;'
'padding:10px;border-radius:8px;">Błąd wywołania AI: {}</div>'.format(err)
)
else:
ai_section = '<div style="color:#6b7280">Analiza AI wyłączona lub brak wybranych danych.</div>'
model_alias = ""
# --- Finalny HTML ---
report_html = render_report_html(
period_label=shown_label,
kpis=kpis,
parts=parts,
ai_section=ai_section,
model_alias=(model_alias if (with_ai and has_any_rows(serialized)) else "")
)
sys.stdout.write(report_html)
if __name__ == "__main__":
main()