diff --git a/modules/EcmInvoiceOuts/ai/analysisAI.py b/modules/EcmInvoiceOuts/ai/analysisAI.py
index 337346e6..5fd6f295 100644
--- a/modules/EcmInvoiceOuts/ai/analysisAI.py
+++ b/modules/EcmInvoiceOuts/ai/analysisAI.py
@@ -2,60 +2,117 @@
# -*- coding: utf-8 -*-
"""
-analysisAI.py — fetches data from MySQL, computes the pre-aggregates, renders HTML, and appends an AI analysis.
+analysisAI.py — fetches data from MySQL, computes ONLY the SELECTED pre-aggregates,
+renders HTML and (optionally) appends an AI analysis, only when it was requested.
-ENVIRONMENT VARIABLES (all have default values):
-  OPENAI_API_KEY  - OpenAI key (when empty -> fallback without AI)
-  OPENAI_MODEL    - e.g. gpt-4.1 (default), alternatively gpt-4.1-mini
-  MYSQL_HOST      - MySQL host (default: twinpol-mysql56 or localhost)
-  MYSQL_USER      - MySQL user (default: root)
-  MYSQL_PASSWORD  - MySQL password (default: rootpassword)
-  MYSQL_DATABASE  - database name (default: preDb_0dcc87940d3655fa574b253df04ca1c3)
-  MYSQL_PORT      - MySQL port (default: 3306)
-  PERIOD_FROM     - start date (YYYY-MM-DD); when missing -> previous full month
-  PERIOD_TO       - end date (YYYY-MM-DD, exclusive); when missing -> first day of the current month
-  INVOICE_TYPE    - document type (default: normal)
+CLI parameters (from the PHP form):
+  --date-from YYYY-MM-DD
+  --date-to   YYYY-MM-DD   (shifted internally by +1 day, because the SQL condition is '< date_to')
+  --metric NAME            (may be repeated: --metric a --metric b ...)
+  --metrics CSV            (optional alternative: --metrics a,b,c)
+  --ai true|false          (whether to run the AI analysis; only when the selected pre-aggregates contain data)
+
+Pre-aggregates:
+  - kpis (aliases: basic, basic_totals) - basic KPIs: sales, quantity, documents, ASP
+  - daily_sales, product_summary, customer_summary, product_daily,
+    top10_products_by_sales, top10_customers_by_sales (from preaggregates.py)
"""
-import os, sys, json, math, time, warnings
-from datetime import date, timedelta
+import os, sys, json, math, time, warnings, argparse, traceback, html
+from datetime import date, timedelta, datetime
-API_KEY = "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"
-
-#5 pro
-#API_KEY = "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"
-# ──(1) Silence urllib3 warnings (LibreSSL / old OpenSSL) ──────────────────────
+# (1) Silences urllib3 warnings (LibreSSL / old OpenSSL)
try:
from urllib3.exceptions import NotOpenSSLWarning
warnings.filterwarnings("ignore", category=NotOpenSSLWarning)
except Exception:
pass
-# ──(2) External imports ───────────────────────────────────────────────────────
+# (2) External imports
import requests
import mysql.connector
+import pandas as pd
-# Your pre-aggregates (must be in the same directory / on PYTHONPATH)
-from preaggregates import compute_preaggregates, serialize_for_ai
+LOOKER_URL = "https://lookerstudio.google.com/u/0/reporting/107d4ccc-e7eb-4c38-8dce-00700b44f60e/page/ba1YF"
-# ──(3) AI key configuration ───────────────────────────────────────────────────
-# Put the key here if you want it hard-coded; otherwise leave it empty:
-API_KEY_HARDCODE = API_KEY # e.g. "sk-xxxx..." (NOT RECOMMENDED in production)
+# ========== AI KEY CONFIGURATION ==========
+API_KEY = "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"
+API_KEY_HARDCODE = API_KEY
+
+# === Pre-aggregates import ===
+from preaggregates import serialize_for_ai
+import preaggregates as pre # pre.AGGREGATORS, pre.to_df
+
+# ========== UTILITIES ==========
+
+def html_fatal(msg, title="Błąd"):
+    sys.stdout.write(
+        '<div style="padding:10px;border-radius:8px;">'
+        f'<h3>{html.escape(title)}</h3>'
+        f'<pre>{html.escape(msg)}</pre>'
+        '</div>'
+    )
+    sys.exit(1)
+
+def connect_html_or_die(cfg, label="MySQL"):
+ try:
+ return mysql.connector.connect(**cfg)
+ except mysql.connector.Error as e:
+ host = cfg.get("host"); port = cfg.get("port"); user = cfg.get("user")
+ base = (f"[{label}] Błąd połączenia ({host}:{port} jako '{user}').\n"
+ f"errno={getattr(e,'errno',None)} sqlstate={getattr(e,'sqlstate',None)}\n"
+ f"msg={getattr(e,'msg',str(e))}")
+ if os.environ.get("DEBUG"):
+ base += "\n\n" + traceback.format_exc()
+ html_fatal(base, title="Błąd połączenia MySQL")
-# ──(4) Utils ───────────────────────────────────────────────────────────────────
def getenv(k, d=None):
return os.environ.get(k, d)
def last_full_month_bounds():
- """Zwraca (from_iso, to_iso) dla poprzedniego pełnego miesiąca."""
today_first = date.today().replace(day=1)
to_dt = today_first
prev_last = today_first - timedelta(days=1)
from_dt = prev_last.replace(day=1)
return from_dt.isoformat(), to_dt.isoformat()
+def add_one_day(iso_date):
+ try:
+ return (datetime.strptime(iso_date, "%Y-%m-%d") + timedelta(days=1)).strftime("%Y-%m-%d")
+ except Exception:
+        return iso_date  # on failure, return the input unchanged
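
The +1-day shift is what reconciles the form's inclusive `--date-to` with the exclusive `register_date < %s` bound in the SQL further down; a quick sketch of the edge cases, assuming the helper as defined above:

```python
# Month rollover is handled by datetime arithmetic; malformed input is
# passed through unchanged so the error surfaces later, in SQL, not here.
assert add_one_day("2024-01-31") == "2024-02-01"
assert add_one_day("not-a-date") == "not-a-date"
```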
+
+def safe_num(v, ndigits=None):
+ try:
+ f = float(v)
+ if not math.isfinite(f):
+ return None
+ return round(f, ndigits) if ndigits is not None else f
+ except Exception:
+ return None
+
+def safe_date(v):
+ if v is None:
+ return None
+ try:
+ if hasattr(v, "date"):
+ return str(v.date())
+ s = str(v)
+ if len(s) >= 10 and s[4] == '-' and s[7] == '-':
+ return s[:10]
+ return s
+ except Exception:
+ return None
+
+def fmt_money(v):
+ try:
+ return "{:,.2f}".format(float(v)).replace(",", " ").replace(".", ",")
+ except Exception:
+ return str(v)
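
`fmt_money` formats with US separators first and then swaps them into the Polish convention (space for thousands, comma for decimals); for example, assuming the function as defined above:

```python
# "{:,.2f}" yields "1,234,567.50"; the two replace() calls turn it into
# the Polish "1 234 567,50". Non-numeric values fall back to str().
assert fmt_money(1234567.5) == "1 234 567,50"
assert fmt_money("n/a") == "n/a"
```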
+
def compact_table(table, limit=30):
- """Przytnij listę rekordów (list[dict]) i znormalizuj liczby (NaN/Inf -> None)."""
out = []
if not table:
return out
@@ -71,21 +128,8 @@ def compact_table(table, limit=30):
out.append(new)
return out
-def build_ai_payload(serialized, period_label):
- """Kompaktowy JSON do AI (rozmiar przycięty, ale zawiera wszystkie główne tabele)."""
- return {
- "kpis_hint": {"period_label": period_label},
- "daily_sales": compact_table(serialized.get("daily_sales"), 60),
- "product_summary": compact_table(serialized.get("product_summary"), 100),
- "customer_summary": compact_table(serialized.get("customer_summary"), 100),
- "top10_products_by_sales": compact_table(serialized.get("top10_products_by_sales"), 10),
- "top10_customers_by_sales": compact_table(serialized.get("top10_customers_by_sales"), 10),
- "product_daily_sample": compact_table(serialized.get("product_daily"), 100),
- }
-
def call_openai_chat(api_key, model, system_prompt, user_payload_json,
temperature=0.3, connect_timeout=10, read_timeout=90, max_retries=3):
- """Wywołanie Chat Completions (retry + backoff). Zwraca HTML (sekcję) od AI."""
url = "https://api.openai.com/v1/chat/completions"
headers = {"Authorization": "Bearer " + api_key, "Content-Type": "application/json"}
body = {
@@ -95,7 +139,6 @@ def call_openai_chat(api_key, model, system_prompt, user_payload_json,
{"role": "user", "content": "Dane (JSON):\n\n" + user_payload_json},
],
"temperature": temperature,
- # "max_tokens": 1200, # opcjonalnie ogranicz długość odpowiedzi
}
last_err = None
for attempt in range(1, int(max_retries) + 1):
@@ -110,14 +153,7 @@ def call_openai_chat(api_key, model, system_prompt, user_payload_json,
time.sleep(min(2 ** attempt, 10))
raise RuntimeError("OpenAI request failed: {}".format(last_err))
-def fmt_money(v):
- try:
- return "{:,.2f}".format(float(v)).replace(",", " ").replace(".", ",")
- except Exception:
- return str(v)
-
def html_table(records, title=None, max_rows=20):
- """Proste generowanie tabeli HTML z listy dict-ów (lekki CSS inline w
"""
-# ──(5) Main logic ──────────────────────────────────────────────────────────────
+
+# ========== UPSERTS INTO REPORTING ==========
+
+def _ensure_rank_and_share(items, key_sales="sales"):
+ if not items: return
+ total_sales = sum((x.get(key_sales) or 0) for x in items)
+ sorted_items = sorted(
+ items,
+ key=lambda x: ((x.get(key_sales) or 0), str(x.get("product_code") or x.get("customer_name") or "")),
+ reverse=True
+ )
+ rank_map, rank = {}, 1
+ for x in sorted_items:
+ key = x.get("product_code") or x.get("customer_name") or ""
+ if key not in rank_map:
+ rank_map[key] = rank
+ rank += 1
+ for x in items:
+ key = x.get("product_code") or x.get("customer_name") or ""
+ if not x.get("rank_in_period"):
+ x["rank_in_period"] = rank_map.get(key, 0)
+ if "mix_share_sales" not in x:
+ x["mix_share_sales"] = ((x.get(key_sales) or 0) / total_sales) if total_sales else 0.0
+
+def upsert_daily_sales(cur, daily):
+ if not daily: return 0
+ sql = """
+ INSERT INTO reporting_daily_sales
+ (period_date, qty, sales, docs, asp, sales_rolling7, sales_dod_pct)
+ VALUES (%s,%s,%s,%s,%s,%s,%s)
+ ON DUPLICATE KEY UPDATE
+ qty=VALUES(qty), sales=VALUES(sales), docs=VALUES(docs),
+ asp=VALUES(asp), sales_rolling7=VALUES(sales_rolling7), sales_dod_pct=VALUES(sales_dod_pct),
+ generated_at=CURRENT_TIMESTAMP
+ """
+ rows = []
+ for r in daily:
+ period_date = safe_date(r.get("register_date") or r.get("period_date") or r.get("date"))
+ rows.append((
+ period_date,
+ safe_num(r.get("qty")),
+ safe_num(r.get("sales")),
+ safe_num(r.get("docs")),
+ safe_num(r.get("asp"), 6),
+ safe_num(r.get("sales_rolling7"), 6),
+ safe_num(r.get("sales_pct_change_dod") or r.get("sales_dod_pct"), 6),
+ ))
+ cur.executemany(sql, rows)
+ return len(rows)
+
+def upsert_product_summary(cur, prod, period_from, period_to):
+ if not prod: return 0
+ _ensure_rank_and_share(prod, key_sales="sales")
+ sql = """
+ INSERT INTO reporting_product_summary
+ (period_start, period_end, product_code, product_name, qty, sales, docs,
+ asp_weighted, mix_share_sales, rank_in_period)
+ VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)
+ ON DUPLICATE KEY UPDATE
+ qty=VALUES(qty), sales=VALUES(sales), docs=VALUES(docs),
+ asp_weighted=VALUES(asp_weighted), mix_share_sales=VALUES(mix_share_sales),
+ rank_in_period=VALUES(rank_in_period), generated_at=CURRENT_TIMESTAMP
+ """
+ rows = []
+ for r in prod:
+ rows.append((
+ period_from, period_to,
+ r.get("product_code"), r.get("product_name"),
+ safe_num(r.get("qty")),
+ safe_num(r.get("sales")),
+ safe_num(r.get("docs")),
+ safe_num(r.get("asp_weighted"), 6),
+ safe_num(r.get("mix_share_sales"), 6),
+ int(r.get("rank_in_period") or 0),
+ ))
+ cur.executemany(sql, rows)
+ return len(rows)
+
+def upsert_customer_summary(cur, cust, period_from, period_to):
+ if not cust: return 0
+ _ensure_rank_and_share(cust, key_sales="sales")
+ sql = """
+ INSERT INTO reporting_customer_summary
+ (period_start, period_end, customer_name, qty, sales, docs,
+ asp_weighted, mix_share_sales, rank_in_period)
+ VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s)
+ ON DUPLICATE KEY UPDATE
+ qty=VALUES(qty), sales=VALUES(sales), docs=VALUES(docs),
+ asp_weighted=VALUES(asp_weighted), mix_share_sales=VALUES(mix_share_sales),
+ rank_in_period=VALUES(rank_in_period), generated_at=CURRENT_TIMESTAMP
+ """
+ rows = []
+ for r in cust:
+ rows.append((
+ period_from, period_to,
+ r.get("customer_name"),
+ safe_num(r.get("qty")),
+ safe_num(r.get("sales")),
+ safe_num(r.get("docs")),
+ safe_num(r.get("asp_weighted"), 6),
+ safe_num(r.get("mix_share_sales"), 6),
+ int(r.get("rank_in_period") or 0),
+ ))
+ cur.executemany(sql, rows)
+ return len(rows)
+
+def upsert_product_daily(cur, prod_daily):
+ if not prod_daily: return 0
+ sql = """
+ INSERT INTO reporting_product_daily
+ (period_date, product_code, product_name, qty, sales, asp)
+ VALUES (%s,%s,%s,%s,%s,%s)
+ ON DUPLICATE KEY UPDATE
+ qty=VALUES(qty), sales=VALUES(sales), asp=VALUES(asp),
+ generated_at=CURRENT_TIMESTAMP
+ """
+ rows = []
+ for r in prod_daily:
+ period_date = safe_date(r.get("register_date") or r.get("period_date") or r.get("date"))
+ qty = safe_num(r.get("qty"))
+ sales = safe_num(r.get("sales"))
+ asp = safe_num((sales / qty) if (qty and sales is not None and qty != 0) else r.get("asp"), 6)
+ rows.append((
+ period_date,
+ r.get("product_code"),
+ r.get("product_name"),
+ qty, sales, asp
+ ))
+ cur.executemany(sql, rows)
+ return len(rows)
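
All four writers share the same idempotent pattern: `executemany` over parameterized row tuples plus `ON DUPLICATE KEY UPDATE`, so re-running the report for the same period overwrites rows instead of duplicating them. A condensed sketch of the idiom (the table and column names here are illustrative, not from the patch):

```python
def upsert_rows(cur, rows):
    # Requires a UNIQUE or PRIMARY key on `k`; conflicting inserts then
    # update `v` in place, which is what makes reruns idempotent.
    sql = """
        INSERT INTO example_table (k, v)
        VALUES (%s, %s)
        ON DUPLICATE KEY UPDATE v = VALUES(v)
    """
    cur.executemany(sql, rows)
    return len(rows)
```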
+
+# ========== ARGPARSE & SELECTION LOGIC ==========
+
+def parse_cli_args():
+ p = argparse.ArgumentParser()
+ p.add_argument('--date-from', dest='date_from', required=False, help='YYYY-MM-DD')
+ p.add_argument('--date-to', dest='date_to', required=False, help='YYYY-MM-DD (inclusive, we add +1 day internally)')
+    # accept both forms: repeated --metric and (optionally) a --metrics CSV
+    p.add_argument('--metric', dest='metric', action='append', default=[], help='Pre-aggregate name; may be repeated')
+    p.add_argument('--metrics', dest='metrics', action='append', default=[], help='CSV: a,b,c (may be repeated)')
+ p.add_argument('--ai', dest='ai', choices=['true','false'], default='false')
+ return p.parse_args()
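
The `action='append'` flags accumulate across repeats; a quick demonstration with an explicit argv (the real function parses `sys.argv`):

```python
import argparse

p = argparse.ArgumentParser()
p.add_argument('--metric', action='append', default=[])
ns = p.parse_args(['--metric', 'kpis', '--metric', 'daily_sales'])
assert ns.metric == ['kpis', 'daily_sales']
```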
+
+def collect_metric_names(args):
+ names = []
+    # from --metric (repeatable)
+ if args.metric:
+ names.extend([s.strip() for s in args.metric if s and s.strip()])
+    # from --metrics (may appear multiple times; each entry may be CSV)
+ for entry in (args.metrics or []):
+ if not entry:
+ continue
+ for part in str(entry).replace(';', ',').replace(' ', ',').split(','):
+ part = part.strip()
+ if part:
+ names.append(part)
+    # aliases for kpis
+ alias_map = {'basic': 'kpis', 'basic_totals': 'kpis'}
+ names = [alias_map.get(n, n) for n in names]
+    # order-preserving deduplication
+ seen = set()
+ uniq = []
+ for n in names:
+ if n not in seen:
+ seen.add(n)
+ uniq.append(n)
+ return uniq
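
Putting both flag styles together (a sketch with a stand-in namespace object): CSV entries split on commas, semicolons, and spaces, the `basic`/`basic_totals` aliases collapse to `kpis`, and the first occurrence wins.

```python
class _Args:  # minimal stand-in for the argparse namespace
    metric = ['basic']
    metrics = ['daily_sales;kpis', 'top10_products_by_sales']

assert collect_metric_names(_Args()) == [
    'kpis', 'daily_sales', 'top10_products_by_sales'
]
```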
+
+def compute_selected_preaggs(rows, names):
+ """
+    Computes ONLY the requested pre-aggregates. ALWAYS returns DataFrames (never lists).
+    Also handles the pseudo-aggregate 'kpis' (basic KPIs).
+ """
+ results = {}
+ if not names:
+ return results
+ df = pre.to_df(rows)
+
+    # kpis: a pseudo-aggregate
+ def compute_kpis_df(dfx):
+ if dfx is None or dfx.empty:
+ return pd.DataFrame([{
+ "total_sales": 0.0,
+ "total_qty": 0.0,
+ "total_docs": 0,
+ "asp": None,
+ }])
+ total_sales = float(dfx["total_netto"].sum())
+ total_qty = float(dfx["quantity"].sum())
+ total_docs = int(dfx["document_no"].nunique())
+ asp = (total_sales / total_qty) if total_qty else None
+ return pd.DataFrame([{
+ "total_sales": total_sales,
+ "total_qty": total_qty,
+ "total_docs": total_docs,
+ "asp": asp,
+ }])
+
+ for name in names:
+ if name == 'kpis':
+ results[name] = compute_kpis_df(df)
+ continue
+
+ fn = pre.AGGREGATORS.get(name)
+ if not fn:
+            results[name] = pd.DataFrame()  # unknown aggregate -> empty
+ continue
+ try:
+ out = fn(df)
+ if out is None:
+ results[name] = pd.DataFrame()
+ elif hasattr(out, "copy"):
+ results[name] = out.copy()
+ else:
+ results[name] = pd.DataFrame(out)
+ except Exception:
+            # e.g. top10_* on empty data -> return an empty result
+ results[name] = pd.DataFrame()
+
+ return results
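
Because dispatch goes through `pre.AGGREGATORS`, adding a metric only requires registering a DataFrame-in/DataFrame-out function under its CLI name. A hypothetical example (the `register_date`/`total_netto` column names are assumed from the SELECT in `main()`):

```python
def weekly_sales(df):
    # Bucket rows into ISO weeks and sum net sales per bucket.
    out = df.copy()
    out["week"] = pd.to_datetime(out["register_date"]).dt.strftime("%G-W%V")
    return out.groupby("week", as_index=False)["total_netto"].sum()

pre.AGGREGATORS["weekly_sales"] = weekly_sales  # selectable via --metric weekly_sales
```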
+
+def sanitize_serialized(serialized_dict):
+ """
+    If any aggregate returned an error (e.g. an _error record), replace it with an empty list.
+ """
+ clean = {}
+ for k, records in (serialized_dict or {}).items():
+ if not records:
+ clean[k] = []
+ continue
+ if isinstance(records, list) and isinstance(records[0], dict) and ('_error' in records[0]):
+ clean[k] = []
+ else:
+ clean[k] = records
+ return clean
+
+def has_any_rows(serialized_dict):
+ for records in (serialized_dict or {}).values():
+        if records:  # non-empty list
+ return True
+ return False
+
+# ========== MAIN ==========
+
def main():
-    # DB configuration
+ # --- CLI ---
+ args = parse_cli_args()
+ with_ai = (args.ai == 'true')
+ metric_names = collect_metric_names(args)
+
+    # --- Dates: prefer the CLI; 'date_to' is inclusive (we add +1 day for the SQL '<' bound) ---
+ if args.date_from and args.date_to:
+ period_from, period_to = args.date_from, add_one_day(args.date_to)
+ shown_label = "{} .. {}".format(args.date_from, args.date_to)
+ else:
+ env_from, env_to = getenv("PERIOD_FROM"), getenv("PERIOD_TO")
+ if env_from and env_to:
+ period_from, period_to = env_from, env_to
+            # label for readability: date_to minus 1 day
+ try:
+ to_label = (datetime.strptime(period_to, "%Y-%m-%d") - timedelta(days=1)).strftime("%Y-%m-%d")
+ except Exception:
+ to_label = period_to
+ shown_label = "{} .. {}".format(period_from, to_label)
+ else:
+ period_from, period_to = last_full_month_bounds()
+            # label: previous full month
+ try:
+ to_label = (datetime.strptime(period_to, "%Y-%m-%d") - timedelta(days=1)).strftime("%Y-%m-%d")
+ except Exception:
+ to_label = period_to
+ shown_label = "{} .. {}".format(period_from, to_label)
+
+ # --- DB ---
cfg = {
"host": getenv("MYSQL_HOST", "twinpol-mysql56"),
"user": getenv("MYSQL_USER", "root"),
@@ -196,16 +511,135 @@ def main():
"database": getenv("MYSQL_DATABASE", "preDb_0dcc87940d3655fa574b253df04ca1c3"),
"port": int(getenv("MYSQL_PORT", "3306")),
}
-
-    # Date range
- period_from = getenv("PERIOD_FROM")
- period_to = getenv("PERIOD_TO")
- if not period_from or not period_to:
- period_from, period_to = last_full_month_bounds()
- period_label = "{} .. {}".format(period_from, period_to)
invoice_type = getenv("INVOICE_TYPE", "normal")
-    # AI configuration (API model + UI alias)
+    # --- SQL -> rows (respect the date bounds; type taken from ENV) ---
+ try:
+ cnx = mysql.connector.connect(**cfg)
+ cur = cnx.cursor()
+ if invoice_type:
+ cur.execute(
+ """
+ SELECT i.document_no,
+ i.parent_name,
+ DATE(i.register_date) AS register_date,
+ ii.code,
+ ii.name,
+ ii.quantity,
+ ii.total_netto
+ FROM ecminvoiceoutitems AS ii
+ JOIN ecminvoiceouts AS i ON i.id = ii.ecminvoiceout_id
+ WHERE i.register_date >= %s
+ AND i.register_date < %s
+ AND i.type = %s
+ """,
+ (period_from, period_to, invoice_type),
+ )
+ else:
+ cur.execute(
+ """
+ SELECT i.document_no,
+ i.parent_name,
+ DATE(i.register_date) AS register_date,
+ ii.code,
+ ii.name,
+ ii.quantity,
+ ii.total_netto
+ FROM ecminvoiceoutitems AS ii
+ JOIN ecminvoiceouts AS i ON i.id = ii.ecminvoiceout_id
+ WHERE i.register_date >= %s
+ AND i.register_date < %s
+ """,
+ (period_from, period_to),
+ )
+ rows = cur.fetchall()
+ cur.close()
+ cnx.close()
+ except Exception as e:
+ html_fatal(str(e), title="Błąd połączenia/zapytania MySQL")
+
+    # --- COMPUTE ONLY THE SELECTED PRE-AGGREGATES (including the pseudo 'kpis') ---
+    results = {}
+    serialized = {}
+    if metric_names:
+        results = compute_selected_preaggs(rows, metric_names)
+        serialized = serialize_for_ai(results)
+        serialized = sanitize_serialized(serialized)  # drop any _error entries and treat them as empty
+
+    # --- WRITE to reporting (only what was actually computed) ---
+ try:
+ if serialized:
+ rep_cfg = {
+ "host": "host.docker.internal",
+ "port": 3307,
+ "user": "remote",
+ "password": os.environ.get("REPORTING_PASSWORD", "areiufh*&^yhdua"),
+ "database": "ai",
+ }
+ if os.environ.get("REPORTING_SSL_CA"):
+ rep_cfg["ssl_ca"] = os.environ["REPORTING_SSL_CA"]
+ if os.environ.get("REPORTING_SSL_CERT"):
+ rep_cfg["ssl_cert"] = os.environ["REPORTING_SSL_CERT"]
+ if os.environ.get("REPORTING_SSL_KEY"):
+ rep_cfg["ssl_key"] = os.environ["REPORTING_SSL_KEY"]
+
+ cnx2 = connect_html_or_die(rep_cfg, label="ReportingDB")
+ cur2 = cnx2.cursor()
+
+ if "daily_sales" in serialized:
+ upsert_daily_sales(cur2, serialized.get("daily_sales") or [])
+ if "product_summary" in serialized:
+ upsert_product_summary(cur2, serialized.get("product_summary") or [], period_from, period_to)
+ if "customer_summary" in serialized:
+ upsert_customer_summary(cur2, serialized.get("customer_summary") or [], period_from, period_to)
+ if "product_daily" in serialized:
+ upsert_product_daily(cur2, serialized.get("product_daily") or [])
+
+ cnx2.commit()
+ cur2.close(); cnx2.close()
+ except Exception as e:
+ sys.stderr.write(f"[reporting] ERROR: {e}\n")
+
+    # --- KPIs: if 'kpis' was selected -> take them from results; otherwise derive them from daily_sales; else zeros ---
+ if "kpis" in results and isinstance(results["kpis"], pd.DataFrame) and not results["kpis"].empty:
+ r = results["kpis"].iloc[0]
+ total_sales = r.get("total_sales") or 0
+ total_qty = r.get("total_qty") or 0
+ total_docs = r.get("total_docs") or 0
+ asp = r.get("asp")
+ else:
+ daily = serialized.get("daily_sales") or []
+ total_sales = sum((x.get("sales") or 0) for x in daily) if daily else 0
+ total_qty = sum((x.get("qty") or 0) for x in daily) if daily else 0
+ total_docs = sum((x.get("docs") or 0) for x in daily) if daily else 0
+ asp = (total_sales / total_qty) if total_qty else None
+
+ kpis = [
+ ("Sprzedaż (PLN)", fmt_money(total_sales)),
+ ("Ilość (szt.)", "{:,.0f}".format(total_qty).replace(",", " ")),
+ ("Dokumenty", "{:,.0f}".format(total_docs).replace(",", " ")),
+ ("ASP (PLN/szt.)", fmt_money(asp) if asp is not None else "—"),
+ ]
+
+    # --- HTML sections: render only the ones that were computed ---
+ parts = []
+ if "top10_products_by_sales" in serialized:
+ parts.append(html_table(serialized.get("top10_products_by_sales") or [], title="Top 10 produktów (po sprzedaży)", max_rows=10))
+ if "top10_customers_by_sales" in serialized:
+ parts.append(html_table(serialized.get("top10_customers_by_sales") or [], title="Top 10 klientów (po sprzedaży)", max_rows=10))
+ if "daily_sales" in serialized:
+ parts.append(html_table(serialized.get("daily_sales") or [], title="Sprzedaż dzienna (skrót)", max_rows=30))
+ if "product_summary" in serialized:
+ parts.append(html_table(serialized.get("product_summary") or [], title="Podsumowanie produktów (skrót)", max_rows=30))
+ if "customer_summary" in serialized:
+ parts.append(html_table(serialized.get("customer_summary") or [], title="Podsumowanie klientów (skrót)", max_rows=30))
+ if "product_daily" in serialized:
+ parts.append(html_table(serialized.get("product_daily") or [], title="Produkt × Dzień (próbka)", max_rows=30))
+
+    # --- AI only when --ai true AND at least one selected aggregate has records ---
api_key = API_KEY_HARDCODE or getenv("OPENAI_API_KEY", "")
model = getenv("OPENAI_MODEL", "gpt-4.1")
MODEL_ALIAS = {
@@ -216,95 +650,19 @@ def main():
}
model_alias = MODEL_ALIAS.get(model, model)
- system_prompt = (
- "Jesteś analitykiem sprzedaży. Zwróć TYLKO jedną sekcję HTML (bez //), "
- "może być pojedynczy z nagłówkami i listami. Podsumuj kluczowe trendy (dzień, mix), wskaż top produkty/klientów, "
- "anomalia/odchylenia oraz daj 3–6 praktycznych rekomendacji dla sprzedaży/zaopatrzenia/marketingu. Krótko i konkretnie, po polsku."
- )
-
- # SQL -> rows
- try:
- cnx = mysql.connector.connect(**cfg)
- cur = cnx.cursor()
- cur.execute(
- """
- SELECT i.document_no,
- i.parent_name,
- DATE(i.register_date) AS register_date,
- ii.code,
- ii.name,
- ii.quantity,
- ii.total_netto
- FROM ecminvoiceoutitems AS ii
- JOIN ecminvoiceouts AS i ON i.id = ii.ecminvoiceout_id
- WHERE i.register_date >= %s
- AND i.register_date < %s
- AND i.type = %s
- """,
- (period_from, period_to, invoice_type),
- )
- rows = cur.fetchall()
- cur.close()
- cnx.close()
- except Exception as e:
-        sys.stdout.write(
-            '<div style="padding:10px;border-radius:8px;">'
-            '<h3>Błąd połączenia/zapytania MySQL</h3>'
-            f'<pre>{str(e)}</pre>'
-            '</div>'
-        )
- sys.exit(1)
-
-    # Pre-aggregates
-    try:
-        results = compute_preaggregates(rows)
-        serialized = serialize_for_ai(results)
-    except Exception as e:
-        sys.stdout.write(
-            '<div style="padding:10px;border-radius:8px;">'
-            '<h3>Błąd preagregacji</h3>'
-            f'<pre>{str(e)}</pre>'
-            '</div>'
-        )
- sys.exit(1)
-
-    # KPIs (based on daily_sales)
- daily = serialized.get("daily_sales") or []
- total_sales = sum((r.get("sales") or 0) for r in daily)
- total_qty = sum((r.get("qty") or 0) for r in daily)
- total_docs = sum((r.get("docs") or 0) for r in daily)
- asp = (total_sales / total_qty) if total_qty else None
- kpis = [
- ("Sprzedaż (PLN)", fmt_money(total_sales)),
- ("Ilość (szt.)", "{:,.0f}".format(total_qty).replace(",", " ")),
- ("Dokumenty", "{:,.0f}".format(total_docs).replace(",", " ")),
- ("ASP (PLN/szt.)", fmt_money(asp) if asp is not None else "—"),
- ]
-
-    # HTML sections: RENDER ALL THE KEY PRE-AGGREGATES
- top_prod = serialized.get("top10_products_by_sales") or []
- top_cli = serialized.get("top10_customers_by_sales") or []
- daily_tbl = html_table(serialized.get("daily_sales") or [], title="Sprzedaż dzienna (skrót)", max_rows=30)
- prod_sum_tbl = html_table(serialized.get("product_summary") or [], title="Podsumowanie produktów (skrót)", max_rows=30)
- cust_sum_tbl = html_table(serialized.get("customer_summary") or [], title="Podsumowanie klientów (skrót)", max_rows=30)
- prod_daily_tbl= html_table(serialized.get("product_daily") or [], title="Produkt × Dzień (próbka)", max_rows=30)
- prod_tbl = html_table(top_prod, title="Top 10 produktów (po sprzedaży)", max_rows=10)
- cust_tbl = html_table(top_cli, title="Top 10 klientów (po sprzedaży)", max_rows=10)
-
-    # Data for the AI
- ai_data = build_ai_payload(serialized, period_label)
- ai_json = json.dumps(ai_data, ensure_ascii=False, separators=(",", ":"), default=str)
-
-    # AI call (with a fallback to the mini model on 429: insufficient_quota)
ai_section = ""
- if api_key:
+ if with_ai and has_any_rows(serialized):
try:
+ ai_data = {"kpis_hint": {"period_label": shown_label}}
+ for name, records in serialized.items():
+ ai_data[name] = compact_table(records, 100)
+ ai_json = json.dumps(ai_data, ensure_ascii=False, separators=(",", ":"), default=str)
+
ai_section = call_openai_chat(
- api_key=api_key,
+ api_key=(api_key or ""),
model=model,
- system_prompt=system_prompt,
+ system_prompt=("Jesteś analitykiem sprzedaży. Zwróć TYLKO jedną sekcję HTML (bez //). "
+ "Streszcz kluczowe trendy i daj 3–6 zaleceń. Po polsku."),
user_payload_json=ai_json,
temperature=0.3,
connect_timeout=10,
@@ -314,39 +672,40 @@ def main():
except Exception as e:
err = str(e)
if "insufficient_quota" in err or "You exceeded your current quota" in err:
-                # try a cheaper model
try:
ai_section = call_openai_chat(
- api_key=api_key,
+ api_key=(api_key or ""),
model="gpt-4.1-mini",
- system_prompt=system_prompt,
+ system_prompt=("Jesteś analitykiem sprzedaży. Zwróć TYLKO jedną sekcję HTML (bez //). "
+ "Streszcz kluczowe trendy i daj 3–6 zaleceń. Po polsku."),
user_payload_json=ai_json,
temperature=0.3,
connect_timeout=10,
read_timeout=90,
max_retries=2,
)
- model_alias = "GPT-5 Mini"
+ model_alias = "GPT-4.1-mini"
+                except Exception as ee:
+                    ai_section = (
+                        '<div style="'
-                        f'padding:10px;border-radius:8px;">Brak dostępnego limitu API. {str(ee)}</div>'
+                        'padding:10px;border-radius:8px;">Brak dostępnego limitu API. {}</div>'.format(str(ee))
+                    )
+            else:
+                ai_section = (
+                    '<div style="'
-                    f'padding:10px;border-radius:8px;">Błąd wywołania AI: {err}</div>'
+                    'padding:10px;border-radius:8px;">Błąd wywołania AI: {}</div>'.format(err)
+                )
+    else:
+        ai_section = '<p>Analiza AI wyłączona lub brak wybranych danych.</p>'
+        model_alias = ""
-    # Final HTML (a single <div>)
+    # --- Final HTML ---
report_html = render_report_html(
- period_label=period_label,
+ period_label=shown_label,
kpis=kpis,
- parts=[prod_tbl, cust_tbl, daily_tbl, prod_sum_tbl, cust_sum_tbl, prod_daily_tbl],
+ parts=parts,
ai_section=ai_section,
- model_alias=model_alias if api_key else ""
+ model_alias=(model_alias if (with_ai and has_any_rows(serialized)) else "")
)
sys.stdout.write(report_html)
diff --git a/modules/EcmInvoiceOuts/report_form.php b/modules/EcmInvoiceOuts/report_form.php
new file mode 100644
index 00000000..7697d5b4
--- /dev/null
+++ b/modules/EcmInvoiceOuts/report_form.php
@@ -0,0 +1,177 @@
+<?php
+…
+    exec(… . ' 2>&1', $output, $returnVar);
+
+ $ran = true;
+ $rc = $returnVar;
+ $out = implode("\n", $output);
+ $ok = ($returnVar === 0);
+
+ if (!$ok && $err === '') {
+ $err = "Błąd uruchamiania skryptu Python (kod: " . $rc . "):\n" . $out;
+ }
+ }
+}
+?>
+<!DOCTYPE html>
+<html>
+<head>
+  <meta charset="utf-8">
+  <title>Generator raportu sprzedaży</title>
+  …
+</head>
+<body>
+  <h2>Raport sprzedaży — parametry</h2>
+  …
+  <h3>Użyte parametry</h3>
+  <ul>
+    <li>Od: …</li>
+    <li>Do: …</li>
+    <li>AI: …</li>
+  </ul>
+  <p>Preagregaty:
+  <?php
+    if (…) {
+      foreach (… as $p) {
+        echo '…'.h($p).'…';
+      }
+    } else {
+      echo 'brak';
+    }
+  ?>
+  </p>
+  <h3>Wynik analizy</h3>
+  …
+</body>
+</html>