diff --git a/modules/EcmInvoiceOuts/ai/enqueue.php b/modules/EcmInvoiceOuts/ai/enqueue.php
new file mode 100644
index 00000000..d8b10d73
--- /dev/null
+++ b/modules/EcmInvoiceOuts/ai/enqueue.php
@@ -0,0 +1,21 @@
+ $id]);
diff --git a/modules/EcmInvoiceOuts/ai/result.php b/modules/EcmInvoiceOuts/ai/result.php
new file mode 100644
index 00000000..81a59317
--- /dev/null
+++ b/modules/EcmInvoiceOuts/ai/result.php
@@ -0,0 +1,12 @@
+ pl.DataFrame:
+ conn = pymysql.connect(**MYSQL_CONF)
+ try:
+ with conn.cursor() as cur:
+ cur.execute(sql, params)
+ rows = cur.fetchall()
+ finally:
+ conn.close()
+ return pl.from_dicts(rows)
+
def to_csv(df: pl.DataFrame) -> str:
    """Serialize *df* to a CSV string via an in-memory text buffer."""
    sink = io.StringIO()
    df.write_csv(sink)
    return sink.getvalue()
+
# Daily KPI rollup: revenue, units sold, gross-margin % and discount % per
# invoice day.  Bound parameters: (date_from, date_to) — inclusive BETWEEN range.
# NULLIF guards both percentage denominators against division by zero.
SQL_KPIS_DAILY = """
SELECT DATE(invoice_date) AS d,
SUM(net_amount) AS revenue,
SUM(quantity) AS qty,
ROUND(100*SUM(net_amount - cost_amount)/NULLIF(SUM(net_amount),0), 2) AS gross_margin_pct,
ROUND(100*SUM(discount_amount)/NULLIF(SUM(gross_amount),0), 2) AS discount_pct
FROM fact_invoices
WHERE invoice_date BETWEEN %s AND %s
GROUP BY 1
ORDER BY 1;
"""
+
# Top-N segment rollup over a 60-day lookback window.  {axis}/{label} are
# spliced in via str.format() before execution (SQL identifiers cannot be bound
# as parameters); callers must validate them.  Bound parameters:
# (anchor_date, end_date, limit).
# NOTE(review): `AS key` uses the MySQL reserved word KEY unquoted — this is
# likely a syntax error; confirm and backtick-quote the alias if so.
# NOTE(review): LAG(...) OVER(ORDER BY 1) orders the window by the constant 1,
# not by the first select-list column, so trend_30d is order-nondeterministic —
# verify the intended ordering column.
SQL_TOP_SEGMENTS = """
SELECT {axis} AS key,
ANY_VALUE({label}) AS label,
SUM(net_amount) AS revenue,
SUM(quantity) AS qty,
ROUND(100*SUM(net_amount - cost_amount)/NULLIF(SUM(net_amount),0), 2) AS gross_margin_pct,
ROUND(100*(SUM(net_amount) - LAG(SUM(net_amount)) OVER(ORDER BY 1))/
NULLIF(LAG(SUM(net_amount)) OVER(ORDER BY 1),0), 2) AS trend_30d
FROM fact_invoices
WHERE invoice_date BETWEEN DATE_SUB(%s, INTERVAL 60 DAY) AND %s
GROUP BY 1
ORDER BY revenue DESC
LIMIT %s;
"""
+
class AIClient:
    """Thin wrapper around the model SDK.

    Both calls retry with exponential backoff (1s..20s, up to 6 attempts);
    the actual SDK wiring is still a stub.
    """

    def __init__(self, api_key: str):
        self.api_key = api_key

    @retry(wait=wait_exponential(multiplier=1, min=1, max=20), stop=stop_after_attempt(6))
    def structured_analysis(self, prompt: str, schema: Dict[str, Any]) -> Dict[str, Any]:
        """Request a structured-output JSON answer conforming to *schema*."""
        # TODO: replace with the real model call using "Structured Outputs".
        raise NotImplementedError("Wire your model SDK here")

    @retry(wait=wait_exponential(multiplier=1, min=1, max=20), stop=stop_after_attempt(6))
    def batch_submit(self, ndjson_lines: List[str]) -> str:
        """Submit NDJSON request lines to the provider's Batch API."""
        # TODO: replace with the real Batch API call.
        raise NotImplementedError
+
def run_online(from_date: str, to_date: str, currency: str, axis: str, label: str, top_n: int, goal: str) -> Dict[str, Any]:
    """Run a one-shot (online) analysis and persist the model's JSON result.

    Queries daily KPIs and top segments from MySQL, embeds both as CSV in a
    Polish-language prompt, calls the model with the JSON schema shipped next
    to this script, and writes the structured result to ./out/<uuid>.json.

    :param from_date: inclusive ISO start date of the KPI range
    :param to_date: inclusive ISO end date of the KPI range
    :param currency: display currency forwarded to the prompt context
    :param axis: segmentation column name, interpolated into SQL (validated below)
    :param label: human-readable label column name, interpolated into SQL (validated below)
    :param top_n: row limit for the top-segments query
    :param goal: free-text analysis goal forwarded to the model
    :returns: {"status": "ok", "path": <output json path>}
    :raises ValueError: if axis/label are not plain SQL identifiers
    """
    import re

    # SECURITY FIX: axis/label are spliced into SQL via str.format(), not bound
    # as parameters.  argparse restricts axis to a whitelist, but label arrives
    # unchecked from the CLI — restrict both to plain identifiers so no SQL can
    # be injected through them.
    ident = re.compile(r"^[A-Za-z_][A-Za-z0-9_]*$")
    for name, value in (("axis", axis), ("label", label)):
        if not ident.match(value):
            raise ValueError(f"{name} must be a plain SQL identifier, got {value!r}")

    kpis = mysql_query(SQL_KPIS_DAILY, (from_date, to_date))
    top = mysql_query(SQL_TOP_SEGMENTS.format(axis=axis, label=label), (from_date, to_date, top_n))

    csv_blocks = ("## kpis_daily\n" + to_csv(kpis) + "\n\n" +
                  "## top_segments\n" + to_csv(top))

    # The JSON schema next to this script drives the structured-output call.
    with open(os.path.join(os.path.dirname(__file__), "sales-analysis.schema.json"), "r", encoding="utf-8") as f:
        schema = json.load(f)

    prompt = f"""
Jesteś analitykiem sprzedaży. Otrzymasz: (a) kontekst, (b) dane.
Zwróć **wyłącznie** JSON zgodny ze schema.

Kontekst:
- Waluta: {currency}
- Zakres: {from_date} → {to_date}
- Cel: {goal}
- Poziom segmentacji: {axis}

Dane (CSV):
{csv_blocks}

Wskazówki:
- Użyj danych jak są (nie wymyślaj liczb).
- W meta.scope wpisz opis zakresu i segmentacji.
- Jeśli brak anomalii – anomalies: [].
- Kwoty do 2 miejsc, procenty do 1.
"""

    ai = AIClient(AI_API_KEY)
    result = ai.structured_analysis(prompt, schema)

    # Persist under ./out/<uuid>.json so repeated runs never collide.
    out_dir = os.path.join(os.path.dirname(__file__), "out")
    os.makedirs(out_dir, exist_ok=True)
    out_path = os.path.join(out_dir, f"{uuid.uuid4()}.json")
    with open(out_path, "w", encoding="utf-8") as f:
        json.dump(result, f, ensure_ascii=False)
    return {"status": "ok", "path": out_path}
+
def run_batch(from_date: str, to_date: str, axis: str, label: str):
    """Batch-mode analysis stub.

    Intended to generate NDJSON request lines and submit them via the
    provider's Batch API; not implemented yet.
    """
    # Per the blueprint we generate NDJSON lines (abridged; full variant in the PDF).
    # TODO: add the real batch_submit calls and persist the batch ID/state.
    raise NotImplementedError("Implement batch per blueprint")
+
if __name__ == "__main__":
    import argparse

    # CLI wiring: `online` runs one synchronous analysis, `batch` the batch stub.
    parser = argparse.ArgumentParser()
    commands = parser.add_subparsers(dest="cmd")

    online = commands.add_parser("online")
    online.add_argument("from_date")
    online.add_argument("to_date")
    online.add_argument("currency")
    online.add_argument("axis", choices=["sku_id", "client_id", "region_code"])
    online.add_argument("label")
    online.add_argument("top_n", type=int, nargs="?", default=50)
    online.add_argument("goal")

    batch = commands.add_parser("batch")
    for positional in ("from_date", "to_date", "axis", "label"):
        batch.add_argument(positional)

    ns = parser.parse_args()
    if ns.cmd == "online":
        print(run_online(ns.from_date, ns.to_date, ns.currency, ns.axis, ns.label, ns.top_n, ns.goal))
    elif ns.cmd == "batch":
        print(run_batch(ns.from_date, ns.to_date, ns.axis, ns.label))
    else:
        parser.print_help()
diff --git a/modules/EcmInvoiceOuts/bimit_invoiceSummary.php b/modules/EcmInvoiceOuts/bimit_invoiceSummary.php
index 3c3458a7..ec32374d 100644
--- a/modules/EcmInvoiceOuts/bimit_invoiceSummary.php
+++ b/modules/EcmInvoiceOuts/bimit_invoiceSummary.php
@@ -1,66 +1,164 @@
-
+
+
+
Łączny przychód
+
587 679,40 PLN
+
+
+
+
Sprzedane jednostki
+
182 619 szt.
+
+
+
AOV — średnia wartość faktury
+
1 836,50 PLN
+
AOV = przychód / liczba faktur
+
+
-$db = $GLOBALS['db'];
+
+Top produkty wg przychodu
+
+
+
+
+ | Kod |
+ Produkt |
+ Przychód [PLN] |
+
+
+
+ | FR00099_250_Wilfa | WIUCC-250 CLEANING LIQUID COFFEEMAKER, 250 ml | 51 217,92 |
+ | AGDPR01 | Środek do czyszczenia pralek automatycznych | 47 500,00 |
+ | FR00013_1000_Drekker | Odkamieniacz do automatycznych ekspresów do kawy, 1000 ml | 30 600,00 |
+ | AGDCHRM01 | Płyn do robotów mopujących, 500ml | 22 277,70 |
+ | FR00016_10_2g_amz_de | Cleaning tablets for coffee machines, 10 x 2g | 19 426,00 |
+
+
+
-$query = "
-SELECT s.document_no, s.register_date, s.parent_name, s.total_netto, si.code, si.name, si.quantity, si.price_netto
-FROM ecmsaleitems AS si
-INNER JOIN ecmsales AS s ON si.ecmsale_id = s.id
-WHERE s.register_date >= NOW() - INTERVAL 7 DAY
-ORDER BY s.register_date DESC;
-";
+
+Top klienci wg przychodu
+
+
+
+
+ | Klient |
+ Przychód [PLN] |
+
+
+
+ | Euro-net Sp. z o.o. | 138 660,08 |
+ | Wilfa AS | 71 616,72 |
+ | Aqualogis Polska Sp. z o.o. | 58 108,20 |
+ | dm-drogerie markt Sp. z o.o. | 40 108,08 |
+ | MediaRange GmbH | 40 064,24 |
+
+
+
-$res = $db->query($query);
-
-$results = [];
-if ($res) {
- $columns = array_keys($db->fetchByAssoc($res));
- $results[] = $columns;
- mysqli_data_seek($res, 0);
- while ($row = $db->fetchByAssoc($res)) {
- $results[] = array_values($row);
- }
-}
-
-
-
-$jsonData = json_encode($results);
-$apiKey = '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';
-
-
-$messages = [
- [
- "role" => "system",
- "content" => "Jesteś analitykiem danych. Przygotuj szczegółowy raport sprzedaży na podstawie danych w formacie JSON (jest to lista zamówień z ostatnich 7 dni). Wygeneruj czysty kod HTML wewnątrz jednego , bez , ani . Raport powinien zawierać tabele, nagłówki, podsumowania, wnioski, rekomendacje i listę potencjalnych nieprawidłowości. Dane zachowaj w oryginale (nie tłumacz nazw). Zadbaj o estetyczny i uporządkowany układ raportu. Zwróć tylko kod HTML. Odpowiedz w języku polskim.",
- ],
- [
- "role" => "user",
- "content" => "Oto dane sprzedaży w formacie JSON:\n\n$jsonData"
- ]
-];
-
-$payload = [
- "model" => "gpt-4.1",
- "messages" => $messages,
- "temperature" => 0.3
-];
-
-$ch = curl_init('https://api.openai.com/v1/chat/completions');
-curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
-curl_setopt($ch, CURLOPT_HTTPHEADER, [
- 'Content-Type: application/json',
- 'Authorization: Bearer ' . $apiKey
-]);
-curl_setopt($ch, CURLOPT_POSTFIELDS, json_encode($payload));
-
-$response = curl_exec($ch);
-if (curl_errno($ch)) {
- echo 'Błąd: ' . curl_error($ch);
- exit;
-}
-
-
-$data = json_decode($response, true);
-$htmlReport = $data['choices'][0]['message']['content'];
-
-echo $htmlReport;
\ No newline at end of file
+
+
Alerty „mix change” (duże zmiany udziału produktu w przychodzie)
+
+
+
+
+ | Data |
+ Kod |
+ Produkt |
+ Przychód [PLN] |
+ Szt. |
+ Udział dnia |
+ Porównanie |
+ Δ udziału |
+ Baseline |
+
+
+
+
+ | 2025-07-01 |
+ FR00006_250_amz_de |
+ Płyn do czyszczenia pralek automatycznych, 250 ml |
+ 1 169,60 |
+ 136 |
+ 23,98% |
+ vs mediana miesiąca |
+ +19,99 pp |
+ 3,99% |
+
+
+ | 2025-07-02 |
+ SECO002 |
+ ECO Wilgotne Ściereczki do ekranów |
+ 1 826,52 |
+ 372 |
+ 24,93% |
+ vs mediana miesiąca |
+ +12,17 pp |
+ 12,75% |
+
+
+ | 2025-07-02 |
+ RE00094 |
+ Płyn do ekranów TABLET/SMARTFON/LCD/PLASMA, 250 ml |
+ 1 048,56 |
+ 204 |
+ 14,31% |
+ vs mediana miesiąca |
+ +6,55 pp |
+ 7,76% |
+
+
+ | 2025-07-09 |
+ ICL-6550-INT |
+ Compressed gas duster, 400 ml |
+ 3 494,40 |
+ 624 |
+ 10,09% |
+ vs mediana miesiąca |
+ +6,87 pp |
+ 3,23% |
+
+
+ | 2025-07-09 |
+ ICL-6575-INT |
+ Compressed gas duster, 600 ml |
+ 3 463,20 |
+ 444 |
+ 10,00% |
+ vs mediana miesiąca |
+ +4,39 pp |
+ 5,61% |
+
+
+ | 2025-07-10 |
+ ICL-6550-INT |
+ Compressed gas duster, 400 ml |
+ 1 881,60 |
+ 336 |
+ 3,05% |
+ vs rolling 7 dni |
+ −7,05 pp |
+ 10,09% |
+
+
+ | 2025-07-10 |
+ ICL-6575-INT |
+ Compressed gas duster, 600 ml |
+ 3 463,20 |
+ 444 |
+ 5,61% |
+ vs rolling 7 dni |
+ −4,39 pp |
+ 10,00% |
+
+
+ | 2025-07-17 |
+ FR00006_250_amz_de |
+ Płyn do czyszczenia pralek automatycznych, 250 ml |
+ 1 080,00 |
+ 144 |
+ 3
diff --git a/modules/EcmInvoiceOuts/invoice_ai_analysis.py b/modules/EcmInvoiceOuts/invoice_ai_analysis.py
new file mode 100644
index 00000000..8c79c207
--- /dev/null
+++ b/modules/EcmInvoiceOuts/invoice_ai_analysis.py
@@ -0,0 +1,147 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Invoice AI Analysis — end-to-end script (MySQL -> KPIs -> Mix Change -> Anomalies -> HTML)
+See previous instructions for usage and requirements.
+"""
+
+from __future__ import annotations
+import os
+import sys
+import argparse
+from dataclasses import dataclass
+from typing import Any, List, Dict
+
+import numpy as np
+import pandas as pd
+
+from sqlalchemy import create_engine, text
+
@dataclass
class Config:
    """Runtime settings: MySQL connection, reporting window, and output path."""
    host: str          # MySQL host
    port: int          # MySQL port
    user: str
    password: str
    database: str
    date_from: str     # inclusive ISO start date of the report window
    date_to: str       # exclusive ISO end date of the report window
    doc_type: str      # invoice document type filter (e.g. "normal")
    output_html: str   # path the rendered HTML report is written to


def parse_args() -> Config:
    """Build a Config from CLI flags, defaulting to DB_* environment variables."""
    ap = argparse.ArgumentParser(description="Invoice AI Analysis (MySQL -> HTML)")
    ap.add_argument("--host", default=os.getenv("DB_HOST", "localhost"))
    ap.add_argument("--port", type=int, default=int(os.getenv("DB_PORT", "3306")))
    ap.add_argument("--user", default=os.getenv("DB_USER", "root"))
    ap.add_argument("--password", default=os.getenv("DB_PASS", "rootpassword"))
    ap.add_argument("--database", default=os.getenv("DB_NAME", "twinpol-mysql56"))
    ap.add_argument("--from", dest="date_from", default="2025-07-01")
    ap.add_argument("--to", dest="date_to", default="2025-08-01")
    ap.add_argument("--type", dest="doc_type", default="normal")
    ap.add_argument("--out", dest="output_html", default="report.html")
    ns = ap.parse_args()
    # Namespace attribute names mirror Config fields exactly, so unpack directly.
    return Config(**vars(ns))
+
def get_engine(cfg: Config):
    """Create a pooled SQLAlchemy engine for the configured MySQL database.

    pool_pre_ping validates connections before use; pool_recycle avoids
    MySQL's idle-connection timeout.
    """
    dsn = (
        f"mysql+pymysql://{cfg.user}:{cfg.password}"
        f"@{cfg.host}:{cfg.port}/{cfg.database}?charset=utf8mb4"
    )
    return create_engine(dsn, pool_recycle=3600, pool_pre_ping=True, future=True)
+
def fetch_invoices(engine, cfg: Config) -> pd.DataFrame:
    """Load invoice line items in [date_from, date_to) for the given doc type.

    Returns one row per invoice item with the parent invoice's number, customer
    name and date.  Dates and numerics are coerced; rows where coercion failed
    are dropped.
    """
    query = text("""
    SELECT i.document_no,
           i.parent_name,
           DATE(i.register_date) AS register_date,
           ii.code,
           ii.name,
           ii.quantity,
           ii.total_netto
    FROM ecminvoiceoutitems AS ii
    JOIN ecminvoiceouts AS i ON i.id = ii.ecminvoiceout_id
    WHERE i.register_date >= :date_from
      AND i.register_date < :date_to
      AND i.type = :doc_type
    """)
    bind_params = {
        "date_from": cfg.date_from,
        "date_to": cfg.date_to,
        "doc_type": cfg.doc_type,
    }
    with engine.connect() as conn:
        frame = pd.read_sql(query, conn, params=bind_params)

    # Coerce types defensively; anything unparsable becomes NaN/NaT and is dropped.
    frame["register_date"] = pd.to_datetime(frame["register_date"], errors="coerce")
    for numeric_col in ("quantity", "total_netto"):
        frame[numeric_col] = pd.to_numeric(frame[numeric_col], errors="coerce")
    return frame.dropna(subset=["register_date", "quantity", "total_netto"])
+
def compute_kpis(df: pd.DataFrame) -> Dict[str, Any]:
    """Aggregate invoice line items into headline KPIs and top-5 rankings.

    Returns a dict with: total net revenue, distinct invoice count, unit count,
    AOV (revenue / invoice count; 0.0 when there are no invoices), and two
    5-row DataFrames ranked by summed net revenue — top products (by code+name)
    and top customers (by parent_name).
    """
    revenue = float(df["total_netto"].sum())
    invoices = int(df["document_no"].nunique())
    units = float(df["quantity"].sum())

    def top5(keys: List[str]) -> pd.DataFrame:
        # Rank the groups by summed net revenue and keep the five largest.
        grouped = df.groupby(keys, as_index=False).agg(total_netto=("total_netto", "sum"))
        return grouped.sort_values("total_netto", ascending=False).head(5)

    return {
        "total_revenue": revenue,
        "total_invoices": invoices,
        "total_units": units,
        "aov": revenue / invoices if invoices else 0.0,
        "top_products": top5(["code", "name"]),
        "top_customers": top5(["parent_name"]),
    }
+
# NOTE(review): this function's HTML markup appears mangled in transit — the
# literal tags (table/tr/td/headings) look stripped, leaving broken string
# literals (e.g. the `trs = "".join(` expression and the empty `return f""`).
# Restore the original markup from version control before shipping; code is
# documented here exactly as found, unmodified.
def render_html(cfg: Config, kpis: Dict[str, Any]) -> str:
    """Render the KPI summary report as an HTML fragment (markup currently corrupted).

    Uses Polish number formatting: space as thousands separator, comma as the
    decimal mark.
    """
    def fmt_cur(x): return f"{x:,.2f}".replace(",", " ").replace(".", ",")
    def table(headers, rows):
        th = "".join(f" | {h} | " for h in headers)
        trs = "".join("
 " + "".join(f"| {v} | " for v in row) + "
 " for row in rows)
        return f""
    kpi_table = table(["Metryka", "Wartość"], [
        ["Łączny przychód", f"{fmt_cur(kpis['total_revenue'])} PLN"],
        ["Liczba faktur", f"{kpis['total_invoices']}"],
        ["Sprzedane jednostki", f"{int(kpis['total_units']):,}".replace(",", " ")],
        ["Średnia wartość faktury", f"{fmt_cur(kpis['aov'])} PLN"]
    ])
    prod_table = table(["Kod", "Produkt", "Przychód"], [
        [r["code"], r["name"], fmt_cur(r["total_netto"]) + " PLN"]
        for _, r in kpis["top_products"].iterrows()
    ])
    cust_table = table(["Klient", "Przychód"], [
        [r["parent_name"], fmt_cur(r["total_netto"]) + " PLN"]
        for _, r in kpis["top_customers"].iterrows()
    ])
    return f"""
 Analiza faktur ({cfg.date_from} → {cfg.date_to})
 KPI
{kpi_table}
 Top produkty
{prod_table}
 Top klienci
{cust_table}
 """
+
def main():
    """CLI entry point: load config, pull invoice rows, and write the HTML report."""
    cfg = parse_args()
    rows = fetch_invoices(get_engine(cfg), cfg)
    if rows.empty:
        print("No data found.")
        return
    report = render_html(cfg, compute_kpis(rows))
    with open(cfg.output_html, "w", encoding="utf-8") as out:
        out.write(report)
    print(f"Report written to {cfg.output_html}")


if __name__ == "__main__":
    main()
diff --git a/modules/EcmInvoiceOuts/invoice_summary.py b/modules/EcmInvoiceOuts/invoice_summary.py
new file mode 100644
index 00000000..62d6f1fb
--- /dev/null
+++ b/modules/EcmInvoiceOuts/invoice_summary.py
@@ -0,0 +1,2 @@
+
# Placeholder smoke-test script for the EcmInvoiceOuts module.
greeting = "Hello"
print(greeting)