diff --git a/.gitignore b/.gitignore
index 68a9439b..a4f30b63 100644
--- a/.gitignore
+++ b/.gitignore
@@ -11,6 +11,8 @@
!*.gif
!*.jpg
!*.png
+# allow python
+!*.py
# ...even if they are in subdirectories
!*/
diff --git a/REST/functions.php b/REST/functions.php
index 8389ea98..d2b00023 100644
--- a/REST/functions.php
+++ b/REST/functions.php
@@ -74,7 +74,7 @@ function createPzFromInvoice($record, $stockId)
$pz->position_list = array();
$pz->fromREST = true;
- $gotAllProducts = true; // hope :)
+ $gotAllProducts = true; // hope :)
foreach ($inv->position_list as $product) {
echo 'Produkt: ' . $product->product_code . '<br>';
$p = getProduct($product->product_code);
@@ -313,3 +313,131 @@ function brecho($msg)
var_dump($msg);
echo '<br>';
}
+// AI analysis
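+// Runs the fixed list of SQL jobs below and writes each result set to a CSV file under REST/export/.
+// Reached through the REST endpoint as action=createCSVReports (see REST/index.php), e.g.
+// REST/index.php?key=<api-key>&action=createCSVReports (the key placeholder is illustrative).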
+function createCSVReports()
+{
+ $db = $GLOBALS['db'];
+ $exportDir = __DIR__ . "/export";
+
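+ // Each job entry: 'sql' (query to export), 'filename' (target CSV name) and, optionally,
+ // 'headers' (explicit column headers; otherwise taken from the keys of the first result row).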
+ $jobs = [
+ [
+ 'sql' => "
+ SELECT
+ i.document_no,
+ i.register_date,
+ i.parent_name,
+ p.code,
+ p.name,
+ p.group_ks,
+ ii.quantity,
+ ii.price_netto
+ FROM ecminvoiceouts AS i
+ INNER JOIN ecminvoiceoutitems AS ii ON i.id = ii.ecminvoiceout_id
+ INNER JOIN ecmproducts AS p ON ii.ecmproduct_id = p.id
+ WHERE i.type = 'normal' AND YEAR(i.register_date) = 2024
+ ORDER BY i.register_date DESC
+ ",
+ 'filename' => 'invoices_2024_' . date('Ymd_His') . '.csv',
+ ],
+ [
+ 'sql' => "
+ SELECT code, name, SUM(ii.quantity) AS units, SUM(ii.price_netto*ii.quantity) AS revenue
+ FROM ecminvoiceoutitems ii
+ JOIN ecmproducts p ON p.id = ii.ecmproduct_id
+ GROUP BY code, name
+ ORDER BY revenue DESC
+ LIMIT 100
+ ",
+ 'filename' => 'top_products_' . date('Ymd_His') . '.csv',
+ ],
+ [
+ 'sql' =>"SELECT COUNT(*) FROM ecminvoiceouts WHERE YEAR(register_date)=2025",
+ 'filename' => 'ecminvoiceouts_2025_' . date('Ymd_His') . '.csv',
+ ],
+ // ... add further report definitions here ...
+ ];
+
+ $report = [];
+ foreach ($jobs as $job) {
+ $sql = $job['sql'];
+ $filename = $job['filename'];
+ $headers = isset($job['headers']) ? $job['headers'] : null;
+
+ $res = $db->query($sql);
+ $fullpath = rtrim($exportDir, "/") . "/" . $filename;
+
+ $result = exportToCSVFile($res, $fullpath, $headers, ';', true);
+
+ if ($result['ok']) {
+ $report[] = "OK → {$result['path']} (wiersze: {$result['rows']})
";
+ } else {
+ $report[] = "ERR → {$result['path']} ({$result['error']})
";
+ }
+ }
+
+ echo implode("\n", $report);
+ exit;
+}
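+// Exports a $db->query() result set to a CSV file.
+//   $headers   — explicit column headers; when null they are derived from the first row's keys
+//   $delimiter — field separator (';' by default)
+//   $withBom   — prepend a UTF-8 BOM so Excel recognises the encoding
+// Returns ['ok' => bool, 'path' => string, 'rows' => int, 'error' => ?string].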
+function exportToCSVFile($res, $fullpath, array $headers = null, $delimiter = ';', $withBom = true)
+{
+ $db = $GLOBALS['db'];
+
+ $dir = dirname($fullpath);
+ if (!is_dir($dir)) {
+ if (!@mkdir($dir, 0775, true)) {
+ return ['ok'=>false, 'path'=>$fullpath, 'rows'=>0, 'error'=>"Nie mogę utworzyć katalogu: $dir"];
+ }
+ }
+ if (!is_writable($dir)) {
+ return ['ok'=>false, 'path'=>$fullpath, 'rows'=>0, 'error'=>"Katalog nie jest zapisywalny: $dir"];
+ }
+
+ $fp = @fopen($fullpath, 'w');
+ if ($fp === false) {
+ return ['ok'=>false, 'path'=>$fullpath, 'rows'=>0, 'error'=>"Nie mogę otworzyć pliku do zapisu: $fullpath"];
+ }
+
+ // UTF-8 BOM so Excel (Polish locale) detects the encoding
+ if ($withBom) {
+ fwrite($fp, "\xEF\xBB\xBF");
+ }
+
+ // fetch the first row so headers can be derived from it if needed
+ $first = $db->fetchByAssoc($res);
+
+ // no data → write an empty file, with a header row only if one was passed in explicitly
+ if (!$first) {
+ if ($headers !== null) {
+ fputcsv($fp, $headers, $delimiter);
+ }
+ fclose($fp);
+ return ['ok'=>true, 'path'=>$fullpath, 'rows'=>0, 'error'=>null];
+ }
+
+ // derive headers dynamically when none were provided
+ if ($headers === null) {
+ $headers = array_keys($first);
+ }
+
+ // write the header row
+ fputcsv($fp, $headers, $delimiter);
+
+ // write the first row in header order
+ $line = [];
+ foreach ($headers as $h) { $line[] = isset($first[$h]) ? $first[$h] : ''; }
+ fputcsv($fp, $line, $delimiter);
+ $count = 1;
+
+ // remaining rows
+ while ($row = $db->fetchByAssoc($res)) {
+ $line = [];
+ foreach ($headers as $h) { $line[] = isset($row[$h]) ? $row[$h] : ''; }
+ fputcsv($fp, $line, $delimiter);
+ $count++;
+ }
+
+ fclose($fp);
+ return ['ok'=>true, 'path'=>$fullpath, 'rows'=>$count, 'error'=>null];
+}
\ No newline at end of file
diff --git a/REST/index.php b/REST/index.php
index 60b4af4d..16931b6d 100644
--- a/REST/index.php
+++ b/REST/index.php
@@ -38,6 +38,9 @@
case 'createCostDocumentFromInvoice':
createCostDocumentFromInvoice($_GET['record']);
break;
+ case 'createCSVReports':
+ createCSVReports();
+ break;
}
// https://crm.twinpol.com/REST/index.php?key=d68dac4c-f784-4e1b-8267-9ffcfa0eda4c&action=createCostDocumentFromInvoice&record=c3f6eaa6-0cbd-8c89-1a8c-683ff19a36db
?>
\ No newline at end of file
diff --git a/modules/EcmInvoiceOuts/Menu.php b/modules/EcmInvoiceOuts/Menu.php
index d0d043f4..6f7fe3b2 100755
--- a/modules/EcmInvoiceOuts/Menu.php
+++ b/modules/EcmInvoiceOuts/Menu.php
@@ -103,4 +103,6 @@ if(!defined('sugarEntry') || !sugarEntry) die('Not A Valid Entry Point');
if(ACLController::checkAccess('EcmInvoiceOuts', "list", true)) $module_menu [] = Array("index.php?module=EcmInvoiceOuts&action=index&return_module=EcmInvoiceOuts&return_action=DetailView", translate('LNK_ECMQUOTES_LIST','EcmInvoiceOuts'),"EcmInvoiceOuts", 'EcmInvoiceOuts');
if(ACLController::checkAccess('EcmInvoiceOuts', "list", true)) $module_menu [] = Array("index.php?module=EcmInvoiceOuts&action=Report_INTRASTAT", "Raport INTRASTAT","EcmInvoiceOuts", 'EcmInvoiceOuts');
- if(ACLController::checkAccess('EcmInvoiceOuts', "list", true)) $module_menu [] = Array("index.php?module=EcmInvoiceOuts&action=ecommerce", "Faktury E-Commerce","EcmInvoiceOuts", 'EcmInvoiceOuts');
\ No newline at end of file
+ if(ACLController::checkAccess('EcmInvoiceOuts', "list", true)) $module_menu [] = Array("index.php?module=EcmInvoiceOuts&action=ecommerce", "Faktury E-Commerce","EcmInvoiceOuts", 'EcmInvoiceOuts');
+
+ if(ACLController::checkAccess('EcmInvoiceOuts', "list", true)) $module_menu [] = Array("index.php?module=EcmInvoiceOuts&action=bimit_invoiceSummary", "Analiza faktur","EcmInvoiceOuts", 'EcmInvoiceOuts');
\ No newline at end of file
diff --git a/modules/EcmInvoiceOuts/ai/enqueue.php b/modules/EcmInvoiceOuts/ai/enqueue.php
new file mode 100644
index 00000000..d8b10d73
--- /dev/null
+++ b/modules/EcmInvoiceOuts/ai/enqueue.php
@@ -0,0 +1,21 @@
+ $id]);
diff --git a/modules/EcmInvoiceOuts/ai/result.php b/modules/EcmInvoiceOuts/ai/result.php
new file mode 100644
index 00000000..81a59317
--- /dev/null
+++ b/modules/EcmInvoiceOuts/ai/result.php
@@ -0,0 +1,12 @@
+import io
+import json
+import os
+import uuid
+from typing import Any, Dict, List, Tuple
+
+import polars as pl
+import pymysql
+from tenacity import retry, stop_after_attempt, wait_exponential
+
+# MYSQL_CONF (pymysql connection kwargs, assumed to include cursorclass=pymysql.cursors.DictCursor
+# so fetchall() yields dicts) and AI_API_KEY are assumed to be defined in the module configuration.
+def mysql_query(sql: str, params: Tuple = ()) -> pl.DataFrame:
+ conn = pymysql.connect(**MYSQL_CONF)
+ try:
+ with conn.cursor() as cur:
+ cur.execute(sql, params)
+ rows = cur.fetchall()
+ finally:
+ conn.close()
+ return pl.from_dicts(rows)
+
+def to_csv(df: pl.DataFrame) -> str:
+ buf = io.StringIO()
+ df.write_csv(buf)
+ return buf.getvalue()
+
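+# Daily KPIs for the requested date range: revenue, units sold, gross margin % and discount share %.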
+SQL_KPIS_DAILY = """
+SELECT DATE(invoice_date) AS d,
+ SUM(net_amount) AS revenue,
+ SUM(quantity) AS qty,
+ ROUND(100*SUM(net_amount - cost_amount)/NULLIF(SUM(net_amount),0), 2) AS gross_margin_pct,
+ ROUND(100*SUM(discount_amount)/NULLIF(SUM(gross_amount),0), 2) AS discount_pct
+FROM fact_invoices
+WHERE invoice_date BETWEEN %s AND %s
+GROUP BY 1
+ORDER BY 1;
+"""
+
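+# Top segments by revenue over the trailing 60 days. {axis} and {label} are interpolated with
+# str.format() before execution, so they must come from a trusted whitelist (see the argparse choices).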
+SQL_TOP_SEGMENTS = """
+SELECT {axis} AS key,
+ ANY_VALUE({label}) AS label,
+ SUM(net_amount) AS revenue,
+ SUM(quantity) AS qty,
+ ROUND(100*SUM(net_amount - cost_amount)/NULLIF(SUM(net_amount),0), 2) AS gross_margin_pct,
+ ROUND(100*(SUM(net_amount) - LAG(SUM(net_amount)) OVER(ORDER BY 1))/
+ NULLIF(LAG(SUM(net_amount)) OVER(ORDER BY 1),0), 2) AS trend_30d
+FROM fact_invoices
+WHERE invoice_date BETWEEN DATE_SUB(%s, INTERVAL 60 DAY) AND %s
+GROUP BY 1
+ORDER BY revenue DESC
+LIMIT %s;
+"""
+
+class AIClient:
+ def __init__(self, api_key: str): self.api_key = api_key
+ @retry(wait=wait_exponential(multiplier=1, min=1, max=20), stop=stop_after_attempt(6))
+ def structured_analysis(self, prompt: str, schema: Dict[str, Any]) -> Dict[str, Any]:
+        # TODO: replace with a real model call that uses "Structured Outputs"
+ raise NotImplementedError("Wire your model SDK here")
+
+ @retry(wait=wait_exponential(multiplier=1, min=1, max=20), stop=stop_after_attempt(6))
+ def batch_submit(self, ndjson_lines: List[str]) -> str:
+        # TODO: replace with the real Batch API call
+ raise NotImplementedError
+
+def run_online(from_date: str, to_date: str, currency: str, axis: str, label: str, top_n: int, goal: str) -> Dict[str, Any]:
+ kpis = mysql_query(SQL_KPIS_DAILY, (from_date, to_date))
+ top = mysql_query(SQL_TOP_SEGMENTS.format(axis=axis, label=label), (from_date, to_date, top_n))
+
+ csv_blocks = ("## kpis_daily\n" + to_csv(kpis) + "\n\n" +
+ "## top_segments\n" + to_csv(top))
+
+ with open(os.path.join(os.path.dirname(__file__), "sales-analysis.schema.json"), "r", encoding="utf-8") as f:
+ schema = json.load(f)
+
+ prompt = f"""
+Jesteś analitykiem sprzedaży. Otrzymasz: (a) kontekst, (b) dane.
+Zwróć **wyłącznie** JSON zgodny ze schema.
+
+Kontekst:
+- Waluta: {currency}
+- Zakres: {from_date} → {to_date}
+- Cel: {goal}
+- Poziom segmentacji: {axis}
+
+Dane (CSV):
+{csv_blocks}
+
+Wskazówki:
+- Użyj danych jak są (nie wymyślaj liczb).
+- W meta.scope wpisz opis zakresu i segmentacji.
+- Jeśli brak anomalii – anomalies: [].
+- Kwoty do 2 miejsc, procenty do 1.
+"""
+
+ ai = AIClient(AI_API_KEY)
+ result = ai.structured_analysis(prompt, schema)
+
+ out_dir = os.path.join(os.path.dirname(__file__), "out")
+ os.makedirs(out_dir, exist_ok=True)
+ out_path = os.path.join(out_dir, f"{uuid.uuid4()}.json")
+ with open(out_path, "w", encoding="utf-8") as f:
+ json.dump(result, f, ensure_ascii=False)
+ return {"status": "ok", "path": out_path}
+
+def run_batch(from_date: str, to_date: str, axis: str, label: str):
+    # Per the blueprint, build the NDJSON request lines (abridged here; the full variant is in the PDF)
+    # TODO: add the real batch_submit calls and persist the batch ID/state
+ raise NotImplementedError("Implement batch per blueprint")
+
+if __name__ == "__main__":
+ import argparse
+ p = argparse.ArgumentParser()
+ sub = p.add_subparsers(dest="cmd")
+ o = sub.add_parser("online")
+ o.add_argument("from_date"); o.add_argument("to_date"); o.add_argument("currency")
+ o.add_argument("axis", choices=["sku_id","client_id","region_code"])
+ o.add_argument("label"); o.add_argument("top_n", type=int, nargs="?", default=50)
+ o.add_argument("goal")
+ b = sub.add_parser("batch")
+ b.add_argument("from_date"); b.add_argument("to_date"); b.add_argument("axis"); b.add_argument("label")
+ args = p.parse_args()
+
+ if args.cmd == "online":
+ print(run_online(args.from_date, args.to_date, args.currency, args.axis, args.label, args.top_n, args.goal))
+ elif args.cmd == "batch":
+ print(run_batch(args.from_date, args.to_date, args.axis, args.label))
+ else:
+ p.print_help()
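+# Example invocation (illustrative — adjust the script name/path to wherever this module lives):
+#   python3 analysis.py online 2025-07-01 2025-07-31 PLN sku_id name 50 "find margin leaks"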
diff --git a/modules/EcmInvoiceOuts/bimit_invoiceSummary.php b/modules/EcmInvoiceOuts/bimit_invoiceSummary.php
new file mode 100644
index 00000000..ec32374d
--- /dev/null
+++ b/modules/EcmInvoiceOuts/bimit_invoiceSummary.php
@@ -0,0 +1,164 @@
+<h3>Łączny przychód</h3>
+<p>587 679,40 PLN</p>
+
+<h3>Sprzedane jednostki</h3>
+<p>182 619 szt.</p>
+
+<h3>AOV — średnia wartość faktury</h3>
+<p>1 836,50 PLN</p>
+<p>AOV = przychód / liczba faktur</p>
+
+<h2>Top produkty wg przychodu</h2>
+<table>
+  <thead>
+    <tr><th>Kod</th><th>Produkt</th><th>Przychód [PLN]</th></tr>
+  </thead>
+  <tbody>
+    <tr><td>FR00099_250_Wilfa</td><td>WIUCC-250 CLEANING LIQUID COFFEEMAKER, 250 ml</td><td>51 217,92</td></tr>
+    <tr><td>AGDPR01</td><td>Środek do czyszczenia pralek automatycznych</td><td>47 500,00</td></tr>
+    <tr><td>FR00013_1000_Drekker</td><td>Odkamieniacz do automatycznych ekspresów do kawy, 1000 ml</td><td>30 600,00</td></tr>
+    <tr><td>AGDCHRM01</td><td>Płyn do robotów mopujących, 500ml</td><td>22 277,70</td></tr>
+    <tr><td>FR00016_10_2g_amz_de</td><td>Cleaning tablets for coffee machines, 10 x 2g</td><td>19 426,00</td></tr>
+  </tbody>
+</table>
+
+<h2>Top klienci wg przychodu</h2>
+<table>
+  <thead>
+    <tr><th>Klient</th><th>Przychód [PLN]</th></tr>
+  </thead>
+  <tbody>
+    <tr><td>Euro-net Sp. z o.o.</td><td>138 660,08</td></tr>
+    <tr><td>Wilfa AS</td><td>71 616,72</td></tr>
+    <tr><td>Aqualogis Polska Sp. z o.o.</td><td>58 108,20</td></tr>
+    <tr><td>dm-drogerie markt Sp. z o.o.</td><td>40 108,08</td></tr>
+    <tr><td>MediaRange GmbH</td><td>40 064,24</td></tr>
+  </tbody>
+</table>
+
+<h2>Alerty „mix change” (duże zmiany udziału produktu w przychodzie)</h2>
+<table>
+  <thead>
+    <tr><th>Data</th><th>Kod</th><th>Produkt</th><th>Przychód [PLN]</th><th>Szt.</th><th>Udział dnia</th><th>Porównanie</th><th>Δ udziału</th><th>Baseline</th></tr>
+  </thead>
+  <tbody>
+    <tr><td>2025-07-01</td><td>FR00006_250_amz_de</td><td>Płyn do czyszczenia pralek automatycznych, 250 ml</td><td>1 169,60</td><td>136</td><td>23,98%</td><td>vs mediana miesiąca</td><td>+19,99 pp</td><td>3,99%</td></tr>
+    <tr><td>2025-07-02</td><td>SECO002</td><td>ECO Wilgotne Ściereczki do ekranów</td><td>1 826,52</td><td>372</td><td>24,93%</td><td>vs mediana miesiąca</td><td>+12,17 pp</td><td>12,75%</td></tr>
+    <tr><td>2025-07-02</td><td>RE00094</td><td>Płyn do ekranów TABLET/SMARTFON/LCD/PLASMA, 250 ml</td><td>1 048,56</td><td>204</td><td>14,31%</td><td>vs mediana miesiąca</td><td>+6,55 pp</td><td>7,76%</td></tr>
+    <tr><td>2025-07-09</td><td>ICL-6550-INT</td><td>Compressed gas duster, 400 ml</td><td>3 494,40</td><td>624</td><td>10,09%</td><td>vs mediana miesiąca</td><td>+6,87 pp</td><td>3,23%</td></tr>
+    <tr><td>2025-07-09</td><td>ICL-6575-INT</td><td>Compressed gas duster, 600 ml</td><td>3 463,20</td><td>444</td><td>10,00%</td><td>vs mediana miesiąca</td><td>+4,39 pp</td><td>5,61%</td></tr>
+    <tr><td>2025-07-10</td><td>ICL-6550-INT</td><td>Compressed gas duster, 400 ml</td><td>1 881,60</td><td>336</td><td>3,05%</td><td>vs rolling 7 dni</td><td>−7,05 pp</td><td>10,09%</td></tr>
+    <tr><td>2025-07-10</td><td>ICL-6575-INT</td><td>Compressed gas duster, 600 ml</td><td>3 463,20</td><td>444</td><td>5,61%</td><td>vs rolling 7 dni</td><td>−4,39 pp</td><td>10,00%</td></tr>
+    <tr><td>2025-07-17</td><td>FR00006_250_amz_de</td><td>Płyn do czyszczenia pralek automatycznych, 250 ml</td><td>1 080,00</td><td>144</td><td>3
diff --git a/modules/EcmInvoiceOuts/invoice_ai_analysis.py b/modules/EcmInvoiceOuts/invoice_ai_analysis.py
new file mode 100644
index 00000000..8c79c207
--- /dev/null
+++ b/modules/EcmInvoiceOuts/invoice_ai_analysis.py
@@ -0,0 +1,147 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Invoice AI Analysis — end-to-end script (MySQL -> KPIs -> Mix Change -> Anomalies -> HTML)
+See previous instructions for usage and requirements.
+"""
+
+from __future__ import annotations
+import os
+import argparse
+from dataclasses import dataclass
+from typing import Any, Dict
+
+import pandas as pd
+
+from sqlalchemy import create_engine, text
+
+@dataclass
+class Config:
+ host: str
+ port: int
+ user: str
+ password: str
+ database: str
+ date_from: str
+ date_to: str
+ doc_type: str
+ output_html: str
+
+def parse_args() -> Config:
+ parser = argparse.ArgumentParser(description="Invoice AI Analysis (MySQL -> HTML)")
+ parser.add_argument("--host", default=os.getenv("DB_HOST", "localhost"))
+ parser.add_argument("--port", default=int(os.getenv("DB_PORT", "3306")), type=int)
+ parser.add_argument("--user", default=os.getenv("DB_USER", "root"))
+ parser.add_argument("--password", default=os.getenv("DB_PASS", "rootpassword"))
+ parser.add_argument("--database", default=os.getenv("DB_NAME", "twinpol-mysql56"))
+ parser.add_argument("--from", dest="date_from", default="2025-07-01")
+ parser.add_argument("--to", dest="date_to", default="2025-08-01")
+ parser.add_argument("--type", dest="doc_type", default="normal")
+ parser.add_argument("--out", dest="output_html", default="report.html")
+ args = parser.parse_args()
+ return Config(
+ host=args.host, port=args.port, user=args.user, password=args.password,
+ database=args.database, date_from=args.date_from, date_to=args.date_to,
+ doc_type=args.doc_type, output_html=args.output_html
+ )
+
+def get_engine(cfg: Config):
+ url = f"mysql+pymysql://{cfg.user}:{cfg.password}@{cfg.host}:{cfg.port}/{cfg.database}?charset=utf8mb4"
+ return create_engine(url, pool_recycle=3600, pool_pre_ping=True, future=True)
+
+def fetch_invoices(engine, cfg: Config) -> pd.DataFrame:
+ sql = text("""
+ SELECT i.document_no,
+ i.parent_name,
+ DATE(i.register_date) AS register_date,
+ ii.code,
+ ii.name,
+ ii.quantity,
+ ii.total_netto
+ FROM ecminvoiceoutitems AS ii
+ JOIN ecminvoiceouts AS i ON i.id = ii.ecminvoiceout_id
+ WHERE i.register_date >= :date_from
+ AND i.register_date < :date_to
+ AND i.type = :doc_type
+ """)
+ with engine.connect() as con:
+ df = pd.read_sql(sql, con, params={
+ "date_from": cfg.date_from,
+ "date_to": cfg.date_to,
+ "doc_type": cfg.doc_type
+ })
+ df["register_date"] = pd.to_datetime(df["register_date"], errors="coerce")
+ df["quantity"] = pd.to_numeric(df["quantity"], errors="coerce")
+ df["total_netto"] = pd.to_numeric(df["total_netto"], errors="coerce")
+ return df.dropna(subset=["register_date", "quantity", "total_netto"])
+
+def compute_kpis(df: pd.DataFrame) -> Dict[str, Any]:
+ total_revenue = float(df["total_netto"].sum())
+ total_invoices = int(df["document_no"].nunique())
+ total_units = float(df["quantity"].sum())
+ aov = float(total_revenue / total_invoices) if total_invoices else 0.0
+ top_products = (df.groupby(["code", "name"], as_index=False)
+ .agg(total_netto=("total_netto", "sum"))
+ .sort_values("total_netto", ascending=False)
+ .head(5))
+ top_customers = (df.groupby(["parent_name"], as_index=False)
+ .agg(total_netto=("total_netto", "sum"))
+ .sort_values("total_netto", ascending=False)
+ .head(5))
+ return {
+ "total_revenue": total_revenue,
+ "total_invoices": total_invoices,
+ "total_units": total_units,
+ "aov": aov,
+ "top_products": top_products,
+ "top_customers": top_customers,
+ }
+
+def render_html(cfg: Config, kpis: Dict[str, Any]) -> str:
+ def fmt_cur(x): return f"{x:,.2f}".replace(",", " ").replace(".", ",")
+    def table(headers, rows):
+        th = "".join(f"<th>{h}</th>" for h in headers)
+        trs = "".join("<tr>" + "".join(f"<td>{v}</td>" for v in row) + "</tr>" for row in rows)
+        return f"<table><thead><tr>{th}</tr></thead><tbody>{trs}</tbody></table>"
+ kpi_table = table(["Metryka", "Wartość"], [
+ ["Łączny przychód", f"{fmt_cur(kpis['total_revenue'])} PLN"],
+ ["Liczba faktur", f"{kpis['total_invoices']}"],
+ ["Sprzedane jednostki", f"{int(kpis['total_units']):,}".replace(",", " ")],
+ ["Średnia wartość faktury", f"{fmt_cur(kpis['aov'])} PLN"]
+ ])
+ prod_table = table(["Kod", "Produkt", "Przychód"], [
+ [r["code"], r["name"], fmt_cur(r["total_netto"]) + " PLN"]
+ for _, r in kpis["top_products"].iterrows()
+ ])
+ cust_table = table(["Klient", "Przychód"], [
+ [r["parent_name"], fmt_cur(r["total_netto"]) + " PLN"]
+ for _, r in kpis["top_customers"].iterrows()
+ ])
+ return f"""
+ Analiza faktur ({cfg.date_from} → {cfg.date_to})
+ KPI
{kpi_table}
+ Top produkty
{prod_table}
+ Top klienci
{cust_table}
+ """
+
+def main():
+ cfg = parse_args()
+ engine = get_engine(cfg)
+ df = fetch_invoices(engine, cfg)
+ if df.empty:
+ print("No data found.")
+ return
+ kpis = compute_kpis(df)
+ html = render_html(cfg, kpis)
+ with open(cfg.output_html, "w", encoding="utf-8") as f:
+ f.write(html)
+ print(f"Report written to {cfg.output_html}")
+
+if __name__ == "__main__":
+ main()
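+# Example:
+#   python3 invoice_ai_analysis.py --from 2025-07-01 --to 2025-08-01 --out report.html
+# Connection parameters default to the DB_HOST/DB_PORT/DB_USER/DB_PASS/DB_NAME environment variables.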
diff --git a/modules/EcmInvoiceOuts/test.php b/modules/EcmInvoiceOuts/test.php
old mode 100755
new mode 100644
index 0a6f980f..fc8dc317
--- a/modules/EcmInvoiceOuts/test.php
+++ b/modules/EcmInvoiceOuts/test.php
@@ -1,18 +1,16 @@
-&1', $output, $returnVar);
+if ($returnVar !== 0) {
+ http_response_code(500);
+ echo "Error running Python script:\n" . implode("\n", $output);
+ exit;
+}
-$url='http://damznac.pl/login'; // Specify your url
-$data= array('username'=>'lol','api_key'=>'val'); // Add parameters in key value
-$ch = curl_init(); // Initialize cURL
-curl_setopt($ch, CURLOPT_URL,$url);
-curl_setopt($ch, CURLOPT_FOLLOWLOCATION, TRUE);
-
-//curl_setopt($ch, CURLOPT_POSTFIELDS, http_build_query($data));
-curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
-$return= curl_exec($ch);
-var_dump($return);
-curl_close($ch);
-
-
-?>
\ No newline at end of file
+// Expect a single line with the count
+echo trim(implode("\n", $output));