AI sales analysis
21
modules/EcmInvoiceOuts/ai/enqueue.php
Normal file
@@ -0,0 +1,21 @@
<?php
// modules/EcmInvoiceOuts/ai/enqueue.php
// Accepts analysis parameters via POST, writes a job file to ai/queue/
// and returns the generated job id as JSON. (A queue-consumer sketch follows this listing.)

$from     = $_POST['from'] ?? null;
$to       = $_POST['to'] ?? null;
$currency = $_POST['currency'] ?? 'PLN';
$axis     = $_POST['axis'] ?? 'sku_id';
$label    = $_POST['label'] ?? 'sku_name';
$top_n    = (int)($_POST['top_n'] ?? 50);
$goal     = $_POST['goal'] ?? 'porównanie Q2 vs Q1';

if (!$from || !$to) { http_response_code(400); exit('Missing from/to'); }

// Only allow the segmentation axes the worker understands; the value is later
// interpolated into SQL, so reject anything outside the whitelist.
$allowedAxes = ['sku_id', 'client_id', 'region_code'];
if (!in_array($axis, $allowedAxes, true)) { http_response_code(400); exit('Invalid axis'); }

$base = __DIR__;
@mkdir("$base/queue", 0777, true);

$payload = compact('from', 'to', 'currency', 'axis', 'label', 'top_n', 'goal');
$id = bin2hex(random_bytes(8));
file_put_contents("$base/queue/$id.json", json_encode($payload, JSON_UNESCAPED_UNICODE));

header('Content-Type: application/json; charset=utf-8');
echo json_encode(['job_id' => $id]);
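For context, a minimal consumer sketch for the queue files written above (not part of the commit; the file name queue_consumer.py is hypothetical, and it assumes the script sits next to worker.py and reuses its run_online helper):

# queue_consumer.py — hypothetical glue: polls ai/queue/*.json written by enqueue.php
# and runs the online analysis for each job (no locking or error handling; sketch only).
import glob, json, os, time

from worker import run_online  # run_online is defined in worker.py later in this commit

BASE = os.path.dirname(os.path.abspath(__file__))

while True:
    for path in sorted(glob.glob(os.path.join(BASE, "queue", "*.json"))):
        with open(path, encoding="utf-8") as f:
            job = json.load(f)
        run_online(job["from"], job["to"], job["currency"],
                   job["axis"], job["label"], int(job["top_n"]), job["goal"])
        os.remove(path)  # drop the job file once it has been processed
    time.sleep(5)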
12
modules/EcmInvoiceOuts/ai/result.php
Normal file
@@ -0,0 +1,12 @@
<?php
// modules/EcmInvoiceOuts/ai/result.php
// Returns the most recent analysis result from ai/out/ as JSON.

$base  = __DIR__;
$files = glob("$base/out/*.json");

// File names are random UUIDs, so pick the newest file by modification time
// instead of sorting by name.
usort($files, static function ($a, $b) { return filemtime($b) <=> filemtime($a); });
$latest = $files[0] ?? null;

if (!$latest) { http_response_code(404); exit('Brak wyników'); }

$payload = json_decode(file_get_contents($latest), true);
header('Content-Type: application/json; charset=utf-8');
echo json_encode($payload, JSON_UNESCAPED_UNICODE | JSON_PRETTY_PRINT);
141
modules/EcmInvoiceOuts/ai/worker.py
Normal file
@@ -0,0 +1,141 @@
# worker.py
import os, json, io, uuid
import datetime as dt
from typing import Dict, Any, List

import polars as pl
import pymysql
from tenacity import retry, wait_exponential, stop_after_attempt
from dotenv import load_dotenv

load_dotenv()

AI_MODEL = os.getenv("AI_MODEL", "gpt-5-pro")
AI_API_KEY = os.getenv("AI_API_KEY")

MYSQL_CONF = dict(
    host=os.getenv("MYSQL_HOST", "localhost"),
    user=os.getenv("MYSQL_USER", "root"),
    password=os.getenv("MYSQL_PASSWORD", ""),
    database=os.getenv("MYSQL_DB", "sales"),
    cursorclass=pymysql.cursors.DictCursor,
)


def mysql_query(sql: str, params: tuple = ()) -> pl.DataFrame:
    """Run a parameterised query and return the rows as a Polars DataFrame."""
    conn = pymysql.connect(**MYSQL_CONF)
    try:
        with conn.cursor() as cur:
            cur.execute(sql, params)
            rows = cur.fetchall()
    finally:
        conn.close()
    # pl.from_dicts() cannot infer a schema from an empty result set.
    return pl.from_dicts(rows) if rows else pl.DataFrame()


def to_csv(df: pl.DataFrame) -> str:
    buf = io.StringIO()
    df.write_csv(buf)
    return buf.getvalue()


SQL_KPIS_DAILY = """
SELECT DATE(invoice_date) AS d,
       SUM(net_amount) AS revenue,
       SUM(quantity) AS qty,
       ROUND(100*SUM(net_amount - cost_amount)/NULLIF(SUM(net_amount),0), 2) AS gross_margin_pct,
       ROUND(100*SUM(discount_amount)/NULLIF(SUM(gross_amount),0), 2) AS discount_pct
FROM fact_invoices
WHERE invoice_date BETWEEN %s AND %s
GROUP BY 1
ORDER BY 1;
"""

# NOTE: `key` is a reserved word in MySQL, hence the backticks. The trend_30d column
# uses LAG over a constant ordering; review it before relying on the value.
SQL_TOP_SEGMENTS = """
SELECT {axis} AS `key`,
       ANY_VALUE({label}) AS label,
       SUM(net_amount) AS revenue,
       SUM(quantity) AS qty,
       ROUND(100*SUM(net_amount - cost_amount)/NULLIF(SUM(net_amount),0), 2) AS gross_margin_pct,
       ROUND(100*(SUM(net_amount) - LAG(SUM(net_amount)) OVER(ORDER BY 1))/
             NULLIF(LAG(SUM(net_amount)) OVER(ORDER BY 1),0), 2) AS trend_30d
FROM fact_invoices
WHERE invoice_date BETWEEN DATE_SUB(%s, INTERVAL 60 DAY) AND %s
GROUP BY 1
ORDER BY revenue DESC
LIMIT %s;
"""


class AIClient:
    def __init__(self, api_key: str):
        self.api_key = api_key

    @retry(wait=wait_exponential(multiplier=1, min=1, max=20), stop=stop_after_attempt(6))
    def structured_analysis(self, prompt: str, schema: Dict[str, Any]) -> Dict[str, Any]:
        # TODO: replace with a real model call using Structured Outputs
        # (a sketch follows this listing).
        raise NotImplementedError("Wire your model SDK here")

    @retry(wait=wait_exponential(multiplier=1, min=1, max=20), stop=stop_after_attempt(6))
    def batch_submit(self, ndjson_lines: List[str]) -> str:
        # TODO: replace with a real Batch API call
        raise NotImplementedError


def run_online(from_date: str, to_date: str, currency: str, axis: str, label: str, top_n: int, goal: str) -> Dict[str, Any]:
    kpis = mysql_query(SQL_KPIS_DAILY, (from_date, to_date))
    top = mysql_query(SQL_TOP_SEGMENTS.format(axis=axis, label=label), (from_date, to_date, top_n))

    csv_blocks = ("## kpis_daily\n" + to_csv(kpis) + "\n\n" +
                  "## top_segments\n" + to_csv(top))

    with open(os.path.join(os.path.dirname(__file__), "sales-analysis.schema.json"), "r", encoding="utf-8") as f:
        schema = json.load(f)

    prompt = f"""
Jesteś analitykiem sprzedaży. Otrzymasz: (a) kontekst, (b) dane.
Zwróć **wyłącznie** JSON zgodny ze schema.

Kontekst:
- Waluta: {currency}
- Zakres: {from_date} → {to_date}
- Cel: {goal}
- Poziom segmentacji: {axis}

Dane (CSV):
{csv_blocks}

Wskazówki:
- Użyj danych jak są (nie wymyślaj liczb).
- W meta.scope wpisz opis zakresu i segmentacji.
- Jeśli brak anomalii – anomalies: [].
- Kwoty do 2 miejsc, procenty do 1.
"""

    ai = AIClient(AI_API_KEY)
    result = ai.structured_analysis(prompt, schema)

    out_dir = os.path.join(os.path.dirname(__file__), "out")
    os.makedirs(out_dir, exist_ok=True)
    out_path = os.path.join(out_dir, f"{uuid.uuid4()}.json")
    with open(out_path, "w", encoding="utf-8") as f:
        json.dump(result, f, ensure_ascii=False)
    return {"status": "ok", "path": out_path}


def run_batch(from_date: str, to_date: str, axis: str, label: str):
    # Per the blueprint we generate NDJSON lines (abridged here; the full variant is in the PDF).
    # TODO: add the real batch_submit calls and persist the batch ID/state.
    raise NotImplementedError("Implement batch per blueprint")


if __name__ == "__main__":
    import argparse
    p = argparse.ArgumentParser()
    sub = p.add_subparsers(dest="cmd")
    o = sub.add_parser("online")
    o.add_argument("from_date"); o.add_argument("to_date"); o.add_argument("currency")
    o.add_argument("axis", choices=["sku_id", "client_id", "region_code"])
    o.add_argument("label"); o.add_argument("top_n", type=int, nargs="?", default=50)
    o.add_argument("goal")
    b = sub.add_parser("batch")
    b.add_argument("from_date"); b.add_argument("to_date"); b.add_argument("axis"); b.add_argument("label")
    args = p.parse_args()

    if args.cmd == "online":
        print(run_online(args.from_date, args.to_date, args.currency, args.axis, args.label, args.top_n, args.goal))
    elif args.cmd == "batch":
        print(run_batch(args.from_date, args.to_date, args.axis, args.label))
    else:
        p.print_help()
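For the structured_analysis TODO above, a minimal sketch of the wiring (an assumption, not the project's actual SDK choice): it uses the OpenAI Python SDK with JSON-schema structured outputs, and the "sales_analysis" schema name is illustrative only.

# Hypothetical replacement body for AIClient.structured_analysis, assuming the OpenAI SDK.
import json
from typing import Any, Dict

from openai import OpenAI


def structured_analysis(api_key: str, model: str, prompt: str, schema: Dict[str, Any]) -> Dict[str, Any]:
    client = OpenAI(api_key=api_key)
    resp = client.chat.completions.create(
        model=model,
        messages=[{"role": "user", "content": prompt}],
        # Constrain the reply to the JSON schema loaded from sales-analysis.schema.json.
        response_format={
            "type": "json_schema",
            "json_schema": {"name": "sales_analysis", "schema": schema, "strict": True},
        },
    )
    return json.loads(resp.choices[0].message.content)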
@@ -1,66 +1,164 @@
<?php
<!-- KPI -->
<div class="grid">
  <div class="card">
    <div class="kpi-label">Łączny przychód</div>
    <div class="kpi-value">587 679,40 PLN</div>
  </div>
  <div class="card">
    <div class="kpi-label">Liczba faktur</div>
    <div class="kpi-value">320</div>
  </div>
  <div class="card">
    <div class="kpi-label">Sprzedane jednostki</div>
    <div class="kpi-value">182 619 szt.</div>
  </div>
  <div class="card">
    <div class="kpi-label">AOV — średnia wartość faktury</div>
    <div class="kpi-value">1 836,50 PLN</div>
    <div class="kpi-sub muted">AOV = przychód / liczba faktur</div>
  </div>
</div>

$db = $GLOBALS['db'];
<!-- Top produkty -->
<h2>Top produkty wg przychodu</h2>
<div class="card table-card">
  <table>
    <thead>
      <tr>
        <th style="width:22%">Kod</th>
        <th>Produkt</th>
        <th style="width:18%">Przychód [PLN]</th>
      </tr>
    </thead>
    <tbody>
      <tr><td>FR00099_250_Wilfa</td><td>WIUCC-250 CLEANING LIQUID COFFEEMAKER, 250 ml</td><td>51 217,92</td></tr>
      <tr><td>AGDPR01</td><td>Środek do czyszczenia pralek automatycznych</td><td>47 500,00</td></tr>
      <tr><td>FR00013_1000_Drekker</td><td>Odkamieniacz do automatycznych ekspresów do kawy, 1000 ml</td><td>30 600,00</td></tr>
      <tr><td>AGDCHRM01</td><td>Płyn do robotów mopujących, 500ml</td><td>22 277,70</td></tr>
      <tr><td>FR00016_10_2g_amz_de</td><td>Cleaning tablets for coffee machines, 10 x 2g</td><td>19 426,00</td></tr>
    </tbody>
  </table>
</div>

$query = "
    SELECT s.document_no, s.register_date, s.parent_name, s.total_netto, si.code, si.name, si.quantity, si.price_netto
    FROM ecmsaleitems AS si
    INNER JOIN ecmsales AS s ON si.ecmsale_id = s.id
    WHERE s.register_date >= NOW() - INTERVAL 7 DAY
    ORDER BY s.register_date DESC;
";
<!-- Top klienci -->
<h2>Top klienci wg przychodu</h2>
<div class="card table-card">
  <table>
    <thead>
      <tr>
        <th>Klient</th>
        <th style="width:20%">Przychód [PLN]</th>
      </tr>
    </thead>
    <tbody>
      <tr><td>Euro-net Sp. z o.o.</td><td>138 660,08</td></tr>
      <tr><td>Wilfa AS</td><td>71 616,72</td></tr>
      <tr><td>Aqualogis Polska Sp. z o.o.</td><td>58 108,20</td></tr>
      <tr><td>dm-drogerie markt Sp. z o.o.</td><td>40 108,08</td></tr>
      <tr><td>MediaRange GmbH</td><td>40 064,24</td></tr>
    </tbody>
  </table>
</div>

$res = $db->query($query);

$results = [];
if ($res) {
    $first = $db->fetchByAssoc($res);
    if ($first) {
        // First row: emit the column names as a header row, then rewind
        // so the data loop below also sees the first record.
        $results[] = array_keys($first);
        mysqli_data_seek($res, 0);
        while ($row = $db->fetchByAssoc($res)) {
            $results[] = array_values($row);
        }
    }
}

$jsonData = json_encode($results);
// Do not commit secrets: read the API key from the environment instead of hard-coding it.
$apiKey = getenv('OPENAI_API_KEY');

$messages = [
    [
        "role" => "system",
        "content" => "Jesteś analitykiem danych. Przygotuj szczegółowy raport sprzedaży na podstawie danych w formacie JSON (jest to lista zamówień z ostatnich 7 dni). Wygeneruj czysty kod HTML wewnątrz jednego <div>, bez <html>, <head> ani <body>. Raport powinien zawierać tabele, nagłówki, podsumowania, wnioski, rekomendacje i listę potencjalnych nieprawidłowości. Dane zachowaj w oryginale (nie tłumacz nazw). Zadbaj o estetyczny i uporządkowany układ raportu. Zwróć tylko kod HTML. Odpowiedz w języku polskim.",
    ],
    [
        "role" => "user",
        "content" => "Oto dane sprzedaży w formacie JSON:\n\n$jsonData"
    ]
];

$payload = [
    "model" => "gpt-4.1",
    "messages" => $messages,
    "temperature" => 0.3
];

$ch = curl_init('https://api.openai.com/v1/chat/completions');
curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
curl_setopt($ch, CURLOPT_HTTPHEADER, [
    'Content-Type: application/json',
    'Authorization: Bearer ' . $apiKey
]);
curl_setopt($ch, CURLOPT_POSTFIELDS, json_encode($payload));

$response = curl_exec($ch);
if (curl_errno($ch)) {
    echo 'Błąd: ' . curl_error($ch);
    exit;
}
curl_close($ch);

$data = json_decode($response, true);
$htmlReport = $data['choices'][0]['message']['content'] ?? '';

echo $htmlReport;
<!-- Mix change alerts -->
<h2>Alerty „mix change” (duże zmiany udziału produktu w przychodzie)</h2>
<div class="card table-card">
  <table>
    <thead>
      <tr>
        <th style="width:11%">Data</th>
        <th style="width:20%">Kod</th>
        <th>Produkt</th>
        <th style="width:12%">Przychód [PLN]</th>
        <th style="width:10%">Szt.</th>
        <th style="width:10%">Udział dnia</th>
        <th style="width:14%">Porównanie</th>
        <th style="width:13%">Δ udziału</th>
        <th style="width:10%">Baseline</th>
      </tr>
    </thead>
    <tbody>
      <tr><td>2025-07-01</td><td>FR00006_250_amz_de</td><td>Płyn do czyszczenia pralek automatycznych, 250 ml</td><td>1 169,60</td><td>136</td><td><span class="pill ok">23,98%</span></td><td>vs mediana miesiąca</td><td><span class="pill ok">+19,99 pp</span></td><td>3,99%</td></tr>
      <tr><td>2025-07-02</td><td>SECO002</td><td>ECO Wilgotne Ściereczki do ekranów</td><td>1 826,52</td><td>372</td><td><span class="pill ok">24,93%</span></td><td>vs mediana miesiąca</td><td><span class="pill ok">+12,17 pp</span></td><td>12,75%</td></tr>
      <tr><td>2025-07-02</td><td>RE00094</td><td>Płyn do ekranów TABLET/SMARTFON/LCD/PLASMA, 250 ml</td><td>1 048,56</td><td>204</td><td><span class="pill warn">14,31%</span></td><td>vs mediana miesiąca</td><td><span class="pill warn">+6,55 pp</span></td><td>7,76%</td></tr>
      <tr><td>2025-07-09</td><td>ICL-6550-INT</td><td>Compressed gas duster, 400 ml</td><td>3 494,40</td><td>624</td><td><span class="pill warn">10,09%</span></td><td>vs mediana miesiąca</td><td><span class="pill warn">+6,87 pp</span></td><td>3,23%</td></tr>
      <tr><td>2025-07-09</td><td>ICL-6575-INT</td><td>Compressed gas duster, 600 ml</td><td>3 463,20</td><td>444</td><td><span class="pill">10,00%</span></td><td>vs mediana miesiąca</td><td><span class="pill">+4,39 pp</span></td><td>5,61%</td></tr>
      <tr><td>2025-07-10</td><td>ICL-6550-INT</td><td>Compressed gas duster, 400 ml</td><td>1 881,60</td><td>336</td><td><span class="pill">3,05%</span></td><td>vs rolling 7 dni</td><td><span class="pill bad">−7,05 pp</span></td><td>10,09%</td></tr>
      <tr><td>2025-07-10</td><td>ICL-6575-INT</td><td>Compressed gas duster, 600 ml</td><td>3 463,20</td><td>444</td><td><span class="pill">5,61%</span></td><td>vs rolling 7 dni</td><td><span class="pill bad">−4,39 pp</span></td><td>10,00%</td></tr>
      <tr><td>2025-07-17</td><td>FR00006_250_amz_de</td><td>Płyn do czyszczenia pralek automatycznych, 250 ml</td><td>1 080,00</td><td>144</td><td><span class="pill">3
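The „mix change" alerts above flag days on which a product's share of daily revenue deviates strongly, in percentage points, from a baseline (the monthly median share or a rolling 7-day share). A minimal pandas sketch of the monthly-median variant, assuming an invoice-line frame with register_date, code and total_netto columns such as the one fetch_invoices() in invoice_ai_analysis.py below returns; the function name and threshold are illustrative:

import pandas as pd


def mix_change_alerts(df: pd.DataFrame, threshold_pp: float = 5.0) -> pd.DataFrame:
    """Flag (day, product) pairs whose revenue share deviates from the product's median daily share."""
    d = df.copy()
    d["day"] = d["register_date"].dt.date
    # Revenue per product per day and its share of that day's total revenue.
    daily = d.groupby(["day", "code"], as_index=False)["total_netto"].sum()
    daily = daily.rename(columns={"total_netto": "revenue"})
    daily["share_pct"] = 100 * daily["revenue"] / daily.groupby("day")["revenue"].transform("sum")
    # Baseline: the product's median daily share over the analysed period.
    daily["baseline_pct"] = daily.groupby("code")["share_pct"].transform("median")
    daily["delta_pp"] = daily["share_pct"] - daily["baseline_pct"]
    # Keep only moves larger than the threshold, expressed in percentage points.
    return daily[daily["delta_pp"].abs() >= threshold_pp].sort_values("delta_pp", ascending=False)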
147
modules/EcmInvoiceOuts/invoice_ai_analysis.py
Normal file
@@ -0,0 +1,147 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Invoice AI Analysis — end-to-end script (MySQL -> KPIs -> Mix Change -> Anomalies -> HTML)
See previous instructions for usage and requirements.
"""

from __future__ import annotations
import os
import sys
import argparse
from dataclasses import dataclass
from typing import Any, List, Dict

import numpy as np
import pandas as pd

from sqlalchemy import create_engine, text


@dataclass
class Config:
    host: str
    port: int
    user: str
    password: str
    database: str
    date_from: str
    date_to: str
    doc_type: str
    output_html: str


def parse_args() -> Config:
    parser = argparse.ArgumentParser(description="Invoice AI Analysis (MySQL -> HTML)")
    parser.add_argument("--host", default=os.getenv("DB_HOST", "localhost"))
    parser.add_argument("--port", default=int(os.getenv("DB_PORT", "3306")), type=int)
    parser.add_argument("--user", default=os.getenv("DB_USER", "root"))
    parser.add_argument("--password", default=os.getenv("DB_PASS", "rootpassword"))
    parser.add_argument("--database", default=os.getenv("DB_NAME", "twinpol-mysql56"))
    parser.add_argument("--from", dest="date_from", default="2025-07-01")
    parser.add_argument("--to", dest="date_to", default="2025-08-01")
    parser.add_argument("--type", dest="doc_type", default="normal")
    parser.add_argument("--out", dest="output_html", default="report.html")
    args = parser.parse_args()
    return Config(
        host=args.host, port=args.port, user=args.user, password=args.password,
        database=args.database, date_from=args.date_from, date_to=args.date_to,
        doc_type=args.doc_type, output_html=args.output_html
    )


def get_engine(cfg: Config):
    url = f"mysql+pymysql://{cfg.user}:{cfg.password}@{cfg.host}:{cfg.port}/{cfg.database}?charset=utf8mb4"
    return create_engine(url, pool_recycle=3600, pool_pre_ping=True, future=True)


def fetch_invoices(engine, cfg: Config) -> pd.DataFrame:
    """Load invoice line items for the configured date range and document type."""
    sql = text("""
        SELECT i.document_no,
               i.parent_name,
               DATE(i.register_date) AS register_date,
               ii.code,
               ii.name,
               ii.quantity,
               ii.total_netto
        FROM ecminvoiceoutitems AS ii
        JOIN ecminvoiceouts AS i ON i.id = ii.ecminvoiceout_id
        WHERE i.register_date >= :date_from
          AND i.register_date < :date_to
          AND i.type = :doc_type
    """)
    with engine.connect() as con:
        df = pd.read_sql(sql, con, params={
            "date_from": cfg.date_from,
            "date_to": cfg.date_to,
            "doc_type": cfg.doc_type
        })
    df["register_date"] = pd.to_datetime(df["register_date"], errors="coerce")
    df["quantity"] = pd.to_numeric(df["quantity"], errors="coerce")
    df["total_netto"] = pd.to_numeric(df["total_netto"], errors="coerce")
    return df.dropna(subset=["register_date", "quantity", "total_netto"])


def compute_kpis(df: pd.DataFrame) -> Dict[str, Any]:
    total_revenue = float(df["total_netto"].sum())
    total_invoices = int(df["document_no"].nunique())
    total_units = float(df["quantity"].sum())
    aov = float(total_revenue / total_invoices) if total_invoices else 0.0
    top_products = (df.groupby(["code", "name"], as_index=False)
                      .agg(total_netto=("total_netto", "sum"))
                      .sort_values("total_netto", ascending=False)
                      .head(5))
    top_customers = (df.groupby(["parent_name"], as_index=False)
                       .agg(total_netto=("total_netto", "sum"))
                       .sort_values("total_netto", ascending=False)
                       .head(5))
    return {
        "total_revenue": total_revenue,
        "total_invoices": total_invoices,
        "total_units": total_units,
        "aov": aov,
        "top_products": top_products,
        "top_customers": top_customers,
    }


def render_html(cfg: Config, kpis: Dict[str, Any]) -> str:
    # Polish-style number formatting: space as thousands separator, comma as decimal mark.
    def fmt_cur(x): return f"{x:,.2f}".replace(",", " ").replace(".", ",")

    def table(headers, rows):
        th = "".join(f"<th>{h}</th>" for h in headers)
        trs = "".join("<tr>" + "".join(f"<td>{v}</td>" for v in row) + "</tr>" for row in rows)
        return f"<table><thead><tr>{th}</tr></thead><tbody>{trs}</tbody></table>"

    kpi_table = table(["Metryka", "Wartość"], [
        ["Łączny przychód", f"{fmt_cur(kpis['total_revenue'])} PLN"],
        ["Liczba faktur", f"{kpis['total_invoices']}"],
        ["Sprzedane jednostki", f"{int(kpis['total_units']):,}".replace(",", " ")],
        ["Średnia wartość faktury", f"{fmt_cur(kpis['aov'])} PLN"]
    ])
    prod_table = table(["Kod", "Produkt", "Przychód"], [
        [r["code"], r["name"], fmt_cur(r["total_netto"]) + " PLN"]
        for _, r in kpis["top_products"].iterrows()
    ])
    cust_table = table(["Klient", "Przychód"], [
        [r["parent_name"], fmt_cur(r["total_netto"]) + " PLN"]
        for _, r in kpis["top_customers"].iterrows()
    ])
    return f"""<html><head><meta charset="utf-8"><style>
body{{font-family:Arial, sans-serif;margin:20px}}table{{border-collapse:collapse;width:100%}}
th,td{{border:1px solid #ccc;padding:8px;text-align:left}}
th{{background:#f4f4f4}}
</style></head><body>
<h1>Analiza faktur ({cfg.date_from} → {cfg.date_to})</h1>
<h2>KPI</h2>{kpi_table}
<h2>Top produkty</h2>{prod_table}
<h2>Top klienci</h2>{cust_table}
</body></html>"""


def main():
    cfg = parse_args()
    engine = get_engine(cfg)
    df = fetch_invoices(engine, cfg)
    if df.empty:
        print("No data found.")
        return
    kpis = compute_kpis(df)
    html = render_html(cfg, kpis)
    with open(cfg.output_html, "w", encoding="utf-8") as f:
        f.write(html)
    print(f"Report written to {cfg.output_html}")


if __name__ == "__main__":
    main()
2
modules/EcmInvoiceOuts/invoice_summary.py
Normal file
@@ -0,0 +1,2 @@
print("Hello")