Compare commits

...

18 Commits

Author SHA1 Message Date
zzdrojewskipaw
fb9da812de generowanie raportów z wyborem preagregatów 2025-09-29 00:19:15 +02:00
zzdrojewskipaw
a1ddb05402 analiza AI rozbudowana 2025-09-07 20:41:59 +02:00
zzdrojewskipaw
60a7959e0d dodane klucza gpt5 2025-09-07 20:14:58 +02:00
zzdrojewskipaw
530f83f77b # preagregaty i analiza AI 2025-09-07 19:25:04 +02:00
zzdrojewskipaw
ccb389c446 Merge branch 'main' into #3-Analiza-sprzedaży-AI 2025-09-02 23:04:56 +02:00
zzdrojewskipaw
c750d61a0a script preaggregates.py 2025-09-02 23:04:35 +02:00
Michał Zieliński
be27602ca3 calculate delivery date fix 2025-08-30 13:15:50 +02:00
Michał Zieliński
eab6fc87b1 AI reports 2025-08-27 08:54:00 +02:00
Michał Zieliński
f3c8adb3c8 AI reports 2025-08-27 07:57:33 +02:00
zzdrojewskipaw
c3c3eed46e zmiana uprawnień do pliku CSV rw-rw-r-- 2025-08-26 21:03:40 +02:00
Michał Zieliński
113e00080a EDI, send_date fix 2025-08-26 19:12:17 +02:00
Michał Zieliński
6fdc2f11b5 summaryNew 2025-08-26 16:18:57 +02:00
Michał Zieliński
7ec831b114 summaryNew 2025-08-26 15:26:23 +02:00
Michał Zieliński
bc6348486d back to begin :) 2025-08-26 12:16:05 +02:00
Michał Zieliński
d9a932acd9 PaymentStates fix 2025-08-26 12:03:57 +02:00
Michał Zieliński
f63dd0c7e2 PaymentStates fix 2025-08-26 11:16:43 +02:00
Michał Zieliński
4f8812117c Merge remote-tracking branch 'origin/main' 2025-08-26 11:15:58 +02:00
Michał Zieliński
7e33e40fcc PaymentStates fix 2025-08-26 10:42:45 +02:00
13 changed files with 2121 additions and 835 deletions

View File

@@ -360,7 +360,7 @@ GROUP BY
ORDER BY i.register_date DESC;
",
'filename' => 'invoices_2024.csv',
],
], // invoices 2024
[
'sql' => "
SELECT
@@ -400,7 +400,101 @@ GROUP BY
ORDER BY i.register_date DESC;
",
'filename' => 'invoices_2025.csv',
],
], // invoices 2025
[
'sql' => "
SELECT
i.document_no,
i.register_date,
oi.document_no AS FV,
oi.register_date AS FV_date,
i.parent_name,
p.code,
p.name,
CASE p.group_ks
WHEN 1 THEN 'Towar handlowy'
WHEN 2 THEN 'Wyrób gotowy'
WHEN 3 THEN 'Usługi'
WHEN '530547ef-2dea-7622-843b-59d745b14c64' THEN 'Materiały'
WHEN '8451dded-710f-51c2-7ed1-60a377eaa7b7' THEN 'Surowce'
ELSE 'Nieznane'
END AS group_ks,
GROUP_CONCAT(c.name ORDER BY cb.position SEPARATOR ' | ') AS category,
ii.quantity_corrected AS quantity_correced,
ii.total_netto_corrected AS total_netto_corrected
FROM ecminvoiceouts AS i
INNER JOIN ecminvoiceoutitems AS ii ON i.id = ii.ecminvoiceout_id
INNER JOIN ecmproducts AS p ON ii.ecmproduct_id = p.id
INNER JOIN ecminvoiceouts AS oi ON oi.id = i.ecminvoiceout_id
LEFT JOIN ecmproductcategories_bean AS cb ON cb.bean_id COLLATE utf8_general_ci = p.id COLLATE utf8_general_ci
AND cb.bean_name = 'EcmProducts'
AND cb.deleted = 0
LEFT JOIN ecmproductcategories AS c ON c.id = cb.ecmproductcategory_id
WHERE i.type = 'correct' AND YEAR(i.register_date) = 2024
GROUP BY
i.document_no,
i.register_date,
oi.document_no,
oi.register_date,
i.parent_name,
p.code,
p.name,
p.group_ks,
ii.quantity,
ii.price_netto,
ii.quantity_corrected,
ii.total_netto_corrected
ORDER BY i.register_date DESC;
",
'filename' => 'correct_invoices_2024.csv',
], // correct invoices 2024
[
'sql' => "
SELECT
i.document_no,
i.register_date,
oi.document_no AS FV,
oi.register_date AS FV_date,
i.parent_name,
p.code,
p.name,
CASE p.group_ks
WHEN 1 THEN 'Towar handlowy'
WHEN 2 THEN 'Wyrób gotowy'
WHEN 3 THEN 'Usługi'
WHEN '530547ef-2dea-7622-843b-59d745b14c64' THEN 'Materiały'
WHEN '8451dded-710f-51c2-7ed1-60a377eaa7b7' THEN 'Surowce'
ELSE 'Nieznane'
END AS group_ks,
GROUP_CONCAT(c.name ORDER BY cb.position SEPARATOR ' | ') AS category,
ii.quantity_corrected AS quantity_correced,
ii.total_netto_corrected AS total_netto_corrected
FROM ecminvoiceouts AS i
INNER JOIN ecminvoiceoutitems AS ii ON i.id = ii.ecminvoiceout_id
INNER JOIN ecmproducts AS p ON ii.ecmproduct_id = p.id
INNER JOIN ecminvoiceouts AS oi ON oi.id = i.ecminvoiceout_id
LEFT JOIN ecmproductcategories_bean AS cb ON cb.bean_id COLLATE utf8_general_ci = p.id COLLATE utf8_general_ci
AND cb.bean_name = 'EcmProducts'
AND cb.deleted = 0
LEFT JOIN ecmproductcategories AS c ON c.id = cb.ecmproductcategory_id
WHERE i.type = 'correct' AND YEAR(i.register_date) = 2025
GROUP BY
i.document_no,
i.register_date,
oi.document_no,
oi.register_date,
i.parent_name,
p.code,
p.name,
p.group_ks,
ii.quantity,
ii.price_netto,
ii.quantity_corrected,
ii.total_netto_corrected
ORDER BY i.register_date DESC;
",
'filename' => 'correct_invoices_2025.csv',
], // correct invoices 2025
[
'sql' => "
SELECT
@@ -440,7 +534,7 @@ GROUP BY
ORDER BY i.register_date DESC;
",
'filename' => 'ecommerce_invoices_2024.csv',
],
], // ecommerce invoices 2024
[
'sql' => "
SELECT
@@ -480,7 +574,7 @@ GROUP BY
ORDER BY i.register_date DESC;
",
'filename' => 'ecommerce_invoices_2025.csv',
],
], // ecommerce invoices 2025
[
'sql' => "
SELECT
@@ -492,7 +586,83 @@ ORDER BY i.register_date DESC;
JOIN ecmstocks AS s ON ss.stock_id = s.id
ORDER BY quantity + 0 DESC;",
'filename' => 'stocks.csv',
],
], // stocks
[
'sql' => "
SELECT
i.document_no,
i.register_date,
p.code,
p.name,
CASE p.group_ks
WHEN 1 THEN 'Towar handlowy'
WHEN 2 THEN 'Wyrób gotowy'
WHEN 3 THEN 'Surowiec'
WHEN 4 THEN 'Usługa'
ELSE 'Nieznane'
END AS group_ks,
GROUP_CONCAT(c.name ORDER BY cb.position SEPARATOR ' | ') AS category,
s.name AS stock,
ii.quantity
FROM ecmstockdocinsideouts AS i
INNER JOIN ecmstockdocinsideoutitems AS ii ON i.id = ii.ecmstockdocinsideout_id
INNER JOIN ecmproducts AS p ON ii.ecmproduct_id = p.id
INNER JOIN ecmstocks AS s ON i.stock_id = s.id
LEFT JOIN ecmproductcategories_bean AS cb ON cb.bean_id COLLATE utf8_general_ci = p.id COLLATE utf8_general_ci
AND cb.bean_name = 'EcmProducts'
AND cb.deleted = 0
LEFT JOIN ecmproductcategories AS c ON c.id = cb.ecmproductcategory_id
WHERE YEAR(i.register_date) = 2025
GROUP BY
i.document_no,
i.register_date,
p.code,
p.name,
p.group_ks,
s.name,
ii.quantity
ORDER BY i.register_date DESC;
",
'filename' => 'rw_2025.csv',
], // rw 2025
[
'sql' => "
SELECT
i.document_no,
i.register_date,
p.code,
p.name,
CASE p.group_ks
WHEN 1 THEN 'Towar handlowy'
WHEN 2 THEN 'Wyrób gotowy'
WHEN 3 THEN 'Surowiec'
WHEN 4 THEN 'Usługa'
ELSE 'Nieznane'
END AS group_ks,
GROUP_CONCAT(c.name ORDER BY cb.position SEPARATOR ' | ') AS category,
s.name AS stock,
ii.quantity
FROM ecmstockdocinsideouts AS i
INNER JOIN ecmstockdocinsideoutitems AS ii ON i.id = ii.ecmstockdocinsideout_id
INNER JOIN ecmproducts AS p ON ii.ecmproduct_id = p.id
INNER JOIN ecmstocks AS s ON i.stock_id = s.id
LEFT JOIN ecmproductcategories_bean AS cb ON cb.bean_id COLLATE utf8_general_ci = p.id COLLATE utf8_general_ci
AND cb.bean_name = 'EcmProducts'
AND cb.deleted = 0
LEFT JOIN ecmproductcategories AS c ON c.id = cb.ecmproductcategory_id
WHERE YEAR(i.register_date) = 2024
GROUP BY
i.document_no,
i.register_date,
p.code,
p.name,
p.group_ks,
s.name,
ii.quantity
ORDER BY i.register_date DESC;
",
'filename' => 'rw_2024.csv',
], // rw 2024
];
$report = [];
@@ -576,5 +746,6 @@ function exportToCSVFile($res, $fullpath, array $headers = null, $delimiter = ';
}
fclose($fp);
return ['ok'=>true, 'path'=>$fullpath, 'rows'=>$count, 'error'=>null];
$chmod_ok = @chmod($fullpath, 0664);
return ['ok'=>true, 'path'=>$fullpath, 'rows'=>$count, 'chmod'=>$chmod_ok, 'error'=>null];
}

View File

@@ -0,0 +1,713 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
analysisAI.py — pobiera dane z MySQL, liczy wyłącznie WSKAZANE preagregaty,
renderuje HTML i (opcjonalnie) dodaje analizę AI — tylko jeśli ją zaznaczysz.
Parametry CLI (z formularza PHP):
--date-from YYYY-MM-DD
--date-to YYYY-MM-DD (zamieniane wewnętrznie na +1 dzień, bo SQL ma warunek '< date_to')
--metric NAZWA (można podać wiele razy: --metric a --metric b ...)
--metrics CSV (opcjonalnie alternatywnie: --metrics a,b,c)
--ai true|false (czy uruchomić analizę AI — tylko gdy są preagregaty z danymi)
Preagregaty:
- kpis (aliasy: basic, basic_totals) — podstawowe KPI: sprzedaż, ilość, dokumenty, ASP
- daily_sales, product_summary, customer_summary, product_daily,
top10_products_by_sales, top10_customers_by_sales (z preaggregates.py)
"""
import os, sys, json, math, time, warnings, argparse, traceback, html
from datetime import date, timedelta, datetime
# (1) Wycisza ostrzeżenia urllib3 (LibreSSL / stary OpenSSL)
try:
from urllib3.exceptions import NotOpenSSLWarning
warnings.filterwarnings("ignore", category=NotOpenSSLWarning)
except Exception:
pass
# (2) Importy zewnętrzne
import requests
import mysql.connector
import pandas as pd
# Public Looker Studio dashboard linked from the report footer.
LOOKER_URL = "https://lookerstudio.google.com/u/0/reporting/107d4ccc-e7eb-4c38-8dce-00700b44f60e/page/ba1YF"
# ========== AI API KEY CONFIGURATION ==========
# SECURITY NOTE(review): a live-looking OpenAI API key is hardcoded below and
# committed to source control — rotate this key immediately and load it from
# the OPENAI_API_KEY environment variable instead.
API_KEY = "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"
API_KEY_HARDCODE = API_KEY
# === Pre-aggregate imports ===
from preaggregates import serialize_for_ai
import preaggregates as pre  # pre.AGGREGATORS, pre.to_df
# ========== UTILKI ==========
def html_fatal(msg, title="Błąd"):
    """Write a styled HTML error box to stdout and terminate with exit code 1.

    The PHP caller captures stdout, so errors are surfaced as renderable HTML
    rather than tracebacks. Both title and message are HTML-escaped.
    """
    box = (
        '<div style="font-family:system-ui,-apple-system,Segoe UI,Roboto,Arial,sans-serif;'
        'max-width:900px;margin:24px auto;padding:16px 20px;border:1px solid #fecaca;'
        'border-radius:12px;background:#fff5f5;color:#991b1b;">'
        + '<h3 style="margin:0 0 8px;font-size:18px;">' + html.escape(title) + '</h3>'
        + '<pre style="white-space:pre-wrap;margin:0;">' + html.escape(msg) + '</pre>'
        + '</div>'
    )
    sys.stdout.write(box)
    sys.exit(1)
def connect_html_or_die(cfg, label="MySQL"):
    """Open a MySQL connection from cfg, or render an HTML error page and exit.

    On mysql.connector.Error the host/port/user and driver error details are
    shown; a full traceback is appended when the DEBUG env var is set.
    """
    try:
        return mysql.connector.connect(**cfg)
    except mysql.connector.Error as e:
        detail = (
            "[{}] Błąd połączenia ({}:{} jako '{}').\n".format(
                label, cfg.get("host"), cfg.get("port"), cfg.get("user"))
            + "errno={} sqlstate={}\n".format(
                getattr(e, 'errno', None), getattr(e, 'sqlstate', None))
            + "msg={}".format(getattr(e, 'msg', str(e)))
        )
        if os.environ.get("DEBUG"):
            detail += "\n\n" + traceback.format_exc()
        html_fatal(detail, title="Błąd połączenia MySQL")
def getenv(k, d=None):
    """Return environment variable `k`, or default `d` when it is unset."""
    return os.getenv(k, d)
def last_full_month_bounds():
    """Return (from, to) ISO dates spanning the previous full calendar month.

    'to' is exclusive: it is the first day of the current month, matching the
    SQL condition `register_date < %s` used downstream.
    """
    first_of_current = date.today().replace(day=1)
    last_of_previous = first_of_current - timedelta(days=1)
    first_of_previous = last_of_previous.replace(day=1)
    return first_of_previous.isoformat(), first_of_current.isoformat()
def add_one_day(iso_date):
    """Return `iso_date` ('YYYY-MM-DD') shifted forward by one day.

    Best-effort: on any parse/format problem the input is returned unchanged.
    """
    try:
        next_day = datetime.strptime(iso_date, "%Y-%m-%d") + timedelta(days=1)
        return next_day.strftime("%Y-%m-%d")
    except Exception:
        return iso_date  # hand back whatever we received
def safe_num(v, ndigits=None):
    """Coerce `v` to float, returning None for non-numeric or non-finite input.

    When `ndigits` is given the value is rounded to that many decimal places.
    """
    try:
        num = float(v)
        if not math.isfinite(num):
            return None
        return num if ndigits is None else round(num, ndigits)
    except Exception:
        return None
def safe_date(v):
    """Best-effort conversion of `v` to a 'YYYY-MM-DD' string.

    None stays None; datetime-like objects use their .date(); ISO-looking
    strings are truncated to the date part; anything else passes through as
    its str() form. Unexpected failures yield None.
    """
    if v is None:
        return None
    try:
        if hasattr(v, "date"):
            return str(v.date())
        s = str(v)
        # Already looks ISO ('YYYY-MM-DD...'): keep only the first 10 chars.
        looks_iso = len(s) >= 10 and s[4] == '-' and s[7] == '-'
        return s[:10] if looks_iso else s
    except Exception:
        return None
def fmt_money(v):
    """Format a number Polish-style: space thousands separator, comma decimal.

    Non-numeric input is returned as its str() form.
    """
    try:
        text = "{:,.2f}".format(float(v))
    except Exception:
        return str(v)
    return text.replace(",", " ").replace(".", ",")
def compact_table(table, limit=30):
    """Truncate a list of row-dicts to `limit` rows for compact AI payloads.

    Finite floats are rounded to 6 decimal places; non-finite floats (nan/inf)
    become None. A falsy `table` yields an empty list.
    """
    if not table:
        return []

    def _clean(value):
        if isinstance(value, float):
            return round(value, 6) if math.isfinite(value) else None
        return value

    return [
        {key: _clean(val) for key, val in row.items()}
        for row in table[:int(limit)]
    ]
def call_openai_chat(api_key, model, system_prompt, user_payload_json,
                     temperature=0.3, connect_timeout=10, read_timeout=90, max_retries=3):
    """POST a chat-completion request to OpenAI with exponential backoff.

    Args:
        api_key: bearer token for the OpenAI API.
        model: model name (e.g. 'gpt-4.1').
        system_prompt: system-role instruction string.
        user_payload_json: JSON string appended to the user message.
        temperature: sampling temperature.
        connect_timeout / read_timeout: seconds, passed to requests as a tuple.
        max_retries: total number of attempts.
    Returns:
        The assistant message content ('' when the response carries none).
    Raises:
        RuntimeError: when every attempt fails (last error embedded).

    Fix: the original slept (up to 10 s) even after the FINAL failed attempt,
    delaying the raise for no benefit; now we only back off between attempts.
    """
    url = "https://api.openai.com/v1/chat/completions"
    headers = {"Authorization": "Bearer " + api_key, "Content-Type": "application/json"}
    body = {
        "model": model,
        "messages": [
            {"role": "system", "content": system_prompt},
            {"role": "user", "content": "Dane (JSON):\n\n" + user_payload_json},
        ],
        "temperature": temperature,
    }
    last_err = None
    attempts = int(max_retries)
    for attempt in range(1, attempts + 1):
        try:
            r = requests.post(url, headers=headers, json=body, timeout=(connect_timeout, read_timeout))
            if 200 <= r.status_code < 300:
                data = r.json()
                return data.get("choices", [{}])[0].get("message", {}).get("content", "")
            last_err = RuntimeError("OpenAI HTTP {}: {}".format(r.status_code, r.text))
        except requests.exceptions.RequestException as e:
            last_err = e
        if attempt < attempts:
            # Exponential backoff between attempts, capped at 10 s.
            time.sleep(min(2 ** attempt, 10))
    raise RuntimeError("OpenAI request failed: {}".format(last_err))
def html_table(records, title=None, max_rows=20):
    """Render a list of row-dicts as an HTML table (columns from the first row).

    Args:
        records: list of dicts; empty/None yields a 'Brak danych' placeholder.
        title: optional caption rendered above the table.
        max_rows: cap on the number of body rows.
    Returns:
        HTML string.

    Fix: cell values, column headers and the title are now HTML-escaped — the
    data comes from the database (product/customer names) and previously
    flowed into markup unescaped, risking broken markup / stored XSS.
    Numeric columns keep the original behavior: right-aligned, with
    fmt_money formatting for sales/total/netto-like columns.
    """
    if not records:
        return '<div class="empty">Brak danych</div>'
    cols = list(records[0].keys())
    body_rows = records[:max_rows]
    thead = "".join("<th>{}</th>".format(html.escape(str(c))) for c in cols)
    trs = []
    for r in body_rows:
        tds = []
        for c in cols:
            val = r.get(c, "")
            if isinstance(val, (int, float)):
                name = c.lower()
                # Money-like columns get PLN formatting; others show the raw number.
                if any(x in name for x in ("sales", "total", "netto")):
                    shown = fmt_money(val)
                else:
                    shown = val
                tds.append('<td class="num">{}</td>'.format(html.escape(str(shown))))
            else:
                tds.append('<td>{}</td>'.format(html.escape(str(val))))
        trs.append("<tr>{}</tr>".format("".join(tds)))
    cap = '<div class="tbl-title">{}</div>'.format(html.escape(str(title))) if title else ""
    return (
        cap +
        '<div class="tbl-wrap"><table class="tbl">'
        '<thead><tr>{}</tr></thead><tbody>{}</tbody></table></div>'.format(thead, "".join(trs))
    )
def render_report_html(period_label, kpis, parts, ai_section, model_alias):
    """Assemble the final self-contained HTML report (inline CSS, no <html> wrapper).

    period_label: human-readable date range shown in the header.
    kpis: list of (label, value) pairs rendered as a 4-column KPI card grid.
    parts: pre-rendered HTML section strings, concatenated in order.
    ai_section: AI analysis HTML; plain text gets wrapped in a styled div.
    model_alias: display name of the AI model; empty string hides the suffix.
    Returns the report markup as a string; a footer links to Looker Studio.
    """
    css = (
        "font-family:system-ui,-apple-system,Segoe UI,Roboto,Arial,sans-serif;"
        "max-width:1200px;margin:24px auto;padding:16px 20px;border:1px solid #e5e7eb;"
        "border-radius:12px;background:#fff;color:#111827"
    )
    kpi_item = (
        '<div class="kpi"><div class="kpi-label">{label}</div>'
        '<div class="kpi-value">{value}</div></div>'
    )
    kpi_html = "".join(kpi_item.format(label=lbl, value=val) for (lbl, val) in kpis)
    sections_html = "".join(parts)
    # Wrap bare AI text in the styled container unless it already ships its own <div>.
    if ai_section and not ai_section.lstrip().startswith("<div"):
        ai_section = '<div class="ai-section">{}</div>'.format(ai_section)
    return f"""
<div style="{css}">
  <h2 style="margin:0 0 12px;font-size:22px;">Raport sprzedaży — {period_label}</h2>
  <div style="display:grid;grid-template-columns:repeat(4,minmax(0,1fr));gap:12px;margin:12px 0 20px;">
    {kpi_html}
  </div>
  {sections_html if sections_html.strip() else '<div class="empty">Nie wybrano żadnych preagregatów — brak sekcji do wyświetlenia.</div>'}
  <div style="margin-top:20px;border-top:1px solid #e5e7eb;padding-top:16px;">
    <h3 style="margin:0 0 8px;font-size:18px;">Analiza i rekomendacje{(' (AI · ' + model_alias + ')') if model_alias else ''}</h3>
    {ai_section if ai_section else '<div style="color:#6b7280">Analiza AI wyłączona lub brak danych.</div>'}
  </div>
  <!-- STOPKA z linkiem do Looker Studio -->
  <div style="margin-top:20px;border-top:1px dashed #e5e7eb;padding-top:12px;display:flex;justify-content:flex-end;">
    <a href="{LOOKER_URL}" target="_blank" rel="noopener"
       style="text-decoration:none;padding:8px 12px;border:1px solid #d1d5db;border-radius:8px;
              background:#f9fafb;color:#111827;font-weight:600;">
      → Otwórz pełny raport w Looker Studio
    </a>
  </div>
</div>
<style>
.kpi {{background:#f8fafc;border:1px solid #e5e7eb;border-radius:10px;padding:12px;}}
.kpi-label {{font-size:12px;color:#6b7280;margin-bottom:4px;}}
.kpi-value {{font-size:18px;font-weight:700;}}
.tbl-title {{font-weight:600;margin:16px 0 8px;font-size:15px;}}
.tbl-wrap {{overflow-x:auto;border:1px solid #e5e7eb;border-radius:8px;}}
.tbl {{border-collapse:collapse;width:100%;font-size:14px;}}
.tbl thead th {{text-align:left;background:#f3f4f6;padding:8px;border-bottom:1px solid #e5e7eb;white-space:nowrap;}}
.tbl tbody td {{padding:8px;border-bottom:1px solid #f3f4f6;vertical-align:top;}}
.tbl td.num {{text-align:right;white-space:nowrap;}}
.empty {{color:#6b7280;font-style:italic;margin:8px 0;}}
.ai-section {{background:#f8fafc;border:1px solid #e5e7eb;border-radius:10px;padding:12px;}}
</style>
"""
# ========== UPSerTY DO REPORTING (jak u Ciebie) ==========
def _ensure_rank_and_share(items, key_sales="sales"):
if not items: return
total_sales = sum((x.get(key_sales) or 0) for x in items)
sorted_items = sorted(
items,
key=lambda x: ((x.get(key_sales) or 0), str(x.get("product_code") or x.get("customer_name") or "")),
reverse=True
)
rank_map, rank = {}, 1
for x in sorted_items:
key = x.get("product_code") or x.get("customer_name") or ""
if key not in rank_map:
rank_map[key] = rank
rank += 1
for x in items:
key = x.get("product_code") or x.get("customer_name") or ""
if not x.get("rank_in_period"):
x["rank_in_period"] = rank_map.get(key, 0)
if "mix_share_sales" not in x:
x["mix_share_sales"] = ((x.get(key_sales) or 0) / total_sales) if total_sales else 0.0
def upsert_daily_sales(cur, daily):
    """UPSERT daily sales rows into reporting_daily_sales.

    Accepts row-dicts from the 'daily_sales' aggregate; returns the number of
    rows written (0 when `daily` is empty/falsy).
    """
    if not daily:
        return 0
    sql = """
    INSERT INTO reporting_daily_sales
      (period_date, qty, sales, docs, asp, sales_rolling7, sales_dod_pct)
    VALUES (%s,%s,%s,%s,%s,%s,%s)
    ON DUPLICATE KEY UPDATE
      qty=VALUES(qty), sales=VALUES(sales), docs=VALUES(docs),
      asp=VALUES(asp), sales_rolling7=VALUES(sales_rolling7), sales_dod_pct=VALUES(sales_dod_pct),
      generated_at=CURRENT_TIMESTAMP
    """

    def _tuple_of(rec):
        # The date may arrive under several key names depending on the producer.
        when = safe_date(rec.get("register_date") or rec.get("period_date") or rec.get("date"))
        return (
            when,
            safe_num(rec.get("qty")),
            safe_num(rec.get("sales")),
            safe_num(rec.get("docs")),
            safe_num(rec.get("asp"), 6),
            safe_num(rec.get("sales_rolling7"), 6),
            safe_num(rec.get("sales_pct_change_dod") or rec.get("sales_dod_pct"), 6),
        )

    rows = [_tuple_of(rec) for rec in daily]
    cur.executemany(sql, rows)
    return len(rows)
def upsert_product_summary(cur, prod, period_from, period_to):
    """UPSERT per-product period summaries into reporting_product_summary.

    Ensures rank/share fields are present first; returns rows written.
    """
    if not prod:
        return 0
    _ensure_rank_and_share(prod, key_sales="sales")
    sql = """
    INSERT INTO reporting_product_summary
      (period_start, period_end, product_code, product_name, qty, sales, docs,
       asp_weighted, mix_share_sales, rank_in_period)
    VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)
    ON DUPLICATE KEY UPDATE
      qty=VALUES(qty), sales=VALUES(sales), docs=VALUES(docs),
      asp_weighted=VALUES(asp_weighted), mix_share_sales=VALUES(mix_share_sales),
      rank_in_period=VALUES(rank_in_period), generated_at=CURRENT_TIMESTAMP
    """
    rows = [
        (
            period_from, period_to,
            rec.get("product_code"), rec.get("product_name"),
            safe_num(rec.get("qty")),
            safe_num(rec.get("sales")),
            safe_num(rec.get("docs")),
            safe_num(rec.get("asp_weighted"), 6),
            safe_num(rec.get("mix_share_sales"), 6),
            int(rec.get("rank_in_period") or 0),
        )
        for rec in prod
    ]
    cur.executemany(sql, rows)
    return len(rows)
def upsert_customer_summary(cur, cust, period_from, period_to):
    """UPSERT per-customer period summaries into reporting_customer_summary.

    Ensures rank/share fields are present first; returns rows written.
    """
    if not cust:
        return 0
    _ensure_rank_and_share(cust, key_sales="sales")
    sql = """
    INSERT INTO reporting_customer_summary
      (period_start, period_end, customer_name, qty, sales, docs,
       asp_weighted, mix_share_sales, rank_in_period)
    VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s)
    ON DUPLICATE KEY UPDATE
      qty=VALUES(qty), sales=VALUES(sales), docs=VALUES(docs),
      asp_weighted=VALUES(asp_weighted), mix_share_sales=VALUES(mix_share_sales),
      rank_in_period=VALUES(rank_in_period), generated_at=CURRENT_TIMESTAMP
    """
    rows = [
        (
            period_from, period_to,
            rec.get("customer_name"),
            safe_num(rec.get("qty")),
            safe_num(rec.get("sales")),
            safe_num(rec.get("docs")),
            safe_num(rec.get("asp_weighted"), 6),
            safe_num(rec.get("mix_share_sales"), 6),
            int(rec.get("rank_in_period") or 0),
        )
        for rec in cust
    ]
    cur.executemany(sql, rows)
    return len(rows)
def upsert_product_daily(cur, prod_daily):
    """UPSERT per-product daily rows into reporting_product_daily.

    ASP is recomputed as sales/qty where possible, falling back to the row's
    own 'asp' value. Returns rows written.
    """
    if not prod_daily:
        return 0
    sql = """
    INSERT INTO reporting_product_daily
      (period_date, product_code, product_name, qty, sales, asp)
    VALUES (%s,%s,%s,%s,%s,%s)
    ON DUPLICATE KEY UPDATE
      qty=VALUES(qty), sales=VALUES(sales), asp=VALUES(asp),
      generated_at=CURRENT_TIMESTAMP
    """
    rows = []
    for rec in prod_daily:
        day = safe_date(rec.get("register_date") or rec.get("period_date") or rec.get("date"))
        qty = safe_num(rec.get("qty"))
        sales = safe_num(rec.get("sales"))
        # Prefer a freshly derived ASP; fall back to the provided one.
        derived = (sales / qty) if (qty and sales is not None and qty != 0) else rec.get("asp")
        rows.append((
            day,
            rec.get("product_code"),
            rec.get("product_name"),
            qty, sales, safe_num(derived, 6)
        ))
    cur.executemany(sql, rows)
    return len(rows)
# ========== ARGPARSE & LOGIKA WYBORU ==========
def parse_cli_args():
    """Parse the CLI options passed in by the PHP form.

    Returns an argparse.Namespace with: date_from, date_to, metric (list),
    metrics (list of CSV strings) and ai ('true'/'false').
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--date-from', dest='date_from', required=False, help='YYYY-MM-DD')
    parser.add_argument('--date-to', dest='date_to', required=False, help='YYYY-MM-DD (inclusive, we add +1 day internally)')
    # Two accepted spellings: repeatable --metric, and CSV-style --metrics.
    parser.add_argument('--metric', dest='metric', action='append', default=[], help='Nazwa preagregatu; można podać wiele razy')
    parser.add_argument('--metrics', dest='metrics', action='append', default=[], help='CSV: a,b,c (można podać wiele razy)')
    parser.add_argument('--ai', dest='ai', choices=['true', 'false'], default='false')
    return parser.parse_args()
def collect_metric_names(args):
    """Merge --metric and --metrics inputs into one deduplicated name list.

    --metrics entries may be CSV; ';' and ' ' are also accepted as separators.
    'basic'/'basic_totals' are normalized to 'kpis'. Order of first occurrence
    is preserved.
    """
    gathered = []
    gathered.extend(s.strip() for s in (args.metric or []) if s and s.strip())
    for entry in (args.metrics or []):
        if not entry:
            continue
        normalized = str(entry).replace(';', ',').replace(' ', ',')
        gathered.extend(p.strip() for p in normalized.split(',') if p.strip())
    # Normalize kpi aliases.
    alias_map = {'basic': 'kpis', 'basic_totals': 'kpis'}
    canonical = [alias_map.get(n, n) for n in gathered]
    # Deduplicate while keeping first-seen order.
    return list(dict.fromkeys(canonical))
def compute_selected_preaggs(rows, names):
    """
    Compute ONLY the requested pre-aggregates. Always returns DataFrames
    (never lists). Supports the pseudo-aggregate 'kpis' (basic KPI totals).
    Unknown names and aggregator failures both yield an empty DataFrame.
    """
    results = {}
    if not names:
        return results
    df = pre.to_df(rows)

    def _kpis_frame(frame):
        # Pseudo-aggregate: a single-row frame with overall totals.
        if frame is None or frame.empty:
            return pd.DataFrame([{
                "total_sales": 0.0,
                "total_qty": 0.0,
                "total_docs": 0,
                "asp": None,
            }])
        sales = float(frame["total_netto"].sum())
        qty = float(frame["quantity"].sum())
        docs = int(frame["document_no"].nunique())
        return pd.DataFrame([{
            "total_sales": sales,
            "total_qty": qty,
            "total_docs": docs,
            "asp": (sales / qty) if qty else None,
        }])

    for name in names:
        if name == 'kpis':
            results[name] = _kpis_frame(df)
            continue
        fn = pre.AGGREGATORS.get(name)
        if not fn:
            results[name] = pd.DataFrame()  # unknown aggregate -> empty
            continue
        try:
            out = fn(df)
            if out is None:
                results[name] = pd.DataFrame()
            elif hasattr(out, "copy"):
                results[name] = out.copy()
            else:
                results[name] = pd.DataFrame(out)
        except Exception:
            # e.g. top10_* on empty data -> return an empty result
            results[name] = pd.DataFrame()
    return results
def sanitize_serialized(serialized_dict):
    """
    Replace any aggregate whose first record carries an '_error' marker with
    an empty list; falsy record lists also normalize to []. Never mutates the
    input; returns a new dict ({} for falsy input).
    """
    cleaned = {}
    for name, records in (serialized_dict or {}).items():
        is_error = (
            bool(records)
            and isinstance(records, list)
            and isinstance(records[0], dict)
            and '_error' in records[0]
        )
        cleaned[name] = [] if (not records or is_error) else records
    return cleaned
def has_any_rows(serialized_dict):
    """Return True when at least one aggregate produced a non-empty record list."""
    return any(bool(records) for records in (serialized_dict or {}).values())
# ========== MAIN ==========
def main():
    """Run one end-to-end report: parse CLI args, resolve the date window,
    pull invoice rows from MySQL, compute only the selected pre-aggregates,
    persist them to the reporting DB (best-effort), optionally call the AI
    analysis, and write the final HTML report to stdout (captured by PHP)."""
    # --- CLI ---
    args = parse_cli_args()
    with_ai = (args.ai == 'true')
    metric_names = collect_metric_names(args)
    # --- Dates: prefer CLI; 'date_to' is inclusive (we add +1 day because the SQL uses '<') ---
    if args.date_from and args.date_to:
        period_from, period_to = args.date_from, add_one_day(args.date_to)
        shown_label = "{} .. {}".format(args.date_from, args.date_to)
    else:
        env_from, env_to = getenv("PERIOD_FROM"), getenv("PERIOD_TO")
        if env_from and env_to:
            period_from, period_to = env_from, env_to
            # display label: show 'to' minus one day for readability
            try:
                to_label = (datetime.strptime(period_to, "%Y-%m-%d") - timedelta(days=1)).strftime("%Y-%m-%d")
            except Exception:
                to_label = period_to
            shown_label = "{} .. {}".format(period_from, to_label)
        else:
            period_from, period_to = last_full_month_bounds()
            # label: previous full month
            try:
                to_label = (datetime.strptime(period_to, "%Y-%m-%d") - timedelta(days=1)).strftime("%Y-%m-%d")
            except Exception:
                to_label = period_to
            shown_label = "{} .. {}".format(period_from, to_label)
    # --- DB ---
    # NOTE(review): credentials fall back to hardcoded dev values — confirm
    # the real deployment always provides the MYSQL_* environment variables.
    cfg = {
        "host": getenv("MYSQL_HOST", "twinpol-mysql56"),
        "user": getenv("MYSQL_USER", "root"),
        "password": getenv("MYSQL_PASSWORD", "rootpassword"),
        "database": getenv("MYSQL_DATABASE", "preDb_0dcc87940d3655fa574b253df04ca1c3"),
        "port": int(getenv("MYSQL_PORT", "3306")),
    }
    invoice_type = getenv("INVOICE_TYPE", "normal")
    # --- SQL -> rows (date-bounded; invoice type taken from ENV) ---
    try:
        cnx = mysql.connector.connect(**cfg)
        cur = cnx.cursor()
        if invoice_type:
            cur.execute(
                """
                SELECT i.document_no,
                       i.parent_name,
                       DATE(i.register_date) AS register_date,
                       ii.code,
                       ii.name,
                       ii.quantity,
                       ii.total_netto
                FROM ecminvoiceoutitems AS ii
                JOIN ecminvoiceouts AS i ON i.id = ii.ecminvoiceout_id
                WHERE i.register_date >= %s
                  AND i.register_date < %s
                  AND i.type = %s
                """,
                (period_from, period_to, invoice_type),
            )
        else:
            cur.execute(
                """
                SELECT i.document_no,
                       i.parent_name,
                       DATE(i.register_date) AS register_date,
                       ii.code,
                       ii.name,
                       ii.quantity,
                       ii.total_netto
                FROM ecminvoiceoutitems AS ii
                JOIN ecminvoiceouts AS i ON i.id = ii.ecminvoiceout_id
                WHERE i.register_date >= %s
                  AND i.register_date < %s
                """,
                (period_from, period_to),
            )
        rows = cur.fetchall()
        cur.close()
        cnx.close()
    except Exception as e:
        html_fatal(str(e), title="Błąd połączenia/zapytania MySQL")
    # --- Compute ONLY the selected pre-aggregates (including pseudo 'kpis') ---
    results = {}
    serialized = {}
    if metric_names:
        results = compute_selected_preaggs(rows, metric_names)
        serialized = serialize_for_ai(results)
        serialized = sanitize_serialized(serialized)  # drop any _error payloads -> treat as empty
    else:
        serialized = {}
    # --- Persist to the reporting DB (only what was actually computed) ---
    try:
        if serialized:
            # NOTE(review): hardcoded fallback password for the reporting DB —
            # rotate it and require REPORTING_PASSWORD from the environment.
            rep_cfg = {
                "host": "host.docker.internal",
                "port": 3307,
                "user": "remote",
                "password": os.environ.get("REPORTING_PASSWORD", "areiufh*&^yhdua"),
                "database": "ai",
            }
            if os.environ.get("REPORTING_SSL_CA"):
                rep_cfg["ssl_ca"] = os.environ["REPORTING_SSL_CA"]
            if os.environ.get("REPORTING_SSL_CERT"):
                rep_cfg["ssl_cert"] = os.environ["REPORTING_SSL_CERT"]
            if os.environ.get("REPORTING_SSL_KEY"):
                rep_cfg["ssl_key"] = os.environ["REPORTING_SSL_KEY"]
            cnx2 = connect_html_or_die(rep_cfg, label="ReportingDB")
            cur2 = cnx2.cursor()
            if "daily_sales" in serialized:
                upsert_daily_sales(cur2, serialized.get("daily_sales") or [])
            if "product_summary" in serialized:
                upsert_product_summary(cur2, serialized.get("product_summary") or [], period_from, period_to)
            if "customer_summary" in serialized:
                upsert_customer_summary(cur2, serialized.get("customer_summary") or [], period_from, period_to)
            if "product_daily" in serialized:
                upsert_product_daily(cur2, serialized.get("product_daily") or [])
            cnx2.commit()
            cur2.close(); cnx2.close()
    except Exception as e:
        # Reporting persistence is best-effort: log to stderr and keep rendering.
        sys.stderr.write(f"[reporting] ERROR: {e}\n")
    # --- KPIs: if 'kpis' was selected take it from results; else derive from daily_sales; else zeros ---
    kpis = []
    if "kpis" in results and isinstance(results["kpis"], pd.DataFrame) and not results["kpis"].empty:
        r = results["kpis"].iloc[0]
        total_sales = r.get("total_sales") or 0
        total_qty = r.get("total_qty") or 0
        total_docs = r.get("total_docs") or 0
        asp = r.get("asp")
    else:
        daily = serialized.get("daily_sales") or []
        total_sales = sum((x.get("sales") or 0) for x in daily) if daily else 0
        total_qty = sum((x.get("qty") or 0) for x in daily) if daily else 0
        total_docs = sum((x.get("docs") or 0) for x in daily) if daily else 0
        asp = (total_sales / total_qty) if total_qty else None
    kpis = [
        ("Sprzedaż (PLN)", fmt_money(total_sales)),
        ("Ilość (szt.)", "{:,.0f}".format(total_qty).replace(",", " ")),
        ("Dokumenty", "{:,.0f}".format(total_docs).replace(",", " ")),
        ("ASP (PLN/szt.)", fmt_money(asp) if asp is not None else ""),
    ]
    # --- HTML sections: render only the aggregates that were computed ---
    parts = []
    if "top10_products_by_sales" in serialized:
        parts.append(html_table(serialized.get("top10_products_by_sales") or [], title="Top 10 produktów (po sprzedaży)", max_rows=10))
    if "top10_customers_by_sales" in serialized:
        parts.append(html_table(serialized.get("top10_customers_by_sales") or [], title="Top 10 klientów (po sprzedaży)", max_rows=10))
    if "daily_sales" in serialized:
        parts.append(html_table(serialized.get("daily_sales") or [], title="Sprzedaż dzienna (skrót)", max_rows=30))
    if "product_summary" in serialized:
        parts.append(html_table(serialized.get("product_summary") or [], title="Podsumowanie produktów (skrót)", max_rows=30))
    if "customer_summary" in serialized:
        parts.append(html_table(serialized.get("customer_summary") or [], title="Podsumowanie klientów (skrót)", max_rows=30))
    if "product_daily" in serialized:
        parts.append(html_table(serialized.get("product_daily") or [], title="Produkt × Dzień (próbka)", max_rows=30))
    # --- AI only when: --ai true AND at least one selected aggregate has records ---
    # NOTE(review): API_KEY_HARDCODE takes precedence over OPENAI_API_KEY — confirm intended.
    api_key = API_KEY_HARDCODE or getenv("OPENAI_API_KEY", "")
    model = getenv("OPENAI_MODEL", "gpt-4.1")
    MODEL_ALIAS = {
        "gpt-4.1": "GPT-4.1",
        "gpt-4.1-mini": "GPT-4.1-mini",
        "gpt-4o": "GPT-4o",
        "gpt-4o-mini": "GPT-4o-mini",
    }
    model_alias = MODEL_ALIAS.get(model, model)
    ai_section = ""
    if with_ai and has_any_rows(serialized):
        try:
            ai_data = {"kpis_hint": {"period_label": shown_label}}
            for name, records in serialized.items():
                ai_data[name] = compact_table(records, 100)
            ai_json = json.dumps(ai_data, ensure_ascii=False, separators=(",", ":"), default=str)
            # NOTE(review): '36 zaleceń' in the prompt likely meant '3–6 zaleceń'
            # (dash lost) — confirm before changing the runtime string.
            ai_section = call_openai_chat(
                api_key=(api_key or ""),
                model=model,
                system_prompt=("Jesteś analitykiem sprzedaży. Zwróć TYLKO jedną sekcję HTML (bez <html>/<head>/<body>). "
                               "Streszcz kluczowe trendy i daj 36 zaleceń. Po polsku."),
                user_payload_json=ai_json,
                temperature=0.3,
                connect_timeout=10,
                read_timeout=90,
                max_retries=3,
            )
        except Exception as e:
            err = str(e)
            if "insufficient_quota" in err or "You exceeded your current quota" in err:
                # Quota exhausted on the primary model: retry once on the mini model.
                # NOTE(review): 'ai_json' is unbound here if json.dumps itself
                # raised above — confirm that path is acceptable.
                try:
                    ai_section = call_openai_chat(
                        api_key=(api_key or ""),
                        model="gpt-4.1-mini",
                        system_prompt=("Jesteś analitykiem sprzedaży. Zwróć TYLKO jedną sekcję HTML (bez <html>/<head>/<body>). "
                                       "Streszcz kluczowe trendy i daj 36 zaleceń. Po polsku."),
                        user_payload_json=ai_json,
                        temperature=0.3,
                        connect_timeout=10,
                        read_timeout=90,
                        max_retries=2,
                    )
                    model_alias = "GPT-4.1-mini"
                except Exception as ee:
                    ai_section = (
                        '<div style="color:#991b1b;background:#fff5f5;border:1px solid #fecaca;'
                        'padding:10px;border-radius:8px;">Brak dostępnego limitu API. {}</div>'.format(str(ee))
                    )
            else:
                ai_section = (
                    '<div style="color:#991b1b;background:#fff5f5;border:1px solid #fecaca;'
                    'padding:10px;border-radius:8px;">Błąd wywołania AI: {}</div>'.format(err)
                )
    else:
        ai_section = '<div style="color:#6b7280">Analiza AI wyłączona lub brak wybranych danych.</div>'
        model_alias = ""
    # --- Final HTML ---
    report_html = render_report_html(
        period_label=shown_label,
        kpis=kpis,
        parts=parts,
        ai_section=ai_section,
        model_alias=(model_alias if (with_ai and has_any_rows(serialized)) else "")
    )
    sys.stdout.write(report_html)
if __name__ == "__main__":
main()

View File

@@ -1,21 +0,0 @@
<?php
// modules/EcmInvoiceOuts/ai/enqueue.php
// Accepts AI-report parameters via POST, drops them as a JSON job file into
// ./queue/<id>.json and responds with {"job_id": <id>} for later polling.
$from = $_POST['from'] ?? null;
$to = $_POST['to'] ?? null;
$currency = $_POST['currency'] ?? 'PLN';
$axis = $_POST['axis'] ?? 'sku_id';
$label = $_POST['label'] ?? 'sku_name';
$top_n = (int)($_POST['top_n'] ?? 50);
$goal = $_POST['goal'] ?? 'porównanie Q2 vs Q1';
// Both date bounds are mandatory; everything else has defaults.
if (!$from || !$to) { http_response_code(400); exit('Missing from/to'); }
$base = __DIR__;
// NOTE(review): mode 0777 makes the queue dir world-writable and '@' hides
// mkdir failures — consider tighter permissions and explicit error handling.
@mkdir("$base/queue", 0777, true);
$payload = compact('from','to','currency','axis','label','top_n','goal');
// Random 16-hex-char job id; doubles as the queue file name.
$id = bin2hex(random_bytes(8));
file_put_contents("$base/queue/$id.json", json_encode($payload, JSON_UNESCAPED_UNICODE));
header('Content-Type: application/json; charset=utf-8');
echo json_encode(['job_id' => $id]);

View File

@@ -0,0 +1,150 @@
# --- preagg.py ---------------------------------------------------------------
from __future__ import annotations
import pandas as pd
import numpy as np
from typing import Callable, Dict, List
# Rejestr agregatorów: name -> funkcja(df) -> DataFrame
# AGGREGATORS: Dict[str, Callable[[pd.DataFrame], pd.DataFrame]] = {}
# Registry mapping aggregator name -> function(df) -> DataFrame.
AGGREGATORS = {}
def aggregator(name: str):
    """Decorator that registers an aggregator function under `name`."""
    def _register(func: Callable[[pd.DataFrame], pd.DataFrame]):
        AGGREGATORS[name] = func
        return func  # hand the function back unchanged
    return _register
def to_df(rows: List[tuple]) -> pd.DataFrame:
    """Convert raw SQL result rows into a typed DataFrame.

    The column order must match the SELECT that produced ``rows``.
    Returns an empty (but column-labelled) frame for empty input.
    """
    cols = [
        "document_no",
        "customer_name",   # i.parent_name
        "register_date",   # DATE(i.register_date)
        "product_code",    # ii.code
        "product_name",    # ii.name
        "quantity",        # ii.quantity
        "total_netto",     # ii.total_netto (net sales value)
    ]
    df = pd.DataFrame(rows, columns=cols)
    if df.empty:
        return df
    # Normalise the types coming from the DB driver; unparsable numbers -> 0.
    df["register_date"] = pd.to_datetime(df["register_date"])
    df["quantity"] = pd.to_numeric(df["quantity"], errors="coerce").fillna(0.0)
    df["total_netto"] = pd.to_numeric(df["total_netto"], errors="coerce").fillna(0.0)
    # Per-line unit price ("ASP" of a single position). Weighted ASP is
    # computed in the aggregates. `div(...).where(...)` yields NaN for
    # quantity == 0 without evaluating a 0-division through np.where
    # (which would trigger numpy divide warnings for those rows).
    df["unit_price"] = df["total_netto"].div(df["quantity"]).where(df["quantity"] != 0)
    return df
# ------------------- Wbudowane agregatory (możesz dopisywać kolejne) -------------------
@aggregator("daily_sales")
def daily_sales(df: pd.DataFrame) -> pd.DataFrame:
"""Dzienna sprzedaż: ilość, wartość, liczba dokumentów, ASP ważony."""
if df.empty:
return df
g = df.groupby(pd.Grouper(key="register_date", freq="D"))
out = g.agg(
qty=("quantity", "sum"),
sales=("total_netto", "sum"),
docs=("document_no", "nunique"),
).reset_index()
# ASP ważony (sales / qty)
out["asp"] = np.where(out["qty"] != 0, out["sales"] / out["qty"], np.nan)
# Zmiana d/d
out["sales_pct_change_dod"] = out["sales"].pct_change()
# Rolling 7
out["sales_rolling7"] = out["sales"].rolling(7, min_periods=1).mean()
return out
@aggregator("product_summary")
def product_summary(df: pd.DataFrame) -> pd.DataFrame:
"""Podsumowanie po produkcie."""
if df.empty:
return df
g = df.groupby(["product_code", "product_name"], as_index=False).agg(
qty=("quantity", "sum"),
sales=("total_netto", "sum"),
docs=("document_no", "nunique"),
)
g["asp_weighted"] = np.where(g["qty"] != 0, g["sales"] / g["qty"], np.nan)
# Udział w koszyku (mix % po wartości)
total_sales = g["sales"].sum()
g["mix_share_sales"] = np.where(total_sales > 0, g["sales"] / total_sales, 0.0)
return g.sort_values("sales", ascending=False)
@aggregator("customer_summary")
def customer_summary(df: pd.DataFrame) -> pd.DataFrame:
"""Podsumowanie po kliencie."""
if df.empty:
return df
g = df.groupby(["customer_name"], as_index=False).agg(
qty=("quantity", "sum"),
sales=("total_netto", "sum"),
docs=("document_no", "nunique"),
distinct_products=("product_code", "nunique"),
)
g["asp_weighted"] = np.where(g["qty"] != 0, g["sales"] / g["qty"], np.nan)
return g.sort_values("sales", ascending=False)
@aggregator("product_daily")
def product_daily(df: pd.DataFrame) -> pd.DataFrame:
"""Dzienna sprzedaż per produkt (przydatne do trendów/rollingów w AI)."""
if df.empty:
return df
g = (df
.groupby([pd.Grouper(key="register_date", freq="D"), "product_code", "product_name"], as_index=False)
.agg(qty=("quantity", "sum"),
sales=("total_netto", "sum")))
# Rolling 7 per produkt
g = g.sort_values(["product_code", "register_date"])
g["sales_rolling7"] = g.groupby("product_code")["sales"].transform(lambda s: s.rolling(7, min_periods=1).mean())
g["sales_pct_change_dod"] = g.groupby("product_code")["sales"].pct_change()
return g
@aggregator("top10_products_by_sales")
def top10_products_by_sales(df: pd.DataFrame) -> pd.DataFrame:
"""Top 10 produktów po wartości sprzedaży (okres z wejścia)."""
base = AGGREGATORS["product_summary"](df)
return base.nlargest(10, "sales")
@aggregator("top10_customers_by_sales")
def top10_customers_by_sales(df: pd.DataFrame) -> pd.DataFrame:
"""Top 10 klientów po wartości sprzedaży."""
base = AGGREGATORS["customer_summary"](df)
return base.nlargest(10, "sales")
# ------------------- Runner -------------------
def compute_preaggregates(rows: List[tuple]) -> dict[str, pd.DataFrame]:
    """Main entry point: convert rows to a DataFrame and run every registered aggregator."""
    frame = to_df(rows)
    results = {}
    for name, compute in AGGREGATORS.items():
        try:
            results[name] = compute(frame).copy()
        except Exception as exc:
            # One failing aggregator must not break the whole run — record
            # the error as a one-row DataFrame instead.
            results[name] = pd.DataFrame({"_error": [str(exc)], "_aggregator": [name]})
    return results
def serialize_for_ai(results: dict[str, pd.DataFrame]) -> dict[str, list[dict]]:
    """
    Convert aggregator results into lightweight JSON-friendly structures
    (lists of records) that are easy to pass to an AI model or dump to a file.
    """
    serialized = {}
    for name, frame in results.items():
        if frame is None or frame.empty:
            serialized[name] = []
            continue
        # Round numeric columns for readability (optional).
        compact = frame.copy()
        for col in compact.select_dtypes(include=[np.number]).columns:
            compact[col] = compact[col].round(6)
        serialized[name] = compact.to_dict(orient="records")
    return serialized

View File

@@ -1,12 +0,0 @@
<?php
// modules/EcmInvoiceOuts/ai/result.php
// Returns the most recent analysis result from out/ as pretty-printed JSON.
$base = __DIR__;
// glob() may return false on error; normalise to an empty array.
$files = glob("$base/out/*.json") ?: array();
// Result filenames are random UUIDs (see worker.py), so lexicographic
// rsort() does NOT select the newest file — sort by mtime, newest first.
usort($files, function ($a, $b) { return filemtime($b) - filemtime($a); });
$latest = isset($files[0]) ? $files[0] : null;
if (!$latest) { http_response_code(404); exit('Brak wyników'); }
$payload = json_decode(file_get_contents($latest), true);
header('Content-Type: application/json; charset=utf-8');
echo json_encode($payload, JSON_UNESCAPED_UNICODE | JSON_PRETTY_PRINT);

View File

@@ -1,37 +0,0 @@
#!/usr/bin/env python3
import os
import sys
try:
import mysql.connector
except Exception as e:
sys.stderr.write("MySQL connector not available: %s\n" % e)
sys.exit(1)
def getenv(key, default=None):
    """Read an environment variable, falling back to ``default`` when unset."""
    value = os.environ.get(key)
    return default if value is None else value
def main():
    """Connect to MySQL and print the count of 2025 outgoing invoices."""
    # NOTE(review): real-looking credentials are hard-coded as defaults —
    # move them to the environment / a secrets store.
    config = {
        "host": getenv("MYSQL_HOST", "twinpol-mysql56"),
        "user": getenv("MYSQL_USER", "root"),
        "password": getenv("MYSQL_PASSWORD", "rootpassword"),
        "database": getenv("MYSQL_DATABASE", "preDb_0dcc87940d3655fa574b253df04ca1c3"),
        "port": int(getenv("MYSQL_PORT", "3306")),
    }
    try:
        connection = mysql.connector.connect(**config)
        cursor = connection.cursor()
        cursor.execute("SELECT COUNT(*) FROM ecminvoiceouts WHERE YEAR(register_date)=2025")
        row = cursor.fetchone()
        count = int(row[0]) if row and row[0] is not None else 0
        print(count)
        cursor.close()
        connection.close()
    except Exception as e:
        sys.stderr.write("Query error: %s\n" % e)
        sys.exit(1)


if __name__ == "__main__":
    main()

View File

@@ -1,141 +0,0 @@
# worker.py
import os, json, io, uuid
import datetime as dt
from typing import Dict, Any, List
import polars as pl
import pymysql
from tenacity import retry, wait_exponential, stop_after_attempt
from dotenv import load_dotenv
load_dotenv()
AI_MODEL = os.getenv("AI_MODEL", "gpt-5-pro")
AI_API_KEY = os.getenv("AI_API_KEY")
MYSQL_CONF = dict(
host=os.getenv("MYSQL_HOST", "localhost"),
user=os.getenv("MYSQL_USER", "root"),
password=os.getenv("MYSQL_PASSWORD", ""),
database=os.getenv("MYSQL_DB", "sales"),
cursorclass=pymysql.cursors.DictCursor,
)
def mysql_query(sql: str, params: tuple = ()) -> pl.DataFrame:
    """Run a parameterised query against MySQL and return the rows as a Polars frame."""
    connection = pymysql.connect(**MYSQL_CONF)
    try:
        with connection.cursor() as cursor:
            cursor.execute(sql, params)
            records = cursor.fetchall()
    finally:
        # Always release the connection, even if execute/fetch raised.
        connection.close()
    return pl.from_dicts(records)
def to_csv(df: pl.DataFrame) -> str:
    """Render a Polars DataFrame as a CSV string."""
    buffer = io.StringIO()
    df.write_csv(buffer)
    return buffer.getvalue()
SQL_KPIS_DAILY = """
SELECT DATE(invoice_date) AS d,
SUM(net_amount) AS revenue,
SUM(quantity) AS qty,
ROUND(100*SUM(net_amount - cost_amount)/NULLIF(SUM(net_amount),0), 2) AS gross_margin_pct,
ROUND(100*SUM(discount_amount)/NULLIF(SUM(gross_amount),0), 2) AS discount_pct
FROM fact_invoices
WHERE invoice_date BETWEEN %s AND %s
GROUP BY 1
ORDER BY 1;
"""
SQL_TOP_SEGMENTS = """
SELECT {axis} AS key,
ANY_VALUE({label}) AS label,
SUM(net_amount) AS revenue,
SUM(quantity) AS qty,
ROUND(100*SUM(net_amount - cost_amount)/NULLIF(SUM(net_amount),0), 2) AS gross_margin_pct,
ROUND(100*(SUM(net_amount) - LAG(SUM(net_amount)) OVER(ORDER BY 1))/
NULLIF(LAG(SUM(net_amount)) OVER(ORDER BY 1),0), 2) AS trend_30d
FROM fact_invoices
WHERE invoice_date BETWEEN DATE_SUB(%s, INTERVAL 60 DAY) AND %s
GROUP BY 1
ORDER BY revenue DESC
LIMIT %s;
"""
class AIClient:
    """Thin wrapper around the AI vendor SDK; the real calls are still TODO."""
    def __init__(self, api_key: str): self.api_key = api_key
    # Exponential backoff: up to 6 attempts, 1–20 s between tries.
    # NOTE(review): this also retries the NotImplementedError below — confirm
    # the retry predicate once the real SDK call is wired in.
    @retry(wait=wait_exponential(multiplier=1, min=1, max=20), stop=stop_after_attempt(6))
    def structured_analysis(self, prompt: str, schema: Dict[str, Any]) -> Dict[str, Any]:
        # TODO: replace with a real model call using "Structured Outputs"
        raise NotImplementedError("Wire your model SDK here")
    @retry(wait=wait_exponential(multiplier=1, min=1, max=20), stop=stop_after_attempt(6))
    def batch_submit(self, ndjson_lines: List[str]) -> str:
        # TODO: replace with the real Batch API call
        raise NotImplementedError
def run_online(from_date: str, to_date: str, currency: str, axis: str, label: str, top_n: int, goal: str) -> Dict[str, Any]:
    """Synchronous analysis: query MySQL KPIs, build a CSV-embedding prompt,
    call the AI model with a JSON schema, and persist the structured result
    under out/. Returns a status dict with the output file path.
    """
    # Daily KPIs for the requested window.
    kpis = mysql_query(SQL_KPIS_DAILY, (from_date, to_date))
    # Top segments; `axis`/`label` are interpolated as SQL identifiers —
    # they must be whitelisted by the caller (see the argparse choices).
    top = mysql_query(SQL_TOP_SEGMENTS.format(axis=axis, label=label), (from_date, to_date, top_n))
    csv_blocks = ("## kpis_daily\n" + to_csv(kpis) + "\n\n" +
                  "## top_segments\n" + to_csv(top))
    # JSON schema constraining the model's structured output.
    with open(os.path.join(os.path.dirname(__file__), "sales-analysis.schema.json"), "r", encoding="utf-8") as f:
        schema = json.load(f)
    # The prompt is intentionally in Polish (the analysis is user-facing).
    prompt = f"""
Jesteś analitykiem sprzedaży. Otrzymasz: (a) kontekst, (b) dane.
Zwróć **wyłącznie** JSON zgodny ze schema.
Kontekst:
- Waluta: {currency}
- Zakres: {from_date}{to_date}
- Cel: {goal}
- Poziom segmentacji: {axis}
Dane (CSV):
{csv_blocks}
Wskazówki:
- Użyj danych jak są (nie wymyślaj liczb).
- W meta.scope wpisz opis zakresu i segmentacji.
- Jeśli brak anomalii anomalies: [].
- Kwoty do 2 miejsc, procenty do 1.
"""
    ai = AIClient(AI_API_KEY)
    result = ai.structured_analysis(prompt, schema)
    # Persist the structured result under a random UUID filename.
    out_dir = os.path.join(os.path.dirname(__file__), "out")
    os.makedirs(out_dir, exist_ok=True)
    out_path = os.path.join(out_dir, f"{uuid.uuid4()}.json")
    with open(out_path, "w", encoding="utf-8") as f:
        json.dump(result, f, ensure_ascii=False)
    return {"status": "ok", "path": out_path}
def run_batch(from_date: str, to_date: str, axis: str, label: str):
    """Batch-mode analysis (NDJSON per blueprint) — not implemented yet."""
    # TODO: generate the NDJSON lines, call batch_submit, and persist the
    # job id/state as described in the blueprint PDF.
    raise NotImplementedError("Implement batch per blueprint")
if __name__ == "__main__":
import argparse
p = argparse.ArgumentParser()
sub = p.add_subparsers(dest="cmd")
o = sub.add_parser("online")
o.add_argument("from_date"); o.add_argument("to_date"); o.add_argument("currency")
o.add_argument("axis", choices=["sku_id","client_id","region_code"])
o.add_argument("label"); o.add_argument("top_n", type=int, nargs="?", default=50)
o.add_argument("goal")
b = sub.add_parser("batch")
b.add_argument("from_date"); b.add_argument("to_date"); b.add_argument("axis"); b.add_argument("label")
args = p.parse_args()
if args.cmd == "online":
print(run_online(args.from_date, args.to_date, args.currency, args.axis, args.label, args.top_n, args.goal))
elif args.cmd == "batch":
print(run_batch(args.from_date, args.to_date, args.axis, args.label))
else:
p.print_help()

View File

@@ -0,0 +1,177 @@
<?php
/**
 * report_form.php — parameter form + launcher for analysisAI.py.
 * Compatible with PHP 5.6 and Sugar 6 (E_STRICT/E_DEPRECATED/NOTICE silenced).
 */
// --- silence SugarCRM "noise" ---
error_reporting(E_ALL & ~E_STRICT & ~E_DEPRECATED & ~E_NOTICE);
ini_set('display_errors', '0');
// (optional) log errors to a file instead
// ini_set('log_errors', '1');
// ini_set('error_log', '/var/log/php_form_errors.log');
// --- PATHS (adjust to your installation) ---
$python = '/usr/local/bin/python3';
$script = '/var/www/html/modules/EcmInvoiceOuts/ai/analysisAI.py';
$baseDir = dirname($script);
// --- default field values: the last 7 days up to today ---
$defaultDateTo = date('Y-m-d');
$defaultDateFrom = date('Y-m-d', strtotime('-7 days'));
// --- collect POST data (PHP 5.6 friendly — no null coalescing) ---
$submitted = (isset($_SERVER['REQUEST_METHOD']) && $_SERVER['REQUEST_METHOD'] === 'POST');
$post_date_from = isset($_POST['date_from']) ? $_POST['date_from'] : $defaultDateFrom;
$post_date_to = isset($_POST['date_to']) ? $_POST['date_to'] : $defaultDateTo;
$post_preaggs = (isset($_POST['preaggs']) && is_array($_POST['preaggs'])) ? $_POST['preaggs'] : array();
$post_with_ai = !empty($_POST['with_ai']);
// HTML-escape a value for safe template output (quotes included).
function h($v) { return htmlspecialchars($v, ENT_QUOTES, 'UTF-8'); }
// Loose YYYY-MM-DD shape check (does not validate calendar correctness).
function is_valid_date_yyyy_mm_dd($d) {
    return (bool)preg_match('/^\d{4}-\d{2}-\d{2}$/', $d);
}
// --- run the Python script if the form was submitted ---
$ran = false;
$ok = false;
$rc = 0;
$out = '';
$err = '';
if ($submitted) {
    // simple date validation
    if (!is_valid_date_yyyy_mm_dd($post_date_from) || !is_valid_date_yyyy_mm_dd($post_date_to)) {
        $err = "Nieprawidłowy format daty. Użyj YYYY-MM-DD.";
        $ran = true;
    } else {
        // build the CLI arguments; every value is shell-escaped below
        $args = array(
            '--date-from', $post_date_from,
            '--date-to', $post_date_to,
            '--ai', ($post_with_ai ? 'true' : 'false')
        );
        if (!empty($post_preaggs)) {
            // CSV list of the selected pre-aggregates
            $args[] = '--metrics';
            $args[] = implode(',', $post_preaggs);
        }
        // command: cd into the script directory, run python; capture stdout+stderr
        $cmd = 'cd ' . escapeshellarg($baseDir) . ' && ' .
            escapeshellcmd($python) . ' ' . escapeshellarg($script);
        foreach ($args as $a) {
            $cmd .= ' ' . escapeshellarg($a);
        }
        $output = array();
        $returnVar = 0;
        exec($cmd . ' 2>&1', $output, $returnVar);
        $ran = true;
        $rc = $returnVar;
        $out = implode("\n", $output);
        $ok = ($returnVar === 0);
        if (!$ok && $err === '') {
            $err = "Błąd uruchamiania skryptu Python (kod: " . $rc . "):\n" . $out;
        }
    }
}
?>
<!doctype html>
<html lang="pl">
<head>
<meta charset="utf-8">
<title>Generator raportu sprzedaży</title>
<meta name="viewport" content="width=device-width, initial-scale=1">
<style>
body { font: 14px/1.4 system-ui, Arial, sans-serif; padding: 20px; }
fieldset { margin-bottom: 16px; padding: 12px; border-radius: 6px; border:1px solid #e5e5e5; }
.row { display: flex; gap: 16px; flex-wrap: wrap; }
.col { min-width: 220px; flex: 1; }
label { display:block; margin: 6px 0; }
input[type="date"], button { padding: 6px 10px; font-size:14px; }
button { margin-top: 10px; cursor: pointer; border:1px solid #0a66c2; background:#0a66c2; color:#fff; border-radius:8px; }
.pill { display:inline-block; padding:2px 8px; border-radius:999px; background:#eee; margin:4px 6px 0 0; }
.out { white-space: normal; background: #fff; border:1px solid #ddd; padding:12px; border-radius:6px; }
.error { white-space: pre-wrap; background: #fff3f3; border:1px solid #f3c2c2; padding:12px; border-radius:6px; color:#b00020; }
.muted { color:#666; }
</style>
</head>
<body>
<h1>Raport sprzedaży — parametry</h1>
<form method="post">
<!-- zakres dat -->
<fieldset>
<legend>Zakres dat</legend>
<div class="row">
<div class="col">
<label>Data od:
<input type="date" name="date_from" value="<?php echo h($post_date_from); ?>" required>
</label>
</div>
<div class="col">
<label>Data do:
<input type="date" name="date_to" value="<?php echo h($post_date_to); ?>" required>
</label>
</div>
</div>
</fieldset>
<!-- preagregaty -->
<fieldset>
<legend>Preagregaty do analizy</legend>
<label><input type="checkbox" name="preaggs[]" value="daily_sales" <?php echo in_array('daily_sales', $post_preaggs, true) ? 'checked' : ''; ?>> Dzienne sprzedaże</label>
<label><input type="checkbox" name="preaggs[]" value="product_summary" <?php echo in_array('product_summary', $post_preaggs, true) ? 'checked' : ''; ?>> Podsumowanie produktów</label>
<label><input type="checkbox" name="preaggs[]" value="customer_summary" <?php echo in_array('customer_summary', $post_preaggs, true) ? 'checked' : ''; ?>> Podsumowanie klientów</label>
<label><input type="checkbox" name="preaggs[]" value="product_daily" <?php echo in_array('product_daily', $post_preaggs, true) ? 'checked' : ''; ?>> Sprzedaż produktu dziennie</label>
<label><input type="checkbox" name="preaggs[]" value="top10_products_by_sales" <?php echo in_array('top10_products_by_sales', $post_preaggs, true) ? 'checked' : ''; ?>> Top10 produktów</label>
<label><input type="checkbox" name="preaggs[]" value="top10_customers_by_sales"<?php echo in_array('top10_customers_by_sales', $post_preaggs, true) ? 'checked' : ''; ?>> Top10 klientów</label>
</fieldset>
<!-- AI -->
<fieldset>
<legend>Analiza AI</legend>
<label>
<input type="checkbox" name="with_ai" <?php echo $post_with_ai ? 'checked' : ''; ?>> Dołącz analizę AI
</label>
</fieldset>
<button type="submit">Generuj</button>
</form>
<?php if ($submitted): ?>
<hr>
<h2>Użyte parametry</h2>
<p>
<span class="pill">Od: <?php echo h($post_date_from); ?></span>
<span class="pill">Do: <?php echo h($post_date_to); ?></span>
<span class="pill">AI: <?php echo $post_with_ai ? 'tak' : 'nie'; ?></span>
</p>
<p>Preagregaty:
<?php
if (!empty($post_preaggs)) {
foreach ($post_preaggs as $p) {
echo '<span class="pill">'.h($p).'</span>';
}
} else {
echo '<span class="muted">brak</span>';
}
?>
</p>
<h2>Wynik analizy</h2>
<?php if (!$ok): ?>
<div class="error"><?php echo h($err); ?></div>
<?php else: ?>
<!-- Zakładamy, że Python zwraca gotowy HTML -->
<div class="out"><?php echo $out; ?></div>
<?php endif; ?>
<?php endif; ?>
</body>
</html>

View File

@@ -1,16 +1,28 @@
<?php
// Runs the Python script, waits for completion, and returns its output.
$python = '/usr/local/bin/python3';
$script = '/var/www/html/modules/EcmInvoiceOuts/ai/analysisAI.py';
$cmd = escapeshellcmd("$python $script");
$cmd = 'python3 /var/www/html/modules/EcmInvoiceOuts/ai/test.py';
// odczyt
$output = [];
$returnVar = 0;
exec($cmd . ' 2>&1', $output, $returnVar);
$body = implode("\n", $output);
// błąd Pythona
if ($returnVar !== 0) {
// pokaż błąd jako tekst
while (ob_get_level()) { ob_end_clean(); }
header_remove();
header('Content-Type: text/plain; charset=utf-8');
http_response_code(500);
echo "Error running Python script:\n" . implode("\n", $output);
echo "Error running Python script:\n".$body;
exit;
}
// Expect a single line with the count
echo trim(implode("\n", $output));
// --- WYMUSZENIE RENDEROWANIA HTML ---
while (ob_get_level()) { ob_end_clean(); } // wyczyść wszystkie bufory
header_remove(); // usuń nagłówki ustawione wcześniej przez framework
header('Content-Type: text/html; charset=utf-8');
echo $body;
exit; // ZATRZYMAJ framework (np. SugarCRM), żeby nic już nie dopisywał

File diff suppressed because it is too large Load Diff

View File

@@ -131,6 +131,21 @@ function importSale($file)
$sale->register_date = date("d.m.Y", strtotime($xml->{'Order-Header'}->OrderDate));
$sale->delivery_date = date("d.m.Y", strtotime($xml->{'Order-Header'}->ExpectedDeliveryDate));
$delivery_timestamp = strtotime($xml->{'Order-Header'}->ExpectedDeliveryDate);
$delivery_day_of_week = date('N', $delivery_timestamp); // 1 = Monday, 7 = Sunday
if ($delivery_day_of_week == 1) { // Monday -> send on Friday
$send_timestamp = strtotime('-3 days', $delivery_timestamp);
} else { // send one day before
$send_timestamp = strtotime('-1 day', $delivery_timestamp);
}
$sale->send_date = date("d.m.Y", $send_timestamp);
$sale->document_no = $xml->{'Order-Header'}->OrderNumber;
$sale->document_date = date("d.m.Y", strtotime($xml->{'Order-Header'}->OrderDate));
$sale->document_due_date = date("d.m.Y", strtotime($xml->{'Order-Header'}->ExpectedDeliveryDate));
$sale->document_currency = 'PLN';
$date = new DateTime(date("d.m.Y", strtotime($xml->{'Order-Header'}->OrderDate)));
if ($sale->payment_date_days != "") {
$date->modify("+" . $sale->payment_date_days . " day");

View File

@@ -1,216 +1,217 @@
//START:
$(document).ready(
function() {
//set sizes
$("#parent_name").attr("size", "70");
$("#parent_name_copy").attr("size", "70");
$("#parent_shipping_address_name").attr("size", "70");
$("#payment_date_days").css("width", "40");
//$("#payment_method").css("width", "100");
//change parent select button
$("#btn_clr_parent_name").children().attr("src","themes/default/images/id-ff-add.png" );
$("#btn_clr_parent_name").attr("title","Dodaj" );
$("#btn_clr_parent_name").click(createAccount);
// confirm exit
window.onbeforeunload = confirmExit;
// prevent submit by enter press
lockEnter();
// parent info
$("#delivery_date").on('inputchange',function() {
calculateDate($("#delivery_date").val());
});
var previousVal;
var pollInterval = setInterval(function() {
var val = $('#parent_id').val();
if (val !== previousVal) {
if ($('#parent_id').val() == '') return;
$(".loading_panel").css("display", "block");
setTimeout(function() {
getParentInfo($("#parent_id").val(), 'Accounts');
}, 1000);
}
previousVal = val;
}, 500);
//newProduct
var previousVal2;
var prodInterval = setInterval(function() {
var val = $('#newProductId').val();
if (val !== previousVal2) {
if ($('#newProductId').val() == '') return;
$(".loading_panel").css("display", "block");
setTimeout(function() {
AddProduct(items.length-1, val);
}, 500);
}
previousVal2 = val;
}, 500);
// categories
function () {
//set sizes
$("#parent_name").attr("size", "70");
$("#parent_name_copy").attr("size", "70");
$("#parent_shipping_address_name").attr("size", "70");
$("#payment_date_days").css("width", "40");
//$("#payment_method").css("width", "100");
//change parent select button
$("#btn_clr_parent_name").children().attr("src", "themes/default/images/id-ff-add.png");
$("#btn_clr_parent_name").attr("title", "Dodaj");
$("#btn_clr_parent_name").click(createAccount);
// confirm exit
window.onbeforeunload = confirmExit;
// prevent submit by enter press
lockEnter();
$('#pdf_type').change(function () {
manageOO();
var count = $('#' + itemsTable + '_T tr').length - 1; // -1 -
// thead row
for (var index = 0; index != count; index++){
calculateRow(index);
$("#delivery_date").on('inputchange', function () {
calculateDate($("#delivery_date").val());
});
var previousVal;
var pollInterval = setInterval(function () {
var val = $('#parent_id').val();
if (val !== previousVal) {
if ($('#parent_id').val() == '') return;
$(".loading_panel").css("display", "block");
setTimeout(function () {
getParentInfo($("#parent_id").val(), 'Accounts');
}, 1000);
}
previousVal = val;
}, 500);
//newProduct
var previousVal2;
var prodInterval = setInterval(function () {
var val = $('#newProductId').val();
if (val !== previousVal2) {
if ($('#newProductId').val() == '') return;
$(".loading_panel").css("display", "block");
setTimeout(function () {
AddProduct(items.length - 1, val);
}, 500);
}
previousVal2 = val;
}, 500);
// categories
}
calculateTotal();
});
$('#pdf_type').change(function () {
manageOO();
var count = $('#' + itemsTable + '_T tr').length - 1; // -1 -
// thead row
for (var index = 0; index != count; index++) {
calculateRow(index);
getCategoriesList();
// its vat free change
$("#no_tax").change(function() {
// calculate totals
var count = $('#' + itemsTable + '_T tr').length - 1; // -1 -
// thead row
for (var index = 0; index != count; index++)
calculateRow(index);
});
// language channge
$("#ecmlanguage").change(function() {
changeLanguage();
});
removeFromValidate('EditView', 'shipping_iln');
DrawHeaders();
// stock selector
$("#stock").change(function() {
$("#stock_id").val(($("#stock :selected").val()));
});
if($("#type :selected").val()=='sales_order'){
removeFromValidate('EditView', 'invoice_date');
}
$("#type").change(function() {
if($("#type :selected").val()=='sales_order'){
removeFromValidate('EditView', 'invoice_date');
} else {
addToValidate('EditView', 'invoice_date', 'id', 'true', '');
}
});
if (($("#new_number").val() == true)
&& ($("#duplicate").val() != true) && ($("#ecmquote_id").val() =='')) {
EcmDocumentNumberGenerator_getNumberTemplate('document_no', 'EcmSales');
//sale from subpanel??
var ecp = $("#ecommerce_products").val();
if (ecp && ecp.length > 0) {
loadECommerceProducts(ecp);
} else if ($("#parent_id").val()!='') {
$(".loading_panel").css("display", "block");
setTimeout(function() {
getParentInfo($("#parent_id").val(), $(
"#parent_type :selected").val());
}, 1000);
}
$(".loading_panel").css("display", "none");
} else if (($("#new_number").val() == true)
&& ($("#duplicate").val() == true)) {
EcmDocumentNumberGenerator_getNumberTemplate('document_no', 'EcmSales');
getParentInfo($("#parent_id").val(),
$("#parent_type :selected").val());
getItems();
$(".loading_panel").css("display", "none");
} else if (($("#new_number").val() == true)
&& ($("#ecmquote_id").val() != '')) {
EcmDocumentNumberGenerator_getNumberTemplate('document_no', 'EcmSales');
getParentInfo($("#parent_id").val(),
'Accounts');
getItemsFromSale();
$(".loading_panel").css("display", "none");
} else {
getParentInfo($("#parent_id").val(),
$("#parent_type :selected").val());
getItems();
$(".loading_panel").css("display", "none");
}
// handle setItems
setITEMS = function() {
var formname = 'EditView';
if (check_form_(formname) == true) {
SetTab('ITEMS');
}
};
//payment date functions
$('#payment_date_days').css('height', '18');
$('#payment_date_days').val('0');
$('#payment_date_days').change(function() {
calculatePaymentDate();
});
// parent info
var previousVal4;
var pollInterval4 = setInterval(function() {
var val = $('#payment_date').val();
if (val !== previousVal4) {
setTimeout(function() {
calculateDateDiff();
}, 1000);
}
previousVal4 = val;
}, 1000);
// wyszukiwanie start
$('#searchProductsInput').keyup(function(e) {
if (e.keyCode == 13) {
// stronicowanie od 1 strony przy zmianie szukanego produktu
$('#searchStart').val(0);
searchProducts();
}
});
AddSearchRecord();
$(".loading_panel").css("display", "none");
});
}
calculateTotal();
});
getCategoriesList();
// its vat free change
$("#no_tax").change(function () {
// calculate totals
var count = $('#' + itemsTable + '_T tr').length - 1; // -1 -
// thead row
for (var index = 0; index != count; index++)
calculateRow(index);
});
// language channge
$("#ecmlanguage").change(function () {
changeLanguage();
});
removeFromValidate('EditView', 'shipping_iln');
DrawHeaders();
// stock selector
$("#stock").change(function () {
$("#stock_id").val(($("#stock :selected").val()));
});
if ($("#type :selected").val() == 'sales_order') {
removeFromValidate('EditView', 'invoice_date');
}
$("#type").change(function () {
if ($("#type :selected").val() == 'sales_order') {
removeFromValidate('EditView', 'invoice_date');
} else {
addToValidate('EditView', 'invoice_date', 'id', 'true', '');
}
});
if (($("#new_number").val() == true)
&& ($("#duplicate").val() != true) && ($("#ecmquote_id").val() == '')) {
EcmDocumentNumberGenerator_getNumberTemplate('document_no', 'EcmSales');
//sale from subpanel??
var ecp = $("#ecommerce_products").val();
if (ecp && ecp.length > 0) {
loadECommerceProducts(ecp);
} else if ($("#parent_id").val() != '') {
$(".loading_panel").css("display", "block");
setTimeout(function () {
getParentInfo($("#parent_id").val(), $(
"#parent_type :selected").val());
}, 1000);
}
$(".loading_panel").css("display", "none");
} else if (($("#new_number").val() == true)
&& ($("#duplicate").val() == true)) {
EcmDocumentNumberGenerator_getNumberTemplate('document_no', 'EcmSales');
getParentInfo($("#parent_id").val(),
$("#parent_type :selected").val());
getItems();
$(".loading_panel").css("display", "none");
} else if (($("#new_number").val() == true)
&& ($("#ecmquote_id").val() != '')) {
EcmDocumentNumberGenerator_getNumberTemplate('document_no', 'EcmSales');
getParentInfo($("#parent_id").val(),
'Accounts');
getItemsFromSale();
$(".loading_panel").css("display", "none");
} else {
getParentInfo($("#parent_id").val(),
$("#parent_type :selected").val());
getItems();
$(".loading_panel").css("display", "none");
}
// handle setItems
setITEMS = function () {
var formname = 'EditView';
if (check_form_(formname) == true) {
SetTab('ITEMS');
}
};
//payment date functions
$('#payment_date_days').css('height', '18');
$('#payment_date_days').val('0');
$('#payment_date_days').change(function () {
calculatePaymentDate();
});
// parent info
var previousVal4;
var pollInterval4 = setInterval(function () {
var val = $('#payment_date').val();
if (val !== previousVal4) {
setTimeout(function () {
calculateDateDiff();
}, 1000);
}
previousVal4 = val;
}, 1000);
// wyszukiwanie start
$('#searchProductsInput').keyup(function (e) {
if (e.keyCode == 13) {
// stronicowanie od 1 strony przy zmianie szukanego produktu
$('#searchStart').val(0);
searchProducts();
}
});
AddSearchRecord();
calculateDate($("#delivery_date").val());
$(".loading_panel").css("display", "none");
});
// handle save
var check_form_ = check_form;
check_form = function(formname,event) {
// zapobiega zapisywaniu dokumentu firefox bug, w przypadku nacisniecia enter w polu z autocomplete
if(event.clientY==0 && event.clientX==0){
return false;
}
window.onbeforeunload = null;
if (items.length == 0 || items[0].product_id=='') {
alert("Brak produktów");
return false;
}
// usuwa rekord pomocniczy
clearEmpty();
$(".loading_panel").css("display", "block");
// calculate totals
var count = $('#' + itemsTable + '_T tr').length - 1; // -1 - thead row
for (var index = 0; index != count; index++) {
calculateRow(index);
if (validation.recipient_code == true)
console.log('Sprawdzić kody trzeba');
}
$("#total_netto").val($("#t_netto").val());
$("#total_brutto").val($("#t_brutto").val());
$("#discount").val($("#disc").val());
$("#position_list").val(JSON.stringifyNoSecurity(items));
check_form = function (formname, event) {
// zapobiega zapisywaniu dokumentu firefox bug, w przypadku nacisniecia enter w polu z autocomplete
if (event.clientY == 0 && event.clientX == 0) {
return false;
}
window.onbeforeunload = null;
if (items.length == 0 || items[0].product_id == '') {
alert("Brak produktów");
return false;
}
// usuwa rekord pomocniczy
clearEmpty();
$(".loading_panel").css("display", "block");
// calculate totals
var count = $('#' + itemsTable + '_T tr').length - 1; // -1 - thead row
for (var index = 0; index != count; index++) {
calculateRow(index);
if (validation.recipient_code == true)
console.log('Sprawdzić kody trzeba');
}
$("#total_netto").val($("#t_netto").val());
$("#total_brutto").val($("#t_brutto").val());
$("#discount").val($("#disc").val());
$("#position_list").val(JSON.stringifyNoSecurity(items));
if (check_form_(formname) === true) {
return true;
} else {
window.onbeforeunload = confirmExit;
$(".loading_panel").css("display", "none");
return false;
}
if (check_form_(formname) === true) {
return true;
} else {
window.onbeforeunload = confirmExit;
$(".loading_panel").css("display", "none");
return false;
}
};
function confirmExit() {
return "";
return "";
}
function lockEnter() {
// prevent default
// prevent default
$(window).keydown(function(event) {
$(window).keydown(function (event) {
if (event.keyCode == 13 && $(":focus").prop('tagName')!='input') {
if (event.keyCode == 13 && $(":focus").prop('tagName') != 'input') {
event.preventDefault();
return false;
}
});
event.preventDefault();
return false;
}
});
}
function loadECommerceProducts(ecp) {

View File

@@ -24,6 +24,13 @@ function calculateDate($date){
$date = new Datetime($date);
$date->modify('-1 day');
$tmp['date']=$date->format("d.m.Y");
$tmp['date_day']=date('N', strtotime($tmp['date']));
if($tmp['date_day']==7){
$date->modify('-2 day');
} else if($tmp['date_day']==6){
$date->modify('-1 day');
}
$tmp['date']=$date->format("d.m.Y");
echo json_encode($tmp);
return '';
}